Dataset schema: each example carries four string-valued fields, shown here with the min/max string lengths reported by the dataset viewer:

    field            type    min   max
    code             string  75    104k
    code_sememe      string  47    309k
    token_type       string  215   214k
    code_dependency  string  75    155k

(code holds the original Python source; code_sememe an AST-style dump of that source; token_type the source with each token tagged by kind; code_dependency the source annotated with control-flow "depends on" comments.)
def generate_action(args):
    """Generate action."""
    controller = args.get('<controller>')
    action = args.get('<action>')
    with_template = args.get('-t')
    current_path = os.getcwd()

    logger.info('Start generating action.')

    controller_file_path = os.path.join(current_path, 'application/controllers',
                                        controller + '.py')
    if not os.path.exists(controller_file_path):
        logger.warning("The controller %s doesn't exist." % controller)
        return

    if with_template:
        action_source_path = os.path.join(dirname(abspath(__file__)),
                                          'templates/action.py')
    else:
        action_source_path = os.path.join(dirname(abspath(__file__)),
                                          'templates/action_without_template.py')

    # Add action source codes
    with open(action_source_path, 'r') as action_source_file:
        with open(controller_file_path, 'a') as controller_file:
            for action_line in action_source_file:
                new_line = action_line.replace('#{controller}', controller) \
                                      .replace('#{action}', action)
                controller_file.write(new_line)
    logger.info("Updated: %s" % _relative_path(controller_file_path))

    if with_template:
        # assets dir
        assets_dir_path = os.path.join(current_path,
                                       'application/pages/%s/%s' % (controller, action))
        _mkdir_p(assets_dir_path)

        # html
        action_html_template_path = os.path.join(dirname(abspath(__file__)),
                                                 'templates/action.html')
        action_html_path = os.path.join(assets_dir_path, '%s.html' % action)
        with open(action_html_template_path, 'r') as action_html_template_file:
            with open(action_html_path, 'w') as action_html_file:
                for line in action_html_template_file:
                    new_line = line.replace('#{action}', action) \
                                   .replace('#{action|title}', action.title()) \
                                   .replace('#{controller}', controller)
                    action_html_file.write(new_line)
        logger.info("New: %s" % _relative_path(action_html_path))

        # js
        action_js_template_path = os.path.join(dirname(abspath(__file__)),
                                               'templates/action.js')
        action_js_path = os.path.join(assets_dir_path, '%s.js' % action)
        shutil.copy(action_js_template_path, action_js_path)
        logger.info("New: %s" % _relative_path(action_js_path))

        # less
        action_less_template_path = os.path.join(dirname(abspath(__file__)),
                                                 'templates/action.less')
        action_less_path = os.path.join(assets_dir_path, '%s.less' % action)
        shutil.copy(action_less_template_path, action_less_path)
        logger.info("New: %s" % _relative_path(action_less_path))

    logger.info('Finish generating action.')
def function[generate_action, parameter[args]]: constant[Generate action.] variable[controller] assign[=] call[name[args].get, parameter[constant[<controller>]]] variable[action] assign[=] call[name[args].get, parameter[constant[<action>]]] variable[with_template] assign[=] call[name[args].get, parameter[constant[-t]]] variable[current_path] assign[=] call[name[os].getcwd, parameter[]] call[name[logger].info, parameter[constant[Start generating action.]]] variable[controller_file_path] assign[=] call[name[os].path.join, parameter[name[current_path], constant[application/controllers], binary_operation[name[controller] + constant[.py]]]] if <ast.UnaryOp object at 0x7da1b024ff10> begin[:] call[name[logger].warning, parameter[binary_operation[constant[The controller %s does't exist.] <ast.Mod object at 0x7da2590d6920> name[controller]]]] return[None] if name[with_template] begin[:] variable[action_source_path] assign[=] call[name[os].path.join, parameter[call[name[dirname], parameter[call[name[abspath], parameter[name[__file__]]]]], constant[templates/action.py]]] with call[name[open], parameter[name[action_source_path], constant[r]]] begin[:] with call[name[open], parameter[name[controller_file_path], constant[a]]] begin[:] for taget[name[action_line]] in starred[name[action_source_file]] begin[:] variable[new_line] assign[=] call[call[name[action_line].replace, parameter[constant[#{controller}], name[controller]]].replace, parameter[constant[#{action}], name[action]]] call[name[controller_file].write, parameter[name[new_line]]] call[name[logger].info, parameter[binary_operation[constant[Updated: %s] <ast.Mod object at 0x7da2590d6920> call[name[_relative_path], parameter[name[controller_file_path]]]]]] if name[with_template] begin[:] variable[assets_dir_path] assign[=] call[name[os].path.join, parameter[name[current_path], binary_operation[constant[application/pages/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc71120>, <ast.Name object at 0x7da18bc718a0>]]]]] call[name[_mkdir_p], parameter[name[assets_dir_path]]] variable[action_html_template_path] assign[=] call[name[os].path.join, parameter[call[name[dirname], parameter[call[name[abspath], parameter[name[__file__]]]]], constant[templates/action.html]]] variable[action_html_path] assign[=] call[name[os].path.join, parameter[name[assets_dir_path], binary_operation[constant[%s.html] <ast.Mod object at 0x7da2590d6920> name[action]]]] with call[name[open], parameter[name[action_html_template_path], constant[r]]] begin[:] with call[name[open], parameter[name[action_html_path], constant[w]]] begin[:] for taget[name[line]] in starred[name[action_html_template_file]] begin[:] variable[new_line] assign[=] call[call[call[name[line].replace, parameter[constant[#{action}], name[action]]].replace, parameter[constant[#{action|title}], call[name[action].title, parameter[]]]].replace, parameter[constant[#{controller}], name[controller]]] call[name[action_html_file].write, parameter[name[new_line]]] call[name[logger].info, parameter[binary_operation[constant[New: %s] <ast.Mod object at 0x7da2590d6920> call[name[_relative_path], parameter[name[action_html_path]]]]]] variable[action_js_template_path] assign[=] call[name[os].path.join, parameter[call[name[dirname], parameter[call[name[abspath], parameter[name[__file__]]]]], constant[templates/action.js]]] variable[action_js_path] assign[=] call[name[os].path.join, parameter[name[assets_dir_path], binary_operation[constant[%s.js] <ast.Mod object at 0x7da2590d6920> name[action]]]] 
call[name[shutil].copy, parameter[name[action_js_template_path], name[action_js_path]]] call[name[logger].info, parameter[binary_operation[constant[New: %s] <ast.Mod object at 0x7da2590d6920> call[name[_relative_path], parameter[name[action_js_path]]]]]] variable[action_less_template_path] assign[=] call[name[os].path.join, parameter[call[name[dirname], parameter[call[name[abspath], parameter[name[__file__]]]]], constant[templates/action.less]]] variable[action_less_path] assign[=] call[name[os].path.join, parameter[name[assets_dir_path], binary_operation[constant[%s.less] <ast.Mod object at 0x7da2590d6920> name[action]]]] call[name[shutil].copy, parameter[name[action_less_template_path], name[action_less_path]]] call[name[logger].info, parameter[binary_operation[constant[New: %s] <ast.Mod object at 0x7da2590d6920> call[name[_relative_path], parameter[name[action_less_path]]]]]] call[name[logger].info, parameter[constant[Finish generating action.]]]
keyword[def] identifier[generate_action] ( identifier[args] ): literal[string] identifier[controller] = identifier[args] . identifier[get] ( literal[string] ) identifier[action] = identifier[args] . identifier[get] ( literal[string] ) identifier[with_template] = identifier[args] . identifier[get] ( literal[string] ) identifier[current_path] = identifier[os] . identifier[getcwd] () identifier[logger] . identifier[info] ( literal[string] ) identifier[controller_file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[current_path] , literal[string] , identifier[controller] + literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[controller_file_path] ): identifier[logger] . identifier[warning] ( literal[string] % identifier[controller] ) keyword[return] keyword[if] identifier[with_template] : identifier[action_source_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] ( identifier[abspath] ( identifier[__file__] )), literal[string] ) keyword[else] : identifier[action_source_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] ( identifier[abspath] ( identifier[__file__] )), literal[string] ) keyword[with] identifier[open] ( identifier[action_source_path] , literal[string] ) keyword[as] identifier[action_source_file] : keyword[with] identifier[open] ( identifier[controller_file_path] , literal[string] ) keyword[as] identifier[controller_file] : keyword[for] identifier[action_line] keyword[in] identifier[action_source_file] : identifier[new_line] = identifier[action_line] . identifier[replace] ( literal[string] , identifier[controller] ). identifier[replace] ( literal[string] , identifier[action] ) identifier[controller_file] . identifier[write] ( identifier[new_line] ) identifier[logger] . identifier[info] ( literal[string] % identifier[_relative_path] ( identifier[controller_file_path] )) keyword[if] identifier[with_template] : identifier[assets_dir_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[current_path] , literal[string] %( identifier[controller] , identifier[action] )) identifier[_mkdir_p] ( identifier[assets_dir_path] ) identifier[action_html_template_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] ( identifier[abspath] ( identifier[__file__] )), literal[string] ) identifier[action_html_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[assets_dir_path] , literal[string] % identifier[action] ) keyword[with] identifier[open] ( identifier[action_html_template_path] , literal[string] ) keyword[as] identifier[action_html_template_file] : keyword[with] identifier[open] ( identifier[action_html_path] , literal[string] ) keyword[as] identifier[action_html_file] : keyword[for] identifier[line] keyword[in] identifier[action_html_template_file] : identifier[new_line] = identifier[line] . identifier[replace] ( literal[string] , identifier[action] ). identifier[replace] ( literal[string] , identifier[action] . identifier[title] ()). identifier[replace] ( literal[string] , identifier[controller] ) identifier[action_html_file] . identifier[write] ( identifier[new_line] ) identifier[logger] . identifier[info] ( literal[string] % identifier[_relative_path] ( identifier[action_html_path] )) identifier[action_js_template_path] = identifier[os] . identifier[path] . 
identifier[join] ( identifier[dirname] ( identifier[abspath] ( identifier[__file__] )), literal[string] ) identifier[action_js_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[assets_dir_path] , literal[string] % identifier[action] ) identifier[shutil] . identifier[copy] ( identifier[action_js_template_path] , identifier[action_js_path] ) identifier[logger] . identifier[info] ( literal[string] % identifier[_relative_path] ( identifier[action_js_path] )) identifier[action_less_template_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] ( identifier[abspath] ( identifier[__file__] )), literal[string] ) identifier[action_less_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[assets_dir_path] , literal[string] % identifier[action] ) identifier[shutil] . identifier[copy] ( identifier[action_less_template_path] , identifier[action_less_path] ) identifier[logger] . identifier[info] ( literal[string] % identifier[_relative_path] ( identifier[action_less_path] )) identifier[logger] . identifier[info] ( literal[string] )
def generate_action(args): """Generate action.""" controller = args.get('<controller>') action = args.get('<action>') with_template = args.get('-t') current_path = os.getcwd() logger.info('Start generating action.') controller_file_path = os.path.join(current_path, 'application/controllers', controller + '.py') if not os.path.exists(controller_file_path): logger.warning("The controller %s does't exist." % controller) return # depends on [control=['if'], data=[]] if with_template: action_source_path = os.path.join(dirname(abspath(__file__)), 'templates/action.py') # depends on [control=['if'], data=[]] else: action_source_path = os.path.join(dirname(abspath(__file__)), 'templates/action_without_template.py') # Add action source codes with open(action_source_path, 'r') as action_source_file: with open(controller_file_path, 'a') as controller_file: for action_line in action_source_file: new_line = action_line.replace('#{controller}', controller).replace('#{action}', action) controller_file.write(new_line) # depends on [control=['for'], data=['action_line']] # depends on [control=['with'], data=['controller_file']] # depends on [control=['with'], data=['open', 'action_source_file']] logger.info('Updated: %s' % _relative_path(controller_file_path)) if with_template: # assets dir assets_dir_path = os.path.join(current_path, 'application/pages/%s/%s' % (controller, action)) _mkdir_p(assets_dir_path) # html action_html_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.html') action_html_path = os.path.join(assets_dir_path, '%s.html' % action) with open(action_html_template_path, 'r') as action_html_template_file: with open(action_html_path, 'w') as action_html_file: for line in action_html_template_file: new_line = line.replace('#{action}', action).replace('#{action|title}', action.title()).replace('#{controller}', controller) action_html_file.write(new_line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['action_html_file']] # depends on [control=['with'], data=['open', 'action_html_template_file']] logger.info('New: %s' % _relative_path(action_html_path)) # js action_js_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.js') action_js_path = os.path.join(assets_dir_path, '%s.js' % action) shutil.copy(action_js_template_path, action_js_path) logger.info('New: %s' % _relative_path(action_js_path)) # less action_less_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.less') action_less_path = os.path.join(assets_dir_path, '%s.less' % action) shutil.copy(action_less_template_path, action_less_path) logger.info('New: %s' % _relative_path(action_less_path)) # depends on [control=['if'], data=[]] logger.info('Finish generating action.')
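A minimal sketch of how generate_action might be driven, assuming docopt-style argument parsing that yields the '<controller>', '<action>' and '-t' keys the function reads; the controller and action names below are made up:

# Hypothetical CLI invocation; assumes application/controllers/users.py
# already exists under the current working directory.
args = {'<controller>': 'users', '<action>': 'show', '-t': True}
generate_action(args)
# Appends the rendered action to application/controllers/users.py and,
# because -t is set, creates application/pages/users/show/ containing
# show.html, show.js and show.less.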
def hexdump( source, start=None, end=None, length=None,
             major_len=8, minor_len=4, colour=True, address_base=None ):
    """Print the contents of a byte string in tabular hexadecimal/ASCII format.

    source
        The byte string to print.

    start
        Start offset to read from (default: start of source)

    end
        End offset to stop reading at (default: end of source)

    length
        Length to read in (optional replacement for end)

    major_len
        Number of hexadecimal groups per line

    minor_len
        Number of bytes per hexadecimal group

    colour
        Add ANSI colour formatting to output (default: true)

    address_base
        Base address to use for labels (default: start)

    Raises ValueError if both end and length are defined.
    """
    for line in hexdump_iter( source, start, end, length, major_len,
                              minor_len, colour, address_base ):
        print( line )
def function[hexdump, parameter[source, start, end, length, major_len, minor_len, colour, address_base]]: constant[Print the contents of a byte string in tabular hexadecimal/ASCII format. source The byte string to print. start Start offset to read from (default: start) end End offset to stop reading at (default: end) length Length to read in (optional replacement for end) major_len Number of hexadecimal groups per line minor_len Number of bytes per hexadecimal group colour Add ANSI colour formatting to output (default: true) address_base Base address to use for labels (default: start) Raises ValueError if both end and length are defined. ] for taget[name[line]] in starred[call[name[hexdump_iter], parameter[name[source], name[start], name[end], name[length], name[major_len], name[minor_len], name[colour], name[address_base]]]] begin[:] call[name[print], parameter[name[line]]]
keyword[def] identifier[hexdump] ( identifier[source] , identifier[start] = keyword[None] , identifier[end] = keyword[None] , identifier[length] = keyword[None] , identifier[major_len] = literal[int] , identifier[minor_len] = literal[int] , identifier[colour] = keyword[True] , identifier[address_base] = keyword[None] ): literal[string] keyword[for] identifier[line] keyword[in] identifier[hexdump_iter] ( identifier[source] , identifier[start] , identifier[end] , identifier[length] , identifier[major_len] , identifier[minor_len] , identifier[colour] , identifier[address_base] ): identifier[print] ( identifier[line] )
def hexdump(source, start=None, end=None, length=None, major_len=8, minor_len=4, colour=True, address_base=None): """Print the contents of a byte string in tabular hexadecimal/ASCII format. source The byte string to print. start Start offset to read from (default: start) end End offset to stop reading at (default: end) length Length to read in (optional replacement for end) major_len Number of hexadecimal groups per line minor_len Number of bytes per hexadecimal group colour Add ANSI colour formatting to output (default: true) address_base Base address to use for labels (default: start) Raises ValueError if both end and length are defined. """ for line in hexdump_iter(source, start, end, length, major_len, minor_len, colour, address_base): print(line) # depends on [control=['for'], data=['line']]
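Illustrative use of hexdump, assuming the hexdump_iter helper it delegates to is available alongside it; the exact line layout is determined by that helper:

data = bytes(range(32))
# 4 groups of 4 bytes per line, plain (uncoloured) output:
hexdump(data, major_len=4, minor_len=4, colour=False)
# Per the docstring, passing both end and length should raise ValueError:
# hexdump(data, end=16, length=16)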
def p_align(p):
    """ asm : ALIGN expr
            | ALIGN pexpr
    """
    align = p[2].eval()
    if align < 2:
        error(p.lineno(1), "ALIGN value must be greater than 1")
        return

    MEMORY.set_org(MEMORY.org + (align - MEMORY.org % align) % align, p.lineno(1))
def function[p_align, parameter[p]]: constant[ asm : ALIGN expr | ALIGN pexpr ] variable[align] assign[=] call[call[name[p]][constant[2]].eval, parameter[]] if compare[name[align] less[<] constant[2]] begin[:] call[name[error], parameter[call[name[p].lineno, parameter[constant[1]]], constant[ALIGN value must be greater than 1]]] return[None] call[name[MEMORY].set_org, parameter[binary_operation[name[MEMORY].org + binary_operation[binary_operation[name[align] - binary_operation[name[MEMORY].org <ast.Mod object at 0x7da2590d6920> name[align]]] <ast.Mod object at 0x7da2590d6920> name[align]]], call[name[p].lineno, parameter[constant[1]]]]]
keyword[def] identifier[p_align] ( identifier[p] ): literal[string] identifier[align] = identifier[p] [ literal[int] ]. identifier[eval] () keyword[if] identifier[align] < literal[int] : identifier[error] ( identifier[p] . identifier[lineno] ( literal[int] ), literal[string] ) keyword[return] identifier[MEMORY] . identifier[set_org] ( identifier[MEMORY] . identifier[org] +( identifier[align] - identifier[MEMORY] . identifier[org] % identifier[align] )% identifier[align] , identifier[p] . identifier[lineno] ( literal[int] ))
def p_align(p): """ asm : ALIGN expr | ALIGN pexpr """ align = p[2].eval() if align < 2: error(p.lineno(1), 'ALIGN value must be greater than 1') return # depends on [control=['if'], data=[]] MEMORY.set_org(MEMORY.org + (align - MEMORY.org % align) % align, p.lineno(1))
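The expression MEMORY.org + (align - MEMORY.org % align) % align rounds the origin up to the next multiple of align, and leaves it unchanged when already aligned. A standalone check of the arithmetic:

def align_up(org, align):
    # Same formula p_align applies to MEMORY.org.
    return org + (align - org % align) % align

assert align_up(0x8001, 4) == 0x8004   # 0x8001 % 4 == 1, so pad by 3
assert align_up(0x8004, 4) == 0x8004   # already aligned: no change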
def allhexlify(data):
    """Hexlify given data into a string representation with hex values for all chars

    Input like 'ab\x04ce' becomes '\\x61\\x62\\x04\\x63\\x65'
    """
    hx = binascii.hexlify(data)
    return b''.join([b'\\x' + o for o in re.findall(b'..', hx)])
def function[allhexlify, parameter[data]]: constant[Hexlify given data into a string representation with hex values for all chars Input like 'abce' becomes 'abce' ] variable[hx] assign[=] call[name[binascii].hexlify, parameter[name[data]]] return[call[constant[b''].join, parameter[<ast.ListComp object at 0x7da1b1792650>]]]
keyword[def] identifier[allhexlify] ( identifier[data] ): literal[string] identifier[hx] = identifier[binascii] . identifier[hexlify] ( identifier[data] ) keyword[return] literal[string] . identifier[join] ([ literal[string] + identifier[o] keyword[for] identifier[o] keyword[in] identifier[re] . identifier[findall] ( literal[string] , identifier[hx] )])
def allhexlify(data): """Hexlify given data into a string representation with hex values for all chars Input like 'ab\x04ce' becomes 'ab\x04ce' """ hx = binascii.hexlify(data) return b''.join([b'\\x' + o for o in re.findall(b'..', hx)])
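A usage check for allhexlify matching its docstring; it relies on the binascii and re imports the surrounding module is assumed to provide:

import binascii
import re

# Every byte becomes a literal \xNN escape, printable or not.
assert allhexlify(b'ab\x04ce') == b'\\x61\\x62\\x04\\x63\\x65'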
def replace_keys(record: Mapping, key_map: Mapping) -> dict:
    """New record with renamed keys including keys only found in key_map."""
    return {key_map[k]: v for k, v in record.items() if k in key_map}
def function[replace_keys, parameter[record, key_map]]: constant[New record with renamed keys including keys only found in key_map.] return[<ast.DictComp object at 0x7da1b184ac20>]
keyword[def] identifier[replace_keys] ( identifier[record] : identifier[Mapping] , identifier[key_map] : identifier[Mapping] )-> identifier[dict] : literal[string] keyword[return] { identifier[key_map] [ identifier[k] ]: identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[record] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[key_map] }
def replace_keys(record: Mapping, key_map: Mapping) -> dict: """New record with renamed keys including keys only found in key_map.""" return {key_map[k]: v for (k, v) in record.items() if k in key_map}
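A quick usage example for replace_keys; keys absent from key_map are dropped:

record = {'id': 7, 'nm': 'Ada', 'unused': True}
key_map = {'id': 'user_id', 'nm': 'name'}
assert replace_keys(record, key_map) == {'user_id': 7, 'name': 'Ada'}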
def delete_repository_method(namespace, name, snapshot_id):
    """Redacts a method and all of its associated configurations.

    The method should exist in the methods repository.

    Args:
        namespace (str): Methods namespace
        name (str): method name
        snapshot_id (int): snapshot_id of the method

    Swagger:
        https://api.firecloud.org/#!/Method_Repository/delete_api_methods_namespace_name_snapshotId
    """
    uri = "methods/{0}/{1}/{2}".format(namespace, name, snapshot_id)
    return __delete(uri)
def function[delete_repository_method, parameter[namespace, name, snapshot_id]]: constant[Redacts a method and all of its associated configurations. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/delete_api_methods_namespace_name_snapshotId ] variable[uri] assign[=] call[constant[methods/{0}/{1}/{2}].format, parameter[name[namespace], name[name], name[snapshot_id]]] return[call[name[__delete], parameter[name[uri]]]]
keyword[def] identifier[delete_repository_method] ( identifier[namespace] , identifier[name] , identifier[snapshot_id] ): literal[string] identifier[uri] = literal[string] . identifier[format] ( identifier[namespace] , identifier[name] , identifier[snapshot_id] ) keyword[return] identifier[__delete] ( identifier[uri] )
def delete_repository_method(namespace, name, snapshot_id): """Redacts a method and all of its associated configurations. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/delete_api_methods_namespace_name_snapshotId """ uri = 'methods/{0}/{1}/{2}'.format(namespace, name, snapshot_id) return __delete(uri)
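Only the URI construction is observable without the FireCloud client; __delete performs the actual HTTP DELETE. The namespace and method name below are made up:

uri = "methods/{0}/{1}/{2}".format("my-namespace", "my-method", 3)
assert uri == "methods/my-namespace/my-method/3"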
def Input_setIgnoreInputEvents(self, ignore):
    """
    Function path: Input.setIgnoreInputEvents
        Domain: Input
        Method name: setIgnoreInputEvents

        Parameters:
            Required arguments:
                'ignore' (type: boolean) -> Ignores input events processing when set to true.
        No return value.

        Description: Ignores input events (useful while auditing page).
    """
    assert isinstance(ignore, (bool,)), \
        "Argument 'ignore' must be of type '['bool']'. Received type: '%s'" % type(ignore)
    subdom_funcs = self.synchronous_command('Input.setIgnoreInputEvents',
                                            ignore=ignore)
    return subdom_funcs
def function[Input_setIgnoreInputEvents, parameter[self, ignore]]: constant[ Function path: Input.setIgnoreInputEvents Domain: Input Method name: setIgnoreInputEvents Parameters: Required arguments: 'ignore' (type: boolean) -> Ignores input events processing when set to true. No return value. Description: Ignores input events (useful while auditing page). ] assert[call[name[isinstance], parameter[name[ignore], tuple[[<ast.Name object at 0x7da1b101dbd0>]]]]] variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Input.setIgnoreInputEvents]]] return[name[subdom_funcs]]
keyword[def] identifier[Input_setIgnoreInputEvents] ( identifier[self] , identifier[ignore] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[ignore] ,( identifier[bool] ,) ), literal[string] % identifier[type] ( identifier[ignore] ) identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] , identifier[ignore] = identifier[ignore] ) keyword[return] identifier[subdom_funcs]
def Input_setIgnoreInputEvents(self, ignore): """ Function path: Input.setIgnoreInputEvents Domain: Input Method name: setIgnoreInputEvents Parameters: Required arguments: 'ignore' (type: boolean) -> Ignores input events processing when set to true. No return value. Description: Ignores input events (useful while auditing page). """ assert isinstance(ignore, (bool,)), "Argument 'ignore' must be of type '['bool']'. Received type: '%s'" % type(ignore) subdom_funcs = self.synchronous_command('Input.setIgnoreInputEvents', ignore=ignore) return subdom_funcs
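The assert guards the DevTools command against non-boolean input. A sketch, where tab is a hypothetical client object exposing this method:

tab.Input_setIgnoreInputEvents(True)    # page stops processing input events
tab.Input_setIgnoreInputEvents(False)   # restore normal input handling
try:
    tab.Input_setIgnoreInputEvents(1)   # truthy, but not a bool
except AssertionError as e:
    print(e)  # "Argument 'ignore' must be of type '['bool']'. ..."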
def split_fasta(f, id2f):
    """
    split fasta file into separate fasta files based on
    list of scaffolds that belong to each separate file
    """
    opened = {}
    for seq in parse_fasta(f):
        id = seq[0].split('>')[1].split()[0]
        if id not in id2f:
            continue
        fasta = id2f[id]
        if fasta not in opened:
            opened[fasta] = '%s.fa' % fasta
        seq[1] += '\n'
        with open(opened[fasta], 'a+') as f_out:
            f_out.write('\n'.join(seq))
def function[split_fasta, parameter[f, id2f]]: constant[ split fasta file into separate fasta files based on list of scaffolds that belong to each separate file ] variable[opened] assign[=] dictionary[[], []] for taget[name[seq]] in starred[call[name[parse_fasta], parameter[name[f]]]] begin[:] variable[id] assign[=] call[call[call[call[call[name[seq]][constant[0]].split, parameter[constant[>]]]][constant[1]].split, parameter[]]][constant[0]] if compare[name[id] <ast.NotIn object at 0x7da2590d7190> name[id2f]] begin[:] continue variable[fasta] assign[=] call[name[id2f]][name[id]] if compare[name[fasta] <ast.NotIn object at 0x7da2590d7190> name[opened]] begin[:] call[name[opened]][name[fasta]] assign[=] binary_operation[constant[%s.fa] <ast.Mod object at 0x7da2590d6920> name[fasta]] <ast.AugAssign object at 0x7da2047ea440> with call[name[open], parameter[call[name[opened]][name[fasta]], constant[a+]]] begin[:] call[name[f_out].write, parameter[call[constant[ ].join, parameter[name[seq]]]]]
keyword[def] identifier[split_fasta] ( identifier[f] , identifier[id2f] ): literal[string] identifier[opened] ={} keyword[for] identifier[seq] keyword[in] identifier[parse_fasta] ( identifier[f] ): identifier[id] = identifier[seq] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ()[ literal[int] ] keyword[if] identifier[id] keyword[not] keyword[in] identifier[id2f] : keyword[continue] identifier[fasta] = identifier[id2f] [ identifier[id] ] keyword[if] identifier[fasta] keyword[not] keyword[in] identifier[opened] : identifier[opened] [ identifier[fasta] ]= literal[string] % identifier[fasta] identifier[seq] [ literal[int] ]+= literal[string] keyword[with] identifier[open] ( identifier[opened] [ identifier[fasta] ], literal[string] ) keyword[as] identifier[f_out] : identifier[f_out] . identifier[write] ( literal[string] . identifier[join] ( identifier[seq] ))
def split_fasta(f, id2f): """ split fasta file into separate fasta files based on list of scaffolds that belong to each separate file """ opened = {} for seq in parse_fasta(f): id = seq[0].split('>')[1].split()[0] if id not in id2f: continue # depends on [control=['if'], data=[]] fasta = id2f[id] if fasta not in opened: opened[fasta] = '%s.fa' % fasta # depends on [control=['if'], data=['fasta', 'opened']] seq[1] += '\n' with open(opened[fasta], 'a+') as f_out: f_out.write('\n'.join(seq)) # depends on [control=['with'], data=['f_out']] # depends on [control=['for'], data=['seq']]
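Sketch of driving split_fasta, assuming parse_fasta yields mutable [header, sequence] lists such as ['>scaf1 desc', 'ACGT...']; the file and scaffold names are made up:

id2f = {'scaf1': 'binA', 'scaf2': 'binB'}   # scaffold id -> output bin
with open('assembly.fa') as f:
    split_fasta(f, id2f)
# Matching records are appended to binA.fa / binB.fa; records whose id
# is missing from id2f are skipped.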
def set_detail_level(self, detail_levels):
    """
    Sets the detail levels from the input dictionary in detail_levels.
    """
    if detail_levels is None:
        return
    self.detail_levels = detail_levels
    if 'api' in detail_levels:
        self.api_detail_level = detail_levels['api']
    if 'http' in detail_levels:
        self.http_detail_level = detail_levels['http']
    if isinstance(self.api_detail_level, int):
        self.api_maxlen = self.api_detail_level
    if isinstance(self.http_detail_level, int):
        self.http_maxlen = self.http_detail_level
def function[set_detail_level, parameter[self, detail_levels]]: constant[ Sets the detail levels from the input dictionary in detail_levels. ] if compare[name[detail_levels] is constant[None]] begin[:] return[None] name[self].detail_levels assign[=] name[detail_levels] if compare[constant[api] in name[detail_levels]] begin[:] name[self].api_detail_level assign[=] call[name[detail_levels]][constant[api]] if compare[constant[http] in name[detail_levels]] begin[:] name[self].http_detail_level assign[=] call[name[detail_levels]][constant[http]] if call[name[isinstance], parameter[name[self].api_detail_level, name[int]]] begin[:] name[self].api_maxlen assign[=] name[self].api_detail_level if call[name[isinstance], parameter[name[self].http_detail_level, name[int]]] begin[:] name[self].http_maxlen assign[=] name[self].http_detail_level
keyword[def] identifier[set_detail_level] ( identifier[self] , identifier[detail_levels] ): literal[string] keyword[if] identifier[detail_levels] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[detail_levels] = identifier[detail_levels] keyword[if] literal[string] keyword[in] identifier[detail_levels] : identifier[self] . identifier[api_detail_level] = identifier[detail_levels] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[detail_levels] : identifier[self] . identifier[http_detail_level] = identifier[detail_levels] [ literal[string] ] keyword[if] identifier[isinstance] ( identifier[self] . identifier[api_detail_level] , identifier[int] ): identifier[self] . identifier[api_maxlen] = identifier[self] . identifier[api_detail_level] keyword[if] identifier[isinstance] ( identifier[self] . identifier[http_detail_level] , identifier[int] ): identifier[self] . identifier[http_maxlen] = identifier[self] . identifier[http_detail_level]
def set_detail_level(self, detail_levels): """ Sets the detail levels from the input dictionary in detail_levels. """ if detail_levels is None: return # depends on [control=['if'], data=[]] self.detail_levels = detail_levels if 'api' in detail_levels: self.api_detail_level = detail_levels['api'] # depends on [control=['if'], data=['detail_levels']] if 'http' in detail_levels: self.http_detail_level = detail_levels['http'] # depends on [control=['if'], data=['detail_levels']] if isinstance(self.api_detail_level, int): self.api_maxlen = self.api_detail_level # depends on [control=['if'], data=[]] if isinstance(self.http_detail_level, int): self.http_maxlen = self.http_detail_level # depends on [control=['if'], data=[]]
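Sketch of the effect, assuming cfg is an instance of the class this method belongs to; the level values are illustrative:

cfg.set_detail_level({'api': 120, 'http': 'all'})
# api_detail_level == 120 and, being an int, api_maxlen == 120;
# http_detail_level == 'all' is not an int, so http_maxlen is untouched.
cfg.set_detail_level(None)   # no-op: returns immediately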
def p_notminus_assignment(self, t):
    '''notminus_assignment : IDENT EQ NOTMINUS'''
    self.accu.add(Term('obs_vlabel', [self.name, "gen(\"" + t[1] + "\")", "notMinus"]))
def function[p_notminus_assignment, parameter[self, t]]: constant[notminus_assignment : IDENT EQ NOTMINUS] call[name[self].accu.add, parameter[call[name[Term], parameter[constant[obs_vlabel], list[[<ast.Attribute object at 0x7da1b28ae710>, <ast.BinOp object at 0x7da1b28acc10>, <ast.Constant object at 0x7da1b28af6a0>]]]]]]
keyword[def] identifier[p_notminus_assignment] ( identifier[self] , identifier[t] ): literal[string] identifier[self] . identifier[accu] . identifier[add] ( identifier[Term] ( literal[string] ,[ identifier[self] . identifier[name] , literal[string] + identifier[t] [ literal[int] ]+ literal[string] , literal[string] ]))
def p_notminus_assignment(self, t): """notminus_assignment : IDENT EQ NOTMINUS""" self.accu.add(Term('obs_vlabel', [self.name, 'gen("' + t[1] + '")', 'notMinus']))
def imbox(xy, w, h, angle=0.0, **kwargs):
    """ draw boundary box
    :param xy: start index xy (ji)
    :param w: width
    :param h: height
    :param angle:
    :param kwargs:
    :return:
    """
    from matplotlib.patches import Rectangle
    return imbound(Rectangle, xy, w, h, angle, **kwargs)
def function[imbox, parameter[xy, w, h, angle]]: constant[ draw boundary box :param xy: start index xy (ji) :param w: width :param h: height :param angle: :param kwargs: :return: ] from relative_module[matplotlib.patches] import module[Rectangle] return[call[name[imbound], parameter[name[Rectangle], name[xy], name[w], name[h], name[angle]]]]
keyword[def] identifier[imbox] ( identifier[xy] , identifier[w] , identifier[h] , identifier[angle] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[matplotlib] . identifier[patches] keyword[import] identifier[Rectangle] keyword[return] identifier[imbound] ( identifier[Rectangle] , identifier[xy] , identifier[w] , identifier[h] , identifier[angle] ,** identifier[kwargs] )
def imbox(xy, w, h, angle=0.0, **kwargs): """ draw boundary box :param xy: start index xy (ji) :param w: width :param h: height :param angle: :param kwargs: :return: """ from matplotlib.patches import Rectangle return imbound(Rectangle, xy, w, h, angle, **kwargs)
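Illustrative use, assuming imbound (not shown) instantiates the patch class and attaches it to the current matplotlib axes; all coordinates are made up:

import matplotlib.pyplot as plt
import numpy as np

plt.imshow(np.zeros((100, 100)), cmap='gray')
imbox((20, 30), 40, 25, fill=False, edgecolor='red')  # extra kwargs go to Rectangle
plt.show()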
def twoline2rv(longstr1, longstr2, whichconst, afspc_mode=False):
    """Return a Satellite imported from two lines of TLE data.

    Provide the two TLE lines as strings `longstr1` and `longstr2`,
    and select which standard set of gravitational constants you want
    by providing `gravity_constants`:

    `sgp4.earth_gravity.wgs72` - Standard WGS 72 model
    `sgp4.earth_gravity.wgs84` - More recent WGS 84 model
    `sgp4.earth_gravity.wgs72old` - Legacy support for old SGP4 behavior

    Normally, computations are made using various recent improvements
    to the algorithm.  If you want to turn some of these off and go
    back into "afspc" mode, then set `afspc_mode` to `True`.
    """
    deg2rad = pi / 180.0;          #    0.0174532925199433
    xpdotp = 1440.0 / (2.0 * pi);  #  229.1831180523293

    tumin = whichconst.tumin

    satrec = Satellite()
    satrec.error = 0;
    satrec.whichconst = whichconst  # Python extension: remembers its consts

    line = longstr1.rstrip()

    # try/except is not well supported by Numba
    if (len(line) >= 64 and
        line.startswith('1 ') and
        line[8] == ' ' and
        line[23] == '.' and
        line[32] == ' ' and
        line[34] == '.' and
        line[43] == ' ' and
        line[52] == ' ' and
        line[61] == ' ' and
        line[63] == ' '):

        _saved_satnum = satrec.satnum = int(line[2:7])
        # classification = line[7] or 'U'
        # intldesg = line[9:17]
        two_digit_year = int(line[18:20])
        satrec.epochdays = float(line[20:32])
        satrec.ndot = float(line[33:43])
        satrec.nddot = float(line[44] + '.' + line[45:50])
        nexp = int(line[50:52])
        satrec.bstar = float(line[53] + '.' + line[54:59])
        ibexp = int(line[59:61])
        # numb = int(line[62])
        # elnum = int(line[64:68])
    else:
        raise ValueError(error_message.format(1, LINE1, line))

    line = longstr2.rstrip()

    if (len(line) >= 69 and
        line.startswith('2 ') and
        line[7] == ' ' and
        line[11] == '.' and
        line[16] == ' ' and
        line[20] == '.' and
        line[25] == ' ' and
        line[33] == ' ' and
        line[37] == '.' and
        line[42] == ' ' and
        line[46] == '.' and
        line[51] == ' '):

        satrec.satnum = int(line[2:7])
        if _saved_satnum != satrec.satnum:
            raise ValueError('Object numbers in lines 1 and 2 do not match')

        satrec.inclo = float(line[8:16])
        satrec.nodeo = float(line[17:25])
        satrec.ecco = float('0.' + line[26:33].replace(' ', '0'))
        satrec.argpo = float(line[34:42])
        satrec.mo = float(line[43:51])
        satrec.no = float(line[52:63])
        # revnum = line[63:68]
    # except (AssertionError, IndexError, ValueError):
    else:
        raise ValueError(error_message.format(2, LINE2, line))

    # ---- find no, ndot, nddot ----
    satrec.no = satrec.no / xpdotp;  # rad/min
    satrec.nddot = satrec.nddot * pow(10.0, nexp);
    satrec.bstar = satrec.bstar * pow(10.0, ibexp);

    # ---- convert to sgp4 units ----
    satrec.a = pow(satrec.no * tumin, (-2.0 / 3.0));
    satrec.ndot = satrec.ndot / (xpdotp * 1440.0);  # ? * minperday
    satrec.nddot = satrec.nddot / (xpdotp * 1440.0 * 1440);

    # ---- find standard orbital elements ----
    satrec.inclo = satrec.inclo * deg2rad;
    satrec.nodeo = satrec.nodeo * deg2rad;
    satrec.argpo = satrec.argpo * deg2rad;
    satrec.mo = satrec.mo * deg2rad;

    satrec.alta = satrec.a * (1.0 + satrec.ecco) - 1.0;
    satrec.altp = satrec.a * (1.0 - satrec.ecco) - 1.0;

    """
    // ----------------------------------------------------------------
    // find sgp4epoch time of element set
    // remember that sgp4 uses units of days from 0 jan 1950 (sgp4epoch)
    // and minutes from the epoch (time)
    // ----------------------------------------------------------------

    // ---------------- temp fix for years from 1957-2056 -------------------
    // --------- correct fix will occur when year is 4-digit in tle ---------
    """
    if two_digit_year < 57:
        year = two_digit_year + 2000;
    else:
        year = two_digit_year + 1900;

    mon, day, hr, minute, sec = days2mdhms(year, satrec.epochdays);
    sec_whole, sec_fraction = divmod(sec, 1.0)

    satrec.epochyr = year
    satrec.jdsatepoch = jday(year, mon, day, hr, minute, sec);
    satrec.epoch = datetime(year, mon, day, hr, minute, int(sec_whole),
                            int(sec_fraction * 1000000.0 // 1.0))

    # ---------------- initialize the orbit at sgp4epoch -------------------
    sgp4init(whichconst, afspc_mode, satrec.satnum, satrec.jdsatepoch - 2433281.5,
             satrec.bstar, satrec.ecco, satrec.argpo, satrec.inclo, satrec.mo,
             satrec.no, satrec.nodeo, satrec)

    return satrec
def function[twoline2rv, parameter[longstr1, longstr2, whichconst, afspc_mode]]: constant[Return a Satellite imported from two lines of TLE data. Provide the two TLE lines as strings `longstr1` and `longstr2`, and select which standard set of gravitational constants you want by providing `gravity_constants`: `sgp4.earth_gravity.wgs72` - Standard WGS 72 model `sgp4.earth_gravity.wgs84` - More recent WGS 84 model `sgp4.earth_gravity.wgs72old` - Legacy support for old SGP4 behavior Normally, computations are made using various recent improvements to the algorithm. If you want to turn some of these off and go back into "afspc" mode, then set `afspc_mode` to `True`. ] variable[deg2rad] assign[=] binary_operation[name[pi] / constant[180.0]] variable[xpdotp] assign[=] binary_operation[constant[1440.0] / binary_operation[constant[2.0] * name[pi]]] variable[tumin] assign[=] name[whichconst].tumin variable[satrec] assign[=] call[name[Satellite], parameter[]] name[satrec].error assign[=] constant[0] name[satrec].whichconst assign[=] name[whichconst] variable[line] assign[=] call[name[longstr1].rstrip, parameter[]] if <ast.BoolOp object at 0x7da1b0e6c7f0> begin[:] variable[_saved_satnum] assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cb94e0>]]] variable[two_digit_year] assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cb9330>]]] name[satrec].epochdays assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cb9030>]]] name[satrec].ndot assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cb91b0>]]] name[satrec].nddot assign[=] call[name[float], parameter[binary_operation[binary_operation[call[name[line]][constant[44]] + constant[.]] + call[name[line]][<ast.Slice object at 0x7da1b0cb8b20>]]]] variable[nexp] assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cb86d0>]]] name[satrec].bstar assign[=] call[name[float], parameter[binary_operation[binary_operation[call[name[line]][constant[53]] + constant[.]] + call[name[line]][<ast.Slice object at 0x7da1b0cb85e0>]]]] variable[ibexp] assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cb8250>]]] variable[line] assign[=] call[name[longstr2].rstrip, parameter[]] if <ast.BoolOp object at 0x7da1b0cb8940> begin[:] name[satrec].satnum assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b0cba440>]]] if compare[name[_saved_satnum] not_equal[!=] name[satrec].satnum] begin[:] <ast.Raise object at 0x7da1b0c34190> name[satrec].inclo assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0c343a0>]]] name[satrec].nodeo assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0c34580>]]] name[satrec].ecco assign[=] call[name[float], parameter[binary_operation[constant[0.] 
+ call[call[name[line]][<ast.Slice object at 0x7da1b0c34820>].replace, parameter[constant[ ], constant[0]]]]]] name[satrec].argpo assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0c34a60>]]] name[satrec].mo assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0c34c40>]]] name[satrec].no assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b0c34e20>]]] name[satrec].no assign[=] binary_operation[name[satrec].no / name[xpdotp]] name[satrec].nddot assign[=] binary_operation[name[satrec].nddot * call[name[pow], parameter[constant[10.0], name[nexp]]]] name[satrec].bstar assign[=] binary_operation[name[satrec].bstar * call[name[pow], parameter[constant[10.0], name[ibexp]]]] name[satrec].a assign[=] call[name[pow], parameter[binary_operation[name[satrec].no * name[tumin]], binary_operation[<ast.UnaryOp object at 0x7da1b0c36c80> / constant[3.0]]]] name[satrec].ndot assign[=] binary_operation[name[satrec].ndot / binary_operation[name[xpdotp] * constant[1440.0]]] name[satrec].nddot assign[=] binary_operation[name[satrec].nddot / binary_operation[binary_operation[name[xpdotp] * constant[1440.0]] * constant[1440]]] name[satrec].inclo assign[=] binary_operation[name[satrec].inclo * name[deg2rad]] name[satrec].nodeo assign[=] binary_operation[name[satrec].nodeo * name[deg2rad]] name[satrec].argpo assign[=] binary_operation[name[satrec].argpo * name[deg2rad]] name[satrec].mo assign[=] binary_operation[name[satrec].mo * name[deg2rad]] name[satrec].alta assign[=] binary_operation[binary_operation[name[satrec].a * binary_operation[constant[1.0] + name[satrec].ecco]] - constant[1.0]] name[satrec].altp assign[=] binary_operation[binary_operation[name[satrec].a * binary_operation[constant[1.0] - name[satrec].ecco]] - constant[1.0]] constant[ // ---------------------------------------------------------------- // find sgp4epoch time of element set // remember that sgp4 uses units of days from 0 jan 1950 (sgp4epoch) // and minutes from the epoch (time) // ---------------------------------------------------------------- // ---------------- temp fix for years from 1957-2056 ------------------- // --------- correct fix will occur when year is 4-digit in tle --------- ] if compare[name[two_digit_year] less[<] constant[57]] begin[:] variable[year] assign[=] binary_operation[name[two_digit_year] + constant[2000]] <ast.Tuple object at 0x7da1b0c35db0> assign[=] call[name[days2mdhms], parameter[name[year], name[satrec].epochdays]] <ast.Tuple object at 0x7da1b0c35b40> assign[=] call[name[divmod], parameter[name[sec], constant[1.0]]] name[satrec].epochyr assign[=] name[year] name[satrec].jdsatepoch assign[=] call[name[jday], parameter[name[year], name[mon], name[day], name[hr], name[minute], name[sec]]] name[satrec].epoch assign[=] call[name[datetime], parameter[name[year], name[mon], name[day], name[hr], name[minute], call[name[int], parameter[name[sec_whole]]], call[name[int], parameter[binary_operation[binary_operation[name[sec_fraction] * constant[1000000.0]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1.0]]]]]] call[name[sgp4init], parameter[name[whichconst], name[afspc_mode], name[satrec].satnum, binary_operation[name[satrec].jdsatepoch - constant[2433281.5]], name[satrec].bstar, name[satrec].ecco, name[satrec].argpo, name[satrec].inclo, name[satrec].mo, name[satrec].no, name[satrec].nodeo, name[satrec]]] return[name[satrec]]
keyword[def] identifier[twoline2rv] ( identifier[longstr1] , identifier[longstr2] , identifier[whichconst] , identifier[afspc_mode] = keyword[False] ): literal[string] identifier[deg2rad] = identifier[pi] / literal[int] ; identifier[xpdotp] = literal[int] /( literal[int] * identifier[pi] ); identifier[tumin] = identifier[whichconst] . identifier[tumin] identifier[satrec] = identifier[Satellite] () identifier[satrec] . identifier[error] = literal[int] ; identifier[satrec] . identifier[whichconst] = identifier[whichconst] identifier[line] = identifier[longstr1] . identifier[rstrip] () keyword[if] ( identifier[len] ( identifier[line] )>= literal[int] keyword[and] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] ): identifier[_saved_satnum] = identifier[satrec] . identifier[satnum] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]) identifier[two_digit_year] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[epochdays] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[ndot] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[nddot] = identifier[float] ( identifier[line] [ literal[int] ]+ literal[string] + identifier[line] [ literal[int] : literal[int] ]) identifier[nexp] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[bstar] = identifier[float] ( identifier[line] [ literal[int] ]+ literal[string] + identifier[line] [ literal[int] : literal[int] ]) identifier[ibexp] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]) keyword[else] : keyword[raise] identifier[ValueError] ( identifier[error_message] . identifier[format] ( literal[int] , identifier[LINE1] , identifier[line] )) identifier[line] = identifier[longstr2] . identifier[rstrip] () keyword[if] ( identifier[len] ( identifier[line] )>= literal[int] keyword[and] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] keyword[and] identifier[line] [ literal[int] ]== literal[string] ): identifier[satrec] . identifier[satnum] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ]) keyword[if] identifier[_saved_satnum] != identifier[satrec] . identifier[satnum] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[satrec] . 
identifier[inclo] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[nodeo] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[ecco] = identifier[float] ( literal[string] + identifier[line] [ literal[int] : literal[int] ]. identifier[replace] ( literal[string] , literal[string] )) identifier[satrec] . identifier[argpo] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[mo] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) identifier[satrec] . identifier[no] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]) keyword[else] : keyword[raise] identifier[ValueError] ( identifier[error_message] . identifier[format] ( literal[int] , identifier[LINE2] , identifier[line] )) identifier[satrec] . identifier[no] = identifier[satrec] . identifier[no] / identifier[xpdotp] ; identifier[satrec] . identifier[nddot] = identifier[satrec] . identifier[nddot] * identifier[pow] ( literal[int] , identifier[nexp] ); identifier[satrec] . identifier[bstar] = identifier[satrec] . identifier[bstar] * identifier[pow] ( literal[int] , identifier[ibexp] ); identifier[satrec] . identifier[a] = identifier[pow] ( identifier[satrec] . identifier[no] * identifier[tumin] ,(- literal[int] / literal[int] )); identifier[satrec] . identifier[ndot] = identifier[satrec] . identifier[ndot] /( identifier[xpdotp] * literal[int] ); identifier[satrec] . identifier[nddot] = identifier[satrec] . identifier[nddot] /( identifier[xpdotp] * literal[int] * literal[int] ); identifier[satrec] . identifier[inclo] = identifier[satrec] . identifier[inclo] * identifier[deg2rad] ; identifier[satrec] . identifier[nodeo] = identifier[satrec] . identifier[nodeo] * identifier[deg2rad] ; identifier[satrec] . identifier[argpo] = identifier[satrec] . identifier[argpo] * identifier[deg2rad] ; identifier[satrec] . identifier[mo] = identifier[satrec] . identifier[mo] * identifier[deg2rad] ; identifier[satrec] . identifier[alta] = identifier[satrec] . identifier[a] *( literal[int] + identifier[satrec] . identifier[ecco] )- literal[int] ; identifier[satrec] . identifier[altp] = identifier[satrec] . identifier[a] *( literal[int] - identifier[satrec] . identifier[ecco] )- literal[int] ; literal[string] keyword[if] identifier[two_digit_year] < literal[int] : identifier[year] = identifier[two_digit_year] + literal[int] ; keyword[else] : identifier[year] = identifier[two_digit_year] + literal[int] ; identifier[mon] , identifier[day] , identifier[hr] , identifier[minute] , identifier[sec] = identifier[days2mdhms] ( identifier[year] , identifier[satrec] . identifier[epochdays] ); identifier[sec_whole] , identifier[sec_fraction] = identifier[divmod] ( identifier[sec] , literal[int] ) identifier[satrec] . identifier[epochyr] = identifier[year] identifier[satrec] . identifier[jdsatepoch] = identifier[jday] ( identifier[year] , identifier[mon] , identifier[day] , identifier[hr] , identifier[minute] , identifier[sec] ); identifier[satrec] . identifier[epoch] = identifier[datetime] ( identifier[year] , identifier[mon] , identifier[day] , identifier[hr] , identifier[minute] , identifier[int] ( identifier[sec_whole] ), identifier[int] ( identifier[sec_fraction] * literal[int] // literal[int] )) identifier[sgp4init] ( identifier[whichconst] , identifier[afspc_mode] , identifier[satrec] . identifier[satnum] , identifier[satrec] . 
identifier[jdsatepoch] - literal[int] , identifier[satrec] . identifier[bstar] , identifier[satrec] . identifier[ecco] , identifier[satrec] . identifier[argpo] , identifier[satrec] . identifier[inclo] , identifier[satrec] . identifier[mo] , identifier[satrec] . identifier[no] , identifier[satrec] . identifier[nodeo] , identifier[satrec] ) keyword[return] identifier[satrec]
def twoline2rv(longstr1, longstr2, whichconst, afspc_mode=False): """Return a Satellite imported from two lines of TLE data. Provide the two TLE lines as strings `longstr1` and `longstr2`, and select which standard set of gravitational constants you want by providing `gravity_constants`: `sgp4.earth_gravity.wgs72` - Standard WGS 72 model `sgp4.earth_gravity.wgs84` - More recent WGS 84 model `sgp4.earth_gravity.wgs72old` - Legacy support for old SGP4 behavior Normally, computations are made using various recent improvements to the algorithm. If you want to turn some of these off and go back into "afspc" mode, then set `afspc_mode` to `True`. """ deg2rad = pi / 180.0 # 0.0174532925199433 xpdotp = 1440.0 / (2.0 * pi) # 229.1831180523293 tumin = whichconst.tumin satrec = Satellite() satrec.error = 0 satrec.whichconst = whichconst # Python extension: remembers its consts line = longstr1.rstrip() # try/except is not well supported by Numba if len(line) >= 64 and line.startswith('1 ') and (line[8] == ' ') and (line[23] == '.') and (line[32] == ' ') and (line[34] == '.') and (line[43] == ' ') and (line[52] == ' ') and (line[61] == ' ') and (line[63] == ' '): _saved_satnum = satrec.satnum = int(line[2:7]) # classification = line[7] or 'U' # intldesg = line[9:17] two_digit_year = int(line[18:20]) satrec.epochdays = float(line[20:32]) satrec.ndot = float(line[33:43]) satrec.nddot = float(line[44] + '.' + line[45:50]) nexp = int(line[50:52]) satrec.bstar = float(line[53] + '.' + line[54:59]) ibexp = int(line[59:61]) # depends on [control=['if'], data=[]] else: # numb = int(line[62]) # elnum = int(line[64:68]) raise ValueError(error_message.format(1, LINE1, line)) line = longstr2.rstrip() if len(line) >= 69 and line.startswith('2 ') and (line[7] == ' ') and (line[11] == '.') and (line[16] == ' ') and (line[20] == '.') and (line[25] == ' ') and (line[33] == ' ') and (line[37] == '.') and (line[42] == ' ') and (line[46] == '.') and (line[51] == ' '): satrec.satnum = int(line[2:7]) if _saved_satnum != satrec.satnum: raise ValueError('Object numbers in lines 1 and 2 do not match') # depends on [control=['if'], data=[]] satrec.inclo = float(line[8:16]) satrec.nodeo = float(line[17:25]) satrec.ecco = float('0.' + line[26:33].replace(' ', '0')) satrec.argpo = float(line[34:42]) satrec.mo = float(line[43:51]) satrec.no = float(line[52:63]) # depends on [control=['if'], data=[]] else: #revnum = line[63:68] #except (AssertionError, IndexError, ValueError): raise ValueError(error_message.format(2, LINE2, line)) # ---- find no, ndot, nddot ---- satrec.no = satrec.no / xpdotp # rad/min satrec.nddot = satrec.nddot * pow(10.0, nexp) satrec.bstar = satrec.bstar * pow(10.0, ibexp) # ---- convert to sgp4 units ---- satrec.a = pow(satrec.no * tumin, -2.0 / 3.0) satrec.ndot = satrec.ndot / (xpdotp * 1440.0) # ? 
* minperday satrec.nddot = satrec.nddot / (xpdotp * 1440.0 * 1440) # ---- find standard orbital elements ---- satrec.inclo = satrec.inclo * deg2rad satrec.nodeo = satrec.nodeo * deg2rad satrec.argpo = satrec.argpo * deg2rad satrec.mo = satrec.mo * deg2rad satrec.alta = satrec.a * (1.0 + satrec.ecco) - 1.0 satrec.altp = satrec.a * (1.0 - satrec.ecco) - 1.0 '\n // ----------------------------------------------------------------\n // find sgp4epoch time of element set\n // remember that sgp4 uses units of days from 0 jan 1950 (sgp4epoch)\n // and minutes from the epoch (time)\n // ----------------------------------------------------------------\n\n // ---------------- temp fix for years from 1957-2056 -------------------\n // --------- correct fix will occur when year is 4-digit in tle ---------\n ' if two_digit_year < 57: year = two_digit_year + 2000 # depends on [control=['if'], data=['two_digit_year']] else: year = two_digit_year + 1900 (mon, day, hr, minute, sec) = days2mdhms(year, satrec.epochdays) (sec_whole, sec_fraction) = divmod(sec, 1.0) satrec.epochyr = year satrec.jdsatepoch = jday(year, mon, day, hr, minute, sec) satrec.epoch = datetime(year, mon, day, hr, minute, int(sec_whole), int(sec_fraction * 1000000.0 // 1.0)) # ---------------- initialize the orbit at sgp4epoch ------------------- sgp4init(whichconst, afspc_mode, satrec.satnum, satrec.jdsatepoch - 2433281.5, satrec.bstar, satrec.ecco, satrec.argpo, satrec.inclo, satrec.mo, satrec.no, satrec.nodeo, satrec) return satrec
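End-to-end sketch in the style of the sgp4 1.x package this function comes from; the ISS-like TLE values are illustrative (they satisfy the positional checks above, but are not an authoritative element set):

from sgp4.earth_gravity import wgs72

line1 = '1 25544U 98067A   19343.69339541  .00001764  00000-0  40967-4 0  9997'
line2 = '2 25544  51.6439 211.2001 0007417  17.6667  85.6398 15.50103472202482'

satellite = twoline2rv(line1, line2, wgs72)
print(satellite.epoch)   # datetime of the element-set epoch
print(satellite.no)      # mean motion in rad/min after conversion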
def _read_section(self):
    """Read and return an entire section"""
    lines = [self._last[self._last.find(":") + 1:]]
    self._last = self._f.readline()
    while len(self._last) > 0 and len(self._last[0].strip()) == 0:
        lines.append(self._last)
        self._last = self._f.readline()
    return lines
def function[_read_section, parameter[self]]: constant[Read and return an entire section] variable[lines] assign[=] list[[<ast.Subscript object at 0x7da20c6aa110>]] name[self]._last assign[=] call[name[self]._f.readline, parameter[]] while <ast.BoolOp object at 0x7da20c6ab430> begin[:] call[name[lines].append, parameter[name[self]._last]] name[self]._last assign[=] call[name[self]._f.readline, parameter[]] return[name[lines]]
keyword[def] identifier[_read_section] ( identifier[self] ): literal[string] identifier[lines] =[ identifier[self] . identifier[_last] [ identifier[self] . identifier[_last] . identifier[find] ( literal[string] )+ literal[int] :]] identifier[self] . identifier[_last] = identifier[self] . identifier[_f] . identifier[readline] () keyword[while] identifier[len] ( identifier[self] . identifier[_last] )> literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[_last] [ literal[int] ]. identifier[strip] ())== literal[int] : identifier[lines] . identifier[append] ( identifier[self] . identifier[_last] ) identifier[self] . identifier[_last] = identifier[self] . identifier[_f] . identifier[readline] () keyword[return] identifier[lines]
def _read_section(self): """Read and return an entire section""" lines = [self._last[self._last.find(':') + 1:]] self._last = self._f.readline() while len(self._last) > 0 and len(self._last[0].strip()) == 0: lines.append(self._last) self._last = self._f.readline() # depends on [control=['while'], data=[]] return lines
def update(self, _values=None, **values): """ Update a record in the database :param values: The values of the update :type values: dict :return: The number of records affected :rtype: int """ if _values is not None: values.update(_values) values = OrderedDict(sorted(values.items())) bindings = list(values.values()) + self.get_bindings() sql = self._grammar.compile_update(self, values) return self._connection.update(sql, self._clean_bindings(bindings))
def function[update, parameter[self, _values]]: constant[ Update a record in the database :param values: The values of the update :type values: dict :return: The number of records affected :rtype: int ] if compare[name[_values] is_not constant[None]] begin[:] call[name[values].update, parameter[name[_values]]] variable[values] assign[=] call[name[OrderedDict], parameter[call[name[sorted], parameter[call[name[values].items, parameter[]]]]]] variable[bindings] assign[=] binary_operation[call[name[list], parameter[call[name[values].values, parameter[]]]] + call[name[self].get_bindings, parameter[]]] variable[sql] assign[=] call[name[self]._grammar.compile_update, parameter[name[self], name[values]]] return[call[name[self]._connection.update, parameter[name[sql], call[name[self]._clean_bindings, parameter[name[bindings]]]]]]
keyword[def] identifier[update] ( identifier[self] , identifier[_values] = keyword[None] ,** identifier[values] ): literal[string] keyword[if] identifier[_values] keyword[is] keyword[not] keyword[None] : identifier[values] . identifier[update] ( identifier[_values] ) identifier[values] = identifier[OrderedDict] ( identifier[sorted] ( identifier[values] . identifier[items] ())) identifier[bindings] = identifier[list] ( identifier[values] . identifier[values] ())+ identifier[self] . identifier[get_bindings] () identifier[sql] = identifier[self] . identifier[_grammar] . identifier[compile_update] ( identifier[self] , identifier[values] ) keyword[return] identifier[self] . identifier[_connection] . identifier[update] ( identifier[sql] , identifier[self] . identifier[_clean_bindings] ( identifier[bindings] ))
def update(self, _values=None, **values): """ Update a record in the database :param values: The values of the update :type values: dict :return: The number of records affected :rtype: int """ if _values is not None: values.update(_values) # depends on [control=['if'], data=['_values']] values = OrderedDict(sorted(values.items())) bindings = list(values.values()) + self.get_bindings() sql = self._grammar.compile_update(self, values) return self._connection.update(sql, self._clean_bindings(bindings))
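A hedged usage sketch for this query-builder method, assuming an Orator-style fluent builder where a chain like the one below terminates in the update above; the table and column names are illustrative:

# `db` is assumed to be a configured DatabaseManager instance.
affected = db.table('users').where('id', '=', 1).update({'votes': 5}, active=True)
print(affected)  # number of rows touched by the UPDATE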
def _add_header_domains_xml(self, document): """ Generates the XML elements for allowed header domains. """ for domain, attrs in self.header_domains.items(): header_element = document.createElement( 'allow-http-request-headers-from' ) header_element.setAttribute('domain', domain) header_element.setAttribute('headers', ','.join(attrs['headers'])) if not attrs['secure']: header_element.setAttribute('secure', 'false') document.documentElement.appendChild(header_element)
def function[_add_header_domains_xml, parameter[self, document]]: constant[ Generates the XML elements for allowed header domains. ] for taget[tuple[[<ast.Name object at 0x7da1b176ab00>, <ast.Name object at 0x7da1b17698a0>]]] in starred[call[name[self].header_domains.items, parameter[]]] begin[:] variable[header_element] assign[=] call[name[document].createElement, parameter[constant[allow-http-request-headers-from]]] call[name[header_element].setAttribute, parameter[constant[domain], name[domain]]] call[name[header_element].setAttribute, parameter[constant[headers], call[constant[,].join, parameter[call[name[attrs]][constant[headers]]]]]] if <ast.UnaryOp object at 0x7da1b176b430> begin[:] call[name[header_element].setAttribute, parameter[constant[secure], constant[false]]] call[name[document].documentElement.appendChild, parameter[name[header_element]]]
keyword[def] identifier[_add_header_domains_xml] ( identifier[self] , identifier[document] ): literal[string] keyword[for] identifier[domain] , identifier[attrs] keyword[in] identifier[self] . identifier[header_domains] . identifier[items] (): identifier[header_element] = identifier[document] . identifier[createElement] ( literal[string] ) identifier[header_element] . identifier[setAttribute] ( literal[string] , identifier[domain] ) identifier[header_element] . identifier[setAttribute] ( literal[string] , literal[string] . identifier[join] ( identifier[attrs] [ literal[string] ])) keyword[if] keyword[not] identifier[attrs] [ literal[string] ]: identifier[header_element] . identifier[setAttribute] ( literal[string] , literal[string] ) identifier[document] . identifier[documentElement] . identifier[appendChild] ( identifier[header_element] )
def _add_header_domains_xml(self, document): """ Generates the XML elements for allowed header domains. """ for (domain, attrs) in self.header_domains.items(): header_element = document.createElement('allow-http-request-headers-from') header_element.setAttribute('domain', domain) header_element.setAttribute('headers', ','.join(attrs['headers'])) if not attrs['secure']: header_element.setAttribute('secure', 'false') # depends on [control=['if'], data=[]] document.documentElement.appendChild(header_element) # depends on [control=['for'], data=[]]
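A self-contained sketch of the element this method appends, built with the standard-library DOM; the domain and header names are made up for illustration:

from xml.dom.minidom import getDOMImplementation

document = getDOMImplementation().createDocument(None, 'cross-domain-policy', None)
header_element = document.createElement('allow-http-request-headers-from')
header_element.setAttribute('domain', '*.example.com')
header_element.setAttribute('headers', 'Authorization,SOAPAction')
header_element.setAttribute('secure', 'false')  # only emitted when secure is falsy
document.documentElement.appendChild(header_element)
print(document.documentElement.toprettyxml())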
def __build_parser_for_fileobject_and_desiredtype(self, obj_on_filesystem: PersistedObject, object_typ: Type[T],
                                                  logger: Logger = None) -> Parser:
    """
    Builds, from the registry, a parser to parse object obj_on_filesystem as an object of type object_type.

    To do that, it iterates through all registered parsers in the list in reverse order (last inserted first),
    and checks if they support the provided object format (single or multifile) and type.
    If several parsers match, it returns a CascadingParser that will try them in order.

    :param obj_on_filesystem:
    :param object_typ:
    :param logger:
    :return:
    """
    # first remove any non-generic customization
    object_type = get_base_generic_type(object_typ)

    # find all matching parsers for this
    matching, no_type_match_but_ext_match, no_ext_match_but_type_match, no_match = \
        self.find_all_matching_parsers(strict=self.is_strict, desired_type=object_type,
                                       required_ext=obj_on_filesystem.ext)
    matching_parsers = matching[0] + matching[1] + matching[2]

    if len(matching_parsers) == 0:
        # No match. Do we have a close match ? (correct type, but not correct extension ?)
        if len(no_ext_match_but_type_match) > 0:
            raise NoParserFoundForObjectExt.create(obj_on_filesystem, object_type,
                                                   set([ext_ for ext_set in
                                                        [p.supported_exts for p in no_ext_match_but_type_match]
                                                        for ext_ in ext_set]))
        else:
            # no, no match at all
            raise NoParserFoundForObjectType.create(obj_on_filesystem, object_type,
                                                    set([typ_ for typ_set in
                                                         [p.supported_types for p in no_type_match_but_ext_match]
                                                         for typ_ in typ_set]))
    elif len(matching_parsers) == 1:
        # return the match directly
        return matching_parsers[0]
    else:
        # return a cascade of all parsers, in reverse order (since last is our preferred one)
        # print('----- WARNING : Found several parsers able to parse this item. Combining them into a cascade.')
        return CascadingParser(list(reversed(matching_parsers)))
def function[__build_parser_for_fileobject_and_desiredtype, parameter[self, obj_on_filesystem, object_typ, logger]]:
    constant[
    Builds, from the registry, a parser to parse object obj_on_filesystem as an object of type object_type.
    To do that, it iterates through all registered parsers in the list in reverse order (last inserted first),
    and checks if they support the provided object format (single or multifile) and type.
    If several parsers match, it returns a CascadingParser that will try them in order.

    :param obj_on_filesystem:
    :param object_typ:
    :param logger:
    :return:
    ]
    variable[object_type] assign[=] call[name[get_base_generic_type], parameter[name[object_typ]]]
    <ast.Tuple object at 0x7da2046239a0> assign[=] call[name[self].find_all_matching_parsers, parameter[]]
    variable[matching_parsers] assign[=] binary_operation[binary_operation[call[name[matching]][constant[0]] + call[name[matching]][constant[1]]] + call[name[matching]][constant[2]]]
    if compare[call[name[len], parameter[name[matching_parsers]]] equal[==] constant[0]] begin[:]
        if compare[call[name[len], parameter[name[no_ext_match_but_type_match]]] greater[>] constant[0]] begin[:]
            <ast.Raise object at 0x7da2046202b0>
keyword[def] identifier[__build_parser_for_fileobject_and_desiredtype] ( identifier[self] , identifier[obj_on_filesystem] : identifier[PersistedObject] , identifier[object_typ] : identifier[Type] [ identifier[T] ], identifier[logger] : identifier[Logger] = keyword[None] )-> identifier[Parser] : literal[string] identifier[object_type] = identifier[get_base_generic_type] ( identifier[object_typ] ) identifier[matching] , identifier[no_type_match_but_ext_match] , identifier[no_ext_match_but_type_match] , identifier[no_match] = identifier[self] . identifier[find_all_matching_parsers] ( identifier[strict] = identifier[self] . identifier[is_strict] , identifier[desired_type] = identifier[object_type] , identifier[required_ext] = identifier[obj_on_filesystem] . identifier[ext] ) identifier[matching_parsers] = identifier[matching] [ literal[int] ]+ identifier[matching] [ literal[int] ]+ identifier[matching] [ literal[int] ] keyword[if] identifier[len] ( identifier[matching_parsers] )== literal[int] : keyword[if] identifier[len] ( identifier[no_ext_match_but_type_match] )> literal[int] : keyword[raise] identifier[NoParserFoundForObjectExt] . identifier[create] ( identifier[obj_on_filesystem] , identifier[object_type] , identifier[set] ([ identifier[ext_] keyword[for] identifier[ext_set] keyword[in] [ identifier[p] . identifier[supported_exts] keyword[for] identifier[p] keyword[in] identifier[no_ext_match_but_type_match] ] keyword[for] identifier[ext_] keyword[in] identifier[ext_set] ])) keyword[else] : keyword[raise] identifier[NoParserFoundForObjectType] . identifier[create] ( identifier[obj_on_filesystem] , identifier[object_type] , identifier[set] ([ identifier[typ_] keyword[for] identifier[typ_set] keyword[in] [ identifier[p] . identifier[supported_types] keyword[for] identifier[p] keyword[in] identifier[no_type_match_but_ext_match] ] keyword[for] identifier[typ_] keyword[in] identifier[typ_set] ])) keyword[elif] identifier[len] ( identifier[matching_parsers] )== literal[int] : keyword[return] identifier[matching_parsers] [ literal[int] ] keyword[else] : keyword[return] identifier[CascadingParser] ( identifier[list] ( identifier[reversed] ( identifier[matching_parsers] )))
def __build_parser_for_fileobject_and_desiredtype(self, obj_on_filesystem: PersistedObject, object_typ: Type[T], logger: Logger=None) -> Parser:
    """
    Builds, from the registry, a parser to parse object obj_on_filesystem as an object of type object_type.

    To do that, it iterates through all registered parsers in the list in reverse order (last inserted first),
    and checks if they support the provided object format (single or multifile) and type.
    If several parsers match, it returns a CascadingParser that will try them in order.

    :param obj_on_filesystem:
    :param object_typ:
    :param logger:
    :return:
    """
    # first remove any non-generic customization
    object_type = get_base_generic_type(object_typ)
    # find all matching parsers for this
    (matching, no_type_match_but_ext_match, no_ext_match_but_type_match, no_match) = self.find_all_matching_parsers(strict=self.is_strict, desired_type=object_type, required_ext=obj_on_filesystem.ext)
    matching_parsers = matching[0] + matching[1] + matching[2]
    if len(matching_parsers) == 0:
        # No match. Do we have a close match ? (correct type, but not correct extension ?)
        if len(no_ext_match_but_type_match) > 0:
            raise NoParserFoundForObjectExt.create(obj_on_filesystem, object_type, set([ext_ for ext_set in [p.supported_exts for p in no_ext_match_but_type_match] for ext_ in ext_set])) # depends on [control=['if'], data=[]]
        else:
            # no, no match at all
            raise NoParserFoundForObjectType.create(obj_on_filesystem, object_type, set([typ_ for typ_set in [p.supported_types for p in no_type_match_but_ext_match] for typ_ in typ_set])) # depends on [control=['if'], data=[]]
    elif len(matching_parsers) == 1:
        # return the match directly
        return matching_parsers[0] # depends on [control=['if'], data=[]]
    else:
        # return a cascade of all parsers, in reverse order (since last is our preferred one)
        # print('----- WARNING : Found several parsers able to parse this item. Combining them into a cascade.')
        return CascadingParser(list(reversed(matching_parsers)))
def from_file(filename, use_cores=True, thresh=1.e-4): """ Reads an xr-formatted file to create an Xr object. Args: filename (str): name of file to read from. use_cores (bool): use core positions and discard shell positions if set to True (default). Otherwise, use shell positions and discard core positions. thresh (float): relative threshold for consistency check between cell parameters (lengths and angles) from header information and cell vectors, respectively. Returns: xr (Xr): Xr object corresponding to the input file. """ with zopen(filename, "rt") as f: return Xr.from_string( f.read(), use_cores=use_cores, thresh=thresh)
def function[from_file, parameter[filename, use_cores, thresh]]: constant[ Reads an xr-formatted file to create an Xr object. Args: filename (str): name of file to read from. use_cores (bool): use core positions and discard shell positions if set to True (default). Otherwise, use shell positions and discard core positions. thresh (float): relative threshold for consistency check between cell parameters (lengths and angles) from header information and cell vectors, respectively. Returns: xr (Xr): Xr object corresponding to the input file. ] with call[name[zopen], parameter[name[filename], constant[rt]]] begin[:] return[call[name[Xr].from_string, parameter[call[name[f].read, parameter[]]]]]
keyword[def] identifier[from_file] ( identifier[filename] , identifier[use_cores] = keyword[True] , identifier[thresh] = literal[int] ): literal[string] keyword[with] identifier[zopen] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[Xr] . identifier[from_string] ( identifier[f] . identifier[read] (), identifier[use_cores] = identifier[use_cores] , identifier[thresh] = identifier[thresh] )
def from_file(filename, use_cores=True, thresh=0.0001): """ Reads an xr-formatted file to create an Xr object. Args: filename (str): name of file to read from. use_cores (bool): use core positions and discard shell positions if set to True (default). Otherwise, use shell positions and discard core positions. thresh (float): relative threshold for consistency check between cell parameters (lengths and angles) from header information and cell vectors, respectively. Returns: xr (Xr): Xr object corresponding to the input file. """ with zopen(filename, 'rt') as f: return Xr.from_string(f.read(), use_cores=use_cores, thresh=thresh) # depends on [control=['with'], data=['f']]
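A hypothetical call, assuming the pymatgen-style Xr class this staticmethod belongs to; 'GaAs.xr' is a stand-in filename:

# zopen (from monty) is assumed to transparently handle gzipped files as well.
xr = Xr.from_file('GaAs.xr', use_cores=True, thresh=1e-4)
print(xr.structure)  # the parsed structure (attribute assumed from pymatgen's Xr)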
def _exec_info(self): """ Caching wrapper around client.exec_inspect """ if self._info is None: self._info = self.client.exec_inspect(self.exec_id) return self._info
def function[_exec_info, parameter[self]]: constant[ Caching wrapper around client.exec_inspect ] if compare[name[self]._info is constant[None]] begin[:] name[self]._info assign[=] call[name[self].client.exec_inspect, parameter[name[self].exec_id]] return[name[self]._info]
keyword[def] identifier[_exec_info] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_info] keyword[is] keyword[None] : identifier[self] . identifier[_info] = identifier[self] . identifier[client] . identifier[exec_inspect] ( identifier[self] . identifier[exec_id] ) keyword[return] identifier[self] . identifier[_info]
def _exec_info(self): """ Caching wrapper around client.exec_inspect """ if self._info is None: self._info = self.client.exec_inspect(self.exec_id) # depends on [control=['if'], data=[]] return self._info
def calculate_size(name, thread_id): """ Calculates the request payload size""" data_size = 0 data_size += calculate_size_str(name) data_size += LONG_SIZE_IN_BYTES return data_size
def function[calculate_size, parameter[name, thread_id]]: constant[ Calculates the request payload size] variable[data_size] assign[=] constant[0] <ast.AugAssign object at 0x7da1b26ad960> <ast.AugAssign object at 0x7da1b26ac310> return[name[data_size]]
keyword[def] identifier[calculate_size] ( identifier[name] , identifier[thread_id] ): literal[string] identifier[data_size] = literal[int] identifier[data_size] += identifier[calculate_size_str] ( identifier[name] ) identifier[data_size] += identifier[LONG_SIZE_IN_BYTES] keyword[return] identifier[data_size]
def calculate_size(name, thread_id): """ Calculates the request payload size""" data_size = 0 data_size += calculate_size_str(name) data_size += LONG_SIZE_IN_BYTES return data_size
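A worked sketch of the size arithmetic above, with stand-ins for the client's serialization constants; the real calculate_size_str and LONG_SIZE_IN_BYTES come from the protocol module, and a 4-byte length prefix plus the UTF-8 payload is assumed here purely for illustration:

INT_SIZE_IN_BYTES = 4   # assumed length-prefix size
LONG_SIZE_IN_BYTES = 8  # a long is 8 bytes on the wire

def calculate_size_str(value):
    return INT_SIZE_IN_BYTES + len(value.encode('utf-8'))

# name 'my-lock' (4 + 7 bytes) plus one long for thread_id -> 19 bytes
print(calculate_size_str('my-lock') + LONG_SIZE_IN_BYTES)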
def GetTypeManager(self):
   """ Get dynamic type manager """
   dynTypeMgr = None
   if self.hostSystem:
      try:
         dynTypeMgr = self.hostSystem.RetrieveDynamicTypeManager()
      except vmodl.fault.MethodNotFound as err:
         pass

   if not dynTypeMgr:
      # Older hosts do not support RetrieveDynamicTypeManager
      cmdlineTypesMoId = "ha-dynamic-type-manager"
      dynTypeMgr = vmodl.reflect.DynamicTypeManager(cmdlineTypesMoId, self.stub)
   return dynTypeMgr
def function[GetTypeManager, parameter[self]]: constant[ Get dynamic type manager ] variable[dynTypeMgr] assign[=] constant[None] if name[self].hostSystem begin[:] <ast.Try object at 0x7da18ede7880> if <ast.UnaryOp object at 0x7da18ede7cd0> begin[:] variable[cmdlineTypesMoId] assign[=] constant[ha-dynamic-type-manager] variable[dynTypeMgr] assign[=] call[name[vmodl].reflect.DynamicTypeManager, parameter[name[cmdlineTypesMoId], name[self].stub]] return[name[dynTypeMgr]]
keyword[def] identifier[GetTypeManager] ( identifier[self] ): literal[string] identifier[dynTypeMgr] = keyword[None] keyword[if] identifier[self] . identifier[hostSystem] : keyword[try] : identifier[dynTypeMgr] = identifier[self] . identifier[hostSystem] . identifier[RetrieveDynamicTypeManager] () keyword[except] identifier[vmodl] . identifier[fault] . identifier[MethodNotFound] keyword[as] identifier[err] : keyword[pass] keyword[if] keyword[not] identifier[dynTypeMgr] : identifier[cmdlineTypesMoId] = literal[string] identifier[dynTypeMgr] = identifier[vmodl] . identifier[reflect] . identifier[DynamicTypeManager] ( identifier[cmdlineTypesMoId] , identifier[self] . identifier[stub] ) keyword[return] identifier[dynTypeMgr]
def GetTypeManager(self):
    """ Get dynamic type manager """
    dynTypeMgr = None
    if self.hostSystem:
        try:
            dynTypeMgr = self.hostSystem.RetrieveDynamicTypeManager() # depends on [control=['try'], data=[]]
        except vmodl.fault.MethodNotFound as err:
            pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    if not dynTypeMgr:
        # Older hosts do not support RetrieveDynamicTypeManager
        cmdlineTypesMoId = 'ha-dynamic-type-manager'
        dynTypeMgr = vmodl.reflect.DynamicTypeManager(cmdlineTypesMoId, self.stub) # depends on [control=['if'], data=[]]
    return dynTypeMgr
def can_user_approve_this_page(self, user):
    """Check if a user can approve this page."""
    self.ensure_one()
    # if approval is not required, anyone can approve
    if not self.is_approval_required:
        return True
    # if the user belongs to 'Knowledge / Manager', they can approve anything
    if user.has_group('document_page.group_document_manager'):
        return True
    # to approve, the user must have approver rights
    if not user.has_group(
            'document_page_approval.group_document_approver_user'):
        return False
    # if there aren't any approver groups defined, the user can approve
    if not self.approver_group_ids:
        return True
    # to approve, the user must belong to any of the approver groups
    return len(user.groups_id & self.approver_group_ids) > 0
def function[can_user_approve_this_page, parameter[self, user]]: constant[Check if a user can approve this page.] call[name[self].ensure_one, parameter[]] if <ast.UnaryOp object at 0x7da18f58e980> begin[:] return[constant[True]] if call[name[user].has_group, parameter[constant[document_page.group_document_manager]]] begin[:] return[constant[True]] if <ast.UnaryOp object at 0x7da18f58dff0> begin[:] return[constant[False]] if <ast.UnaryOp object at 0x7da18f58ee00> begin[:] return[constant[True]] return[compare[call[name[len], parameter[binary_operation[name[user].groups_id <ast.BitAnd object at 0x7da2590d6b60> name[self].approver_group_ids]]] greater[>] constant[0]]]
keyword[def] identifier[can_user_approve_this_page] ( identifier[self] , identifier[user] ): literal[string] identifier[self] . identifier[ensure_one] () keyword[if] keyword[not] identifier[self] . identifier[is_approval_required] : keyword[return] keyword[True] keyword[if] identifier[user] . identifier[has_group] ( literal[string] ): keyword[return] keyword[True] keyword[if] keyword[not] identifier[user] . identifier[has_group] ( literal[string] ): keyword[return] keyword[False] keyword[if] keyword[not] identifier[self] . identifier[approver_group_ids] : keyword[return] keyword[True] keyword[return] identifier[len] ( identifier[user] . identifier[groups_id] & identifier[self] . identifier[approver_group_ids] )> literal[int]
def can_user_approve_this_page(self, user):
    """Check if a user can approve this page."""
    self.ensure_one()
    # if approval is not required, anyone can approve
    if not self.is_approval_required:
        return True # depends on [control=['if'], data=[]]
    # if the user belongs to 'Knowledge / Manager', they can approve anything
    if user.has_group('document_page.group_document_manager'):
        return True # depends on [control=['if'], data=[]]
    # to approve, the user must have approver rights
    if not user.has_group('document_page_approval.group_document_approver_user'):
        return False # depends on [control=['if'], data=[]]
    # if there aren't any approver groups defined, the user can approve
    if not self.approver_group_ids:
        return True # depends on [control=['if'], data=[]]
    # to approve, the user must belong to any of the approver groups
    return len(user.groups_id & self.approver_group_ids) > 0
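The final check is plain intersection on group ids; the same logic with integer ids standing in for the group recordsets:

user_groups = {3, 7, 12}   # groups the user belongs to
approver_groups = {7, 21}  # groups allowed to approve this page
print(len(user_groups & approver_groups) > 0)  # True -> the user may approve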
def ensure_str(s): r""" Ensure that s is a str and not a bytes (.decode() if necessary) >>> ensure_str(b"I'm 2. When I grow up I want to be a str!") "I'm 2. When I grow up I want to be a str!" >>> ensure_str(42) '42' """ try: return s.decode() except AttributeError: if isinstance(s, str): return s return repr(s)
def function[ensure_str, parameter[s]]: constant[ Ensure that s is a str and not a bytes (.decode() if necessary) >>> ensure_str(b"I'm 2. When I grow up I want to be a str!") "I'm 2. When I grow up I want to be a str!" >>> ensure_str(42) '42' ] <ast.Try object at 0x7da2054a69e0> return[call[name[repr], parameter[name[s]]]]
keyword[def] identifier[ensure_str] ( identifier[s] ): literal[string] keyword[try] : keyword[return] identifier[s] . identifier[decode] () keyword[except] identifier[AttributeError] : keyword[if] identifier[isinstance] ( identifier[s] , identifier[str] ): keyword[return] identifier[s] keyword[return] identifier[repr] ( identifier[s] )
def ensure_str(s): """ Ensure that s is a str and not a bytes (.decode() if necessary) >>> ensure_str(b"I'm 2. When I grow up I want to be a str!") "I'm 2. When I grow up I want to be a str!" >>> ensure_str(42) '42' """ try: return s.decode() # depends on [control=['try'], data=[]] except AttributeError: if isinstance(s, str): return s # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] return repr(s)
def pid(self): """ The integer PID of the subprocess or None. """ pf = self.path('cmd.pid') if not os.path.exists(pf): return None with open(pf, 'r') as f: return int(f.read())
def function[pid, parameter[self]]: constant[ The integer PID of the subprocess or None. ] variable[pf] assign[=] call[name[self].path, parameter[constant[cmd.pid]]] if <ast.UnaryOp object at 0x7da204565450> begin[:] return[constant[None]] with call[name[open], parameter[name[pf], constant[r]]] begin[:] return[call[name[int], parameter[call[name[f].read, parameter[]]]]]
keyword[def] identifier[pid] ( identifier[self] ): literal[string] identifier[pf] = identifier[self] . identifier[path] ( literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[pf] ): keyword[return] keyword[None] keyword[with] identifier[open] ( identifier[pf] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[int] ( identifier[f] . identifier[read] ())
def pid(self): """ The integer PID of the subprocess or None. """ pf = self.path('cmd.pid') if not os.path.exists(pf): return None # depends on [control=['if'], data=[]] with open(pf, 'r') as f: return int(f.read()) # depends on [control=['with'], data=['f']]
def _bool_encode(self, d): """ Converts bool values to lowercase strings """ for k, v in d.items(): if isinstance(v, bool): d[k] = str(v).lower() return d
def function[_bool_encode, parameter[self, d]]: constant[ Converts bool values to lowercase strings ] for taget[tuple[[<ast.Name object at 0x7da1b0e0ee00>, <ast.Name object at 0x7da1b0e0e980>]]] in starred[call[name[d].items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[v], name[bool]]] begin[:] call[name[d]][name[k]] assign[=] call[call[name[str], parameter[name[v]]].lower, parameter[]] return[name[d]]
keyword[def] identifier[_bool_encode] ( identifier[self] , identifier[d] ): literal[string] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[v] , identifier[bool] ): identifier[d] [ identifier[k] ]= identifier[str] ( identifier[v] ). identifier[lower] () keyword[return] identifier[d]
def _bool_encode(self, d): """ Converts bool values to lowercase strings """ for (k, v) in d.items(): if isinstance(v, bool): d[k] = str(v).lower() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return d
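Behavior sketch: only bool values are rewritten, everything else passes through untouched. Rebinding a value for an existing key during items() iteration is safe because the dict's size does not change:

def _bool_encode(d):
    for k, v in d.items():
        if isinstance(v, bool):
            d[k] = str(v).lower()
    return d

print(_bool_encode({'include_rts': True, 'count': 20}))
# {'include_rts': 'true', 'count': 20}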
def upload(training_dir, algorithm_id=None, writeup=None, api_key=None, ignore_open_monitors=False):
    """Upload the results of training (as automatically recorded by your
    env's monitor) to OpenAI Gym.

    Args:
        training_dir (Optional[str]): A directory containing the results of a training run.
        algorithm_id (Optional[str]): An algorithm id indicating the particular version of the algorithm (including choices of parameters) you are running (visit https://gym.openai.com/algorithms to create an id)
        writeup (Optional[str]): A Gist URL (of the form https://gist.github.com/<user>/<id>) containing your writeup for this evaluation.
        api_key (Optional[str]): Your OpenAI API key. Can also be provided as an environment variable (OPENAI_GYM_API_KEY).
    """

    if not ignore_open_monitors:
        open_monitors = monitoring._open_monitors()
        if len(open_monitors) > 0:
            envs = [m.env.spec.id if m.env.spec else '(unknown)' for m in open_monitors]
            raise error.Error("Still have an open monitor on {}. You must run 'env.monitor.close()' before uploading.".format(', '.join(envs)))

    env_info, training_episode_batch, training_video = upload_training_data(training_dir, api_key=api_key)
    env_id = env_info['env_id']
    training_episode_batch_id = training_video_id = None
    if training_episode_batch:
        training_episode_batch_id = training_episode_batch.id
    if training_video:
        training_video_id = training_video.id

    if logger.level <= logging.INFO:
        if training_episode_batch_id is not None and training_video_id is not None:
            logger.info('[%s] Creating evaluation object from %s with learning curve and training video', env_id, training_dir)
        elif training_episode_batch_id is not None:
            logger.info('[%s] Creating evaluation object from %s with learning curve', env_id, training_dir)
        elif training_video_id is not None:
            logger.info('[%s] Creating evaluation object from %s with training video', env_id, training_dir)
        else:
            raise error.Error("[{}] You didn't have any recorded training data in {}. Once you've used 'env.monitor.start(training_dir)' to start recording, you need to actually run some rollouts. Please join the community chat on https://gym.openai.com if you have any issues.".format(env_id, training_dir))

    evaluation = resource.Evaluation.create(
        training_episode_batch=training_episode_batch_id,
        training_video=training_video_id,
        env=env_info['env_id'],
        algorithm={
            'id': algorithm_id,
        },
        writeup=writeup,
        gym_version=env_info['gym_version'],
        api_key=api_key,
        # >>>>>>>>> START changes >>>>>>>>>>>>>>>>>>>>>>>>
        env_info=env_info,
        # <<<<<<<<< END changes <<<<<<<<<<<<<<<<<<<<<<<<<<
    )

    logger.info(
        """
****************************************************
You successfully uploaded your evaluation on %s to
OpenAI Gym! You can find it at:

    %s

****************************************************
        """.rstrip(), env_id, evaluation.web_url())

    return evaluation
def function[upload, parameter[training_dir, algorithm_id, writeup, api_key, ignore_open_monitors]]: constant[Upload the results of training (as automatically recorded by your env's monitor) to OpenAI Gym. Args: training_dir (Optional[str]): A directory containing the results of a training run. algorithm_id (Optional[str]): An algorithm id indicating the particular version of the algorithm (including choices of parameters) you are running (visit https://gym.openai.com/algorithms to create an id) writeup (Optional[str]): A Gist URL (of the form https://gist.github.com/<user>/<id>) containing your writeup for this evaluation. api_key (Optional[str]): Your OpenAI API key. Can also be provided as an environment variable (OPENAI_GYM_API_KEY). ] if <ast.UnaryOp object at 0x7da18f58ed70> begin[:] variable[open_monitors] assign[=] call[name[monitoring]._open_monitors, parameter[]] if compare[call[name[len], parameter[name[open_monitors]]] greater[>] constant[0]] begin[:] variable[envs] assign[=] <ast.ListComp object at 0x7da18f58d030> <ast.Raise object at 0x7da18f58e920> <ast.Tuple object at 0x7da18f58da20> assign[=] call[name[upload_training_data], parameter[name[training_dir]]] variable[env_id] assign[=] call[name[env_info]][constant[env_id]] variable[training_episode_batch_id] assign[=] constant[None] if name[training_episode_batch] begin[:] variable[training_episode_batch_id] assign[=] name[training_episode_batch].id if name[training_video] begin[:] variable[training_video_id] assign[=] name[training_video].id if compare[name[logger].level less_or_equal[<=] name[logging].INFO] begin[:] if <ast.BoolOp object at 0x7da18f58e950> begin[:] call[name[logger].info, parameter[constant[[%s] Creating evaluation object from %s with learning curve and training video], name[env_id], name[training_dir]]] variable[evaluation] assign[=] call[name[resource].Evaluation.create, parameter[]] call[name[logger].info, parameter[call[constant[ **************************************************** You successfully uploaded your evaluation on %s to OpenAI Gym! You can find it at: %s **************************************************** ].rstrip, parameter[]], name[env_id], call[name[evaluation].web_url, parameter[]]]] return[name[evaluation]]
keyword[def] identifier[upload] ( identifier[training_dir] , identifier[algorithm_id] = keyword[None] , identifier[writeup] = keyword[None] , identifier[api_key] = keyword[None] , identifier[ignore_open_monitors] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[ignore_open_monitors] : identifier[open_monitors] = identifier[monitoring] . identifier[_open_monitors] () keyword[if] identifier[len] ( identifier[open_monitors] )> literal[int] : identifier[envs] =[ identifier[m] . identifier[env] . identifier[spec] . identifier[id] keyword[if] identifier[m] . identifier[env] . identifier[spec] keyword[else] literal[string] keyword[for] identifier[m] keyword[in] identifier[open_monitors] ] keyword[raise] identifier[error] . identifier[Error] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[envs] ))) identifier[env_info] , identifier[training_episode_batch] , identifier[training_video] = identifier[upload_training_data] ( identifier[training_dir] , identifier[api_key] = identifier[api_key] ) identifier[env_id] = identifier[env_info] [ literal[string] ] identifier[training_episode_batch_id] = identifier[training_video_id] = keyword[None] keyword[if] identifier[training_episode_batch] : identifier[training_episode_batch_id] = identifier[training_episode_batch] . identifier[id] keyword[if] identifier[training_video] : identifier[training_video_id] = identifier[training_video] . identifier[id] keyword[if] identifier[logger] . identifier[level] <= identifier[logging] . identifier[INFO] : keyword[if] identifier[training_episode_batch_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[training_video_id] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[info] ( literal[string] , identifier[env_id] , identifier[training_dir] ) keyword[elif] identifier[training_episode_batch_id] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[info] ( literal[string] , identifier[env_id] , identifier[training_dir] ) keyword[elif] identifier[training_video_id] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[info] ( literal[string] , identifier[env_id] , identifier[training_dir] ) keyword[else] : keyword[raise] identifier[error] . identifier[Error] ( literal[string] . identifier[format] ( identifier[env_id] , identifier[training_dir] )) identifier[evaluation] = identifier[resource] . identifier[Evaluation] . identifier[create] ( identifier[training_episode_batch] = identifier[training_episode_batch_id] , identifier[training_video] = identifier[training_video_id] , identifier[env] = identifier[env_info] [ literal[string] ], identifier[algorithm] ={ literal[string] : identifier[algorithm_id] , }, identifier[writeup] = identifier[writeup] , identifier[gym_version] = identifier[env_info] [ literal[string] ], identifier[api_key] = identifier[api_key] , identifier[env_info] = identifier[env_info] , ) identifier[logger] . identifier[info] ( literal[string] . identifier[rstrip] (), identifier[env_id] , identifier[evaluation] . identifier[web_url] ()) keyword[return] identifier[evaluation]
def upload(training_dir, algorithm_id=None, writeup=None, api_key=None, ignore_open_monitors=False):
    """Upload the results of training (as automatically recorded by your
    env's monitor) to OpenAI Gym.

    Args:
        training_dir (Optional[str]): A directory containing the results of a training run.
        algorithm_id (Optional[str]): An algorithm id indicating the particular version of the algorithm (including choices of parameters) you are running (visit https://gym.openai.com/algorithms to create an id)
        writeup (Optional[str]): A Gist URL (of the form https://gist.github.com/<user>/<id>) containing your writeup for this evaluation.
        api_key (Optional[str]): Your OpenAI API key. Can also be provided as an environment variable (OPENAI_GYM_API_KEY).
    """
    if not ignore_open_monitors:
        open_monitors = monitoring._open_monitors()
        if len(open_monitors) > 0:
            envs = [m.env.spec.id if m.env.spec else '(unknown)' for m in open_monitors]
            raise error.Error("Still have an open monitor on {}. You must run 'env.monitor.close()' before uploading.".format(', '.join(envs))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    (env_info, training_episode_batch, training_video) = upload_training_data(training_dir, api_key=api_key)
    env_id = env_info['env_id']
    training_episode_batch_id = training_video_id = None
    if training_episode_batch:
        training_episode_batch_id = training_episode_batch.id # depends on [control=['if'], data=[]]
    if training_video:
        training_video_id = training_video.id # depends on [control=['if'], data=[]]
    if logger.level <= logging.INFO:
        if training_episode_batch_id is not None and training_video_id is not None:
            logger.info('[%s] Creating evaluation object from %s with learning curve and training video', env_id, training_dir) # depends on [control=['if'], data=[]]
        elif training_episode_batch_id is not None:
            logger.info('[%s] Creating evaluation object from %s with learning curve', env_id, training_dir) # depends on [control=['if'], data=[]]
        elif training_video_id is not None:
            logger.info('[%s] Creating evaluation object from %s with training video', env_id, training_dir) # depends on [control=['if'], data=[]]
        else:
            raise error.Error("[{}] You didn't have any recorded training data in {}. Once you've used 'env.monitor.start(training_dir)' to start recording, you need to actually run some rollouts. Please join the community chat on https://gym.openai.com if you have any issues.".format(env_id, training_dir)) # depends on [control=['if'], data=[]]
    # >>>>>>>>> START changes >>>>>>>>>>>>>>>>>>>>>>>>
    # <<<<<<<<< END changes <<<<<<<<<<<<<<<<<<<<<<<<<<
    evaluation = resource.Evaluation.create(training_episode_batch=training_episode_batch_id, training_video=training_video_id, env=env_info['env_id'], algorithm={'id': algorithm_id}, writeup=writeup, gym_version=env_info['gym_version'], api_key=api_key, env_info=env_info)
    logger.info('\n****************************************************\nYou successfully uploaded your evaluation on %s to\nOpenAI Gym! You can find it at:\n\n    %s\n\n****************************************************\n        '.rstrip(), env_id, evaluation.web_url())
    return evaluation
def VRRPNewMaster_originator_switch_info_switchIpV4Address(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") VRRPNewMaster = ET.SubElement(config, "VRRPNewMaster", xmlns="http://brocade.com/ns/brocade-notification-stream") originator_switch_info = ET.SubElement(VRRPNewMaster, "originator-switch-info") switchIpV4Address = ET.SubElement(originator_switch_info, "switchIpV4Address") switchIpV4Address.text = kwargs.pop('switchIpV4Address') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[VRRPNewMaster_originator_switch_info_switchIpV4Address, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[VRRPNewMaster] assign[=] call[name[ET].SubElement, parameter[name[config], constant[VRRPNewMaster]]] variable[originator_switch_info] assign[=] call[name[ET].SubElement, parameter[name[VRRPNewMaster], constant[originator-switch-info]]] variable[switchIpV4Address] assign[=] call[name[ET].SubElement, parameter[name[originator_switch_info], constant[switchIpV4Address]]] name[switchIpV4Address].text assign[=] call[name[kwargs].pop, parameter[constant[switchIpV4Address]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[VRRPNewMaster_originator_switch_info_switchIpV4Address] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[VRRPNewMaster] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[originator_switch_info] = identifier[ET] . identifier[SubElement] ( identifier[VRRPNewMaster] , literal[string] ) identifier[switchIpV4Address] = identifier[ET] . identifier[SubElement] ( identifier[originator_switch_info] , literal[string] ) identifier[switchIpV4Address] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def VRRPNewMaster_originator_switch_info_switchIpV4Address(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') VRRPNewMaster = ET.SubElement(config, 'VRRPNewMaster', xmlns='http://brocade.com/ns/brocade-notification-stream') originator_switch_info = ET.SubElement(VRRPNewMaster, 'originator-switch-info') switchIpV4Address = ET.SubElement(originator_switch_info, 'switchIpV4Address') switchIpV4Address.text = kwargs.pop('switchIpV4Address') callback = kwargs.pop('callback', self._callback) return callback(config)
def fetch_list_members(list_url): """ Get all members of the list specified by the given url. E.g., https://twitter.com/lore77/lists/libri-cultura-education """ match = re.match(r'.+twitter\.com\/(.+)\/lists\/(.+)', list_url) if not match: print('cannot parse list url %s' % list_url) return [] screen_name, slug = match.groups() print('collecting list %s/%s' % (screen_name, slug)) return twutil.collect.list_members(slug, screen_name)
def function[fetch_list_members, parameter[list_url]]: constant[ Get all members of the list specified by the given url. E.g., https://twitter.com/lore77/lists/libri-cultura-education ] variable[match] assign[=] call[name[re].match, parameter[constant[.+twitter\.com\/(.+)\/lists\/(.+)], name[list_url]]] if <ast.UnaryOp object at 0x7da1b234b2b0> begin[:] call[name[print], parameter[binary_operation[constant[cannot parse list url %s] <ast.Mod object at 0x7da2590d6920> name[list_url]]]] return[list[[]]] <ast.Tuple object at 0x7da1b234b340> assign[=] call[name[match].groups, parameter[]] call[name[print], parameter[binary_operation[constant[collecting list %s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b234ac80>, <ast.Name object at 0x7da1b234a6b0>]]]]] return[call[name[twutil].collect.list_members, parameter[name[slug], name[screen_name]]]]
keyword[def] identifier[fetch_list_members] ( identifier[list_url] ): literal[string] identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[list_url] ) keyword[if] keyword[not] identifier[match] : identifier[print] ( literal[string] % identifier[list_url] ) keyword[return] [] identifier[screen_name] , identifier[slug] = identifier[match] . identifier[groups] () identifier[print] ( literal[string] %( identifier[screen_name] , identifier[slug] )) keyword[return] identifier[twutil] . identifier[collect] . identifier[list_members] ( identifier[slug] , identifier[screen_name] )
def fetch_list_members(list_url): """ Get all members of the list specified by the given url. E.g., https://twitter.com/lore77/lists/libri-cultura-education """ match = re.match('.+twitter\\.com\\/(.+)\\/lists\\/(.+)', list_url) if not match: print('cannot parse list url %s' % list_url) return [] # depends on [control=['if'], data=[]] (screen_name, slug) = match.groups() print('collecting list %s/%s' % (screen_name, slug)) return twutil.collect.list_members(slug, screen_name)
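The URL regex in isolation; groups() yields the list owner and slug that feed the collector:

import re

url = 'https://twitter.com/lore77/lists/libri-cultura-education'
match = re.match(r'.+twitter\.com\/(.+)\/lists\/(.+)', url)
print(match.groups())  # ('lore77', 'libri-cultura-education')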
def _to_output_code(self): """Return a unicode object with the Gremlin/MATCH representation of this Literal.""" # All supported Literal objects serialize to identical strings both in Gremlin and MATCH. self.validate() if self.value is None: return u'null' elif self.value is True: return u'true' elif self.value is False: return u'false' elif isinstance(self.value, six.string_types): return safe_quoted_string(self.value) elif isinstance(self.value, int): return six.text_type(self.value) elif isinstance(self.value, list): if len(self.value) == 0: return '[]' elif all(isinstance(x, six.string_types) for x in self.value): list_contents = ', '.join(safe_quoted_string(x) for x in sorted(self.value)) return '[' + list_contents + ']' else: pass # Fall through to assertion error below. raise AssertionError(u'Unreachable state reached: {}'.format(self))
def function[_to_output_code, parameter[self]]: constant[Return a unicode object with the Gremlin/MATCH representation of this Literal.] call[name[self].validate, parameter[]] if compare[name[self].value is constant[None]] begin[:] return[constant[null]] <ast.Raise object at 0x7da1b17cf910>
keyword[def] identifier[_to_output_code] ( identifier[self] ): literal[string] identifier[self] . identifier[validate] () keyword[if] identifier[self] . identifier[value] keyword[is] keyword[None] : keyword[return] literal[string] keyword[elif] identifier[self] . identifier[value] keyword[is] keyword[True] : keyword[return] literal[string] keyword[elif] identifier[self] . identifier[value] keyword[is] keyword[False] : keyword[return] literal[string] keyword[elif] identifier[isinstance] ( identifier[self] . identifier[value] , identifier[six] . identifier[string_types] ): keyword[return] identifier[safe_quoted_string] ( identifier[self] . identifier[value] ) keyword[elif] identifier[isinstance] ( identifier[self] . identifier[value] , identifier[int] ): keyword[return] identifier[six] . identifier[text_type] ( identifier[self] . identifier[value] ) keyword[elif] identifier[isinstance] ( identifier[self] . identifier[value] , identifier[list] ): keyword[if] identifier[len] ( identifier[self] . identifier[value] )== literal[int] : keyword[return] literal[string] keyword[elif] identifier[all] ( identifier[isinstance] ( identifier[x] , identifier[six] . identifier[string_types] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[value] ): identifier[list_contents] = literal[string] . identifier[join] ( identifier[safe_quoted_string] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[sorted] ( identifier[self] . identifier[value] )) keyword[return] literal[string] + identifier[list_contents] + literal[string] keyword[else] : keyword[pass] keyword[raise] identifier[AssertionError] ( literal[string] . identifier[format] ( identifier[self] ))
def _to_output_code(self): """Return a unicode object with the Gremlin/MATCH representation of this Literal.""" # All supported Literal objects serialize to identical strings both in Gremlin and MATCH. self.validate() if self.value is None: return u'null' # depends on [control=['if'], data=[]] elif self.value is True: return u'true' # depends on [control=['if'], data=[]] elif self.value is False: return u'false' # depends on [control=['if'], data=[]] elif isinstance(self.value, six.string_types): return safe_quoted_string(self.value) # depends on [control=['if'], data=[]] elif isinstance(self.value, int): return six.text_type(self.value) # depends on [control=['if'], data=[]] elif isinstance(self.value, list): if len(self.value) == 0: return '[]' # depends on [control=['if'], data=[]] elif all((isinstance(x, six.string_types) for x in self.value)): list_contents = ', '.join((safe_quoted_string(x) for x in sorted(self.value))) return '[' + list_contents + ']' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: pass # Fall through to assertion error below. raise AssertionError(u'Unreachable state reached: {}'.format(self))
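A simplified, self-contained stand-in for the dispatch above, with safe_quoted_string approximated by repr(); note that bools must be tested before ints, since isinstance(True, int) holds:

def to_output_code(value):
    if value is None:
        return 'null'
    if value is True:
        return 'true'
    if value is False:
        return 'false'
    if isinstance(value, str):
        return repr(value)
    if isinstance(value, int):
        return str(value)
    if isinstance(value, list) and all(isinstance(x, str) for x in value):
        return '[' + ', '.join(repr(x) for x in sorted(value)) + ']'
    raise AssertionError('unsupported literal: {}'.format(value))

print(to_output_code(['b', 'a']))  # ['a', 'b'] -- string lists are sorted first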
def get_instance_of(self, model_cls):
    """ Search the data to find an instance of a model specified in the template """
    for obj in self.data.values():
        if isinstance(obj, model_cls):
            return obj
    LOGGER.error('Context Not Found')
    raise Exception('Context Not Found')
def function[get_instance_of, parameter[self, model_cls]]:
    constant[ Search the data to find an instance of a model specified in the template ]
    for taget[name[obj]] in starred[call[name[self].data.values, parameter[]]] begin[:]
        if call[name[isinstance], parameter[name[obj], name[model_cls]]] begin[:]
            return[name[obj]]
    call[name[LOGGER].error, parameter[constant[Context Not Found]]]
    <ast.Raise object at 0x7da18f09c700>
keyword[def] identifier[get_instance_of] ( identifier[self] , identifier[model_cls] ): literal[string] keyword[for] identifier[obj] keyword[in] identifier[self] . identifier[data] . identifier[values] (): keyword[if] identifier[isinstance] ( identifier[obj] , identifier[model_cls] ): keyword[return] identifier[obj] identifier[LOGGER] . identifier[error] ( literal[string] ) keyword[raise] identifier[Exception] ( literal[string] )
def get_instance_of(self, model_cls):
    """ Search the data to find an instance of a model specified in the template """
    for obj in self.data.values():
        if isinstance(obj, model_cls):
            return obj # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['obj']]
    LOGGER.error('Context Not Found')
    raise Exception('Context Not Found')
def _try_recover(self, trial, error_msg): """Tries to recover trial. Notifies SearchAlgorithm and Scheduler if failure to recover. Args: trial (Trial): Trial to recover. error_msg (str): Error message from prior to invoking this method. """ try: self.trial_executor.stop_trial( trial, error=error_msg is not None, error_msg=error_msg, stop_logger=False) trial.result_logger.flush() if self.trial_executor.has_resources(trial.resources): logger.info("Attempting to recover" " trial state from last checkpoint.") self.trial_executor.start_trial(trial) if trial.status == Trial.ERROR: raise RuntimeError("Trial did not start correctly.") else: logger.debug("Notifying Scheduler and requeueing trial.") self._requeue_trial(trial) except Exception: logger.exception("Error recovering trial from checkpoint, abort.") self._scheduler_alg.on_trial_error(self, trial) self._search_alg.on_trial_complete(trial.trial_id, error=True)
def function[_try_recover, parameter[self, trial, error_msg]]: constant[Tries to recover trial. Notifies SearchAlgorithm and Scheduler if failure to recover. Args: trial (Trial): Trial to recover. error_msg (str): Error message from prior to invoking this method. ] <ast.Try object at 0x7da18f09de70>
keyword[def] identifier[_try_recover] ( identifier[self] , identifier[trial] , identifier[error_msg] ): literal[string] keyword[try] : identifier[self] . identifier[trial_executor] . identifier[stop_trial] ( identifier[trial] , identifier[error] = identifier[error_msg] keyword[is] keyword[not] keyword[None] , identifier[error_msg] = identifier[error_msg] , identifier[stop_logger] = keyword[False] ) identifier[trial] . identifier[result_logger] . identifier[flush] () keyword[if] identifier[self] . identifier[trial_executor] . identifier[has_resources] ( identifier[trial] . identifier[resources] ): identifier[logger] . identifier[info] ( literal[string] literal[string] ) identifier[self] . identifier[trial_executor] . identifier[start_trial] ( identifier[trial] ) keyword[if] identifier[trial] . identifier[status] == identifier[Trial] . identifier[ERROR] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[_requeue_trial] ( identifier[trial] ) keyword[except] identifier[Exception] : identifier[logger] . identifier[exception] ( literal[string] ) identifier[self] . identifier[_scheduler_alg] . identifier[on_trial_error] ( identifier[self] , identifier[trial] ) identifier[self] . identifier[_search_alg] . identifier[on_trial_complete] ( identifier[trial] . identifier[trial_id] , identifier[error] = keyword[True] )
def _try_recover(self, trial, error_msg): """Tries to recover trial. Notifies SearchAlgorithm and Scheduler if failure to recover. Args: trial (Trial): Trial to recover. error_msg (str): Error message from prior to invoking this method. """ try: self.trial_executor.stop_trial(trial, error=error_msg is not None, error_msg=error_msg, stop_logger=False) trial.result_logger.flush() if self.trial_executor.has_resources(trial.resources): logger.info('Attempting to recover trial state from last checkpoint.') self.trial_executor.start_trial(trial) if trial.status == Trial.ERROR: raise RuntimeError('Trial did not start correctly.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: logger.debug('Notifying Scheduler and requeueing trial.') self._requeue_trial(trial) # depends on [control=['try'], data=[]] except Exception: logger.exception('Error recovering trial from checkpoint, abort.') self._scheduler_alg.on_trial_error(self, trial) self._search_alg.on_trial_complete(trial.trial_id, error=True) # depends on [control=['except'], data=[]]
def unassign_item_from_bank(self, item_id, bank_id): """Removes an ``Item`` from a ``Bank``. arg: item_id (osid.id.Id): the ``Id`` of the ``Item`` arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` raise: NotFound - ``item_id`` or ``bank_id`` not found or ``item_id`` not assigned to ``bank_id`` raise: NullArgument - ``item_id`` or ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin mgr = self._get_provider_manager('ASSESSMENT', local=True) lookup_session = mgr.get_bank_lookup_session(proxy=self._proxy) lookup_session.get_bank(bank_id) # to raise NotFound self._unassign_object_from_catalog(item_id, bank_id)
def function[unassign_item_from_bank, parameter[self, item_id, bank_id]]: constant[Removes an ``Item`` from a ``Bank``. arg: item_id (osid.id.Id): the ``Id`` of the ``Item`` arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` raise: NotFound - ``item_id`` or ``bank_id`` not found or ``item_id`` not assigned to ``bank_id`` raise: NullArgument - ``item_id`` or ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* ] variable[mgr] assign[=] call[name[self]._get_provider_manager, parameter[constant[ASSESSMENT]]] variable[lookup_session] assign[=] call[name[mgr].get_bank_lookup_session, parameter[]] call[name[lookup_session].get_bank, parameter[name[bank_id]]] call[name[self]._unassign_object_from_catalog, parameter[name[item_id], name[bank_id]]]
keyword[def] identifier[unassign_item_from_bank] ( identifier[self] , identifier[item_id] , identifier[bank_id] ): literal[string] identifier[mgr] = identifier[self] . identifier[_get_provider_manager] ( literal[string] , identifier[local] = keyword[True] ) identifier[lookup_session] = identifier[mgr] . identifier[get_bank_lookup_session] ( identifier[proxy] = identifier[self] . identifier[_proxy] ) identifier[lookup_session] . identifier[get_bank] ( identifier[bank_id] ) identifier[self] . identifier[_unassign_object_from_catalog] ( identifier[item_id] , identifier[bank_id] )
def unassign_item_from_bank(self, item_id, bank_id): """Removes an ``Item`` from a ``Bank``. arg: item_id (osid.id.Id): the ``Id`` of the ``Item`` arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` raise: NotFound - ``item_id`` or ``bank_id`` not found or ``item_id`` not assigned to ``bank_id`` raise: NullArgument - ``item_id`` or ``bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin mgr = self._get_provider_manager('ASSESSMENT', local=True) lookup_session = mgr.get_bank_lookup_session(proxy=self._proxy) lookup_session.get_bank(bank_id) # to raise NotFound self._unassign_object_from_catalog(item_id, bank_id)
def calculate_pore_shape(elements, coordinates, adjust=1, increment=0.1, **kwargs):
    """Return the pore shape of a molecule as an array of sampled points."""
    # Copy the coordinates as we will perform many operations on them
    coordinates = deepcopy(coordinates)
    # Center of our Cartesian system is always at origin
    origin = np.array([0, 0, 0])
    # Initial center of mass to reverse translation at the end
    initial_com = center_of_mass(elements, coordinates)
    # We just shift the cage to the origin.
    coordinates = shift_com(elements, coordinates)
    # We create an array of vdw radii of elements.
    elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements])
    # We calculate maximum diameter of a molecule to determine the radius
    # of a sampling sphere necessary to enclose the whole molecule.
    shpere_radius = max_dim(elements, coordinates)[2]/2
    sphere_surface_area = 4 * np.pi * shpere_radius**2
    # Here we determine the number of sampling points necessary for a fine
    # sampling. Smaller molecules require a finer density of sampling
    # points on the sampling sphere's surface, whereas larger ones require
    # fewer. This formula was created so that larger molecules do not take
    # much longer to analyse, as number_sampling_points*length_of_sampling_vectors
    # results in a quadratic increase of sampling time. The 250 factor was
    # specifically determined to produce close to 1 sampling point /Angstrom^2
    # for a sphere of radius ~ 24 Angstrom. We can adjust how fine the
    # sampling is by changing the adjust factor.
    number_of_points = int(np.log10(sphere_surface_area) * 250 * adjust)
    # Here I use code by Alexandre Devert for spreading points on a sphere:
    # http://blog.marmakoide.org/?p=1
    golden_angle = np.pi * (3 - np.sqrt(5))
    theta = golden_angle * np.arange(number_of_points)
    z = np.linspace(1 - 1.0 / number_of_points,
                    1.0 / number_of_points - 1.0,
                    number_of_points)
    radius = np.sqrt(1 - z * z)
    points = np.zeros((number_of_points, 3))
    points[:, 0] = radius * np.cos(theta) * shpere_radius
    points[:, 1] = radius * np.sin(theta) * shpere_radius
    points[:, 2] = z * shpere_radius
    # Here we will compute the eps parameter for the sklearn.cluster.DBSCAN
    # (3-dimensional spatial clustering algorithm), which is the mean
    # distance to the closest point over all points.
    values = []
    tree = KDTree(points)
    for i in points:
        dist, ind = tree.query(i.reshape(1, -1), k=10)
        values.extend(dist)
    mean_distance = np.mean(values)
    # The best eps is parametrized by adding the mean distance and its root.
    eps = mean_distance + mean_distance**0.5
    # Here we either run the sampling point vector analysis in serial
    # or parallel. Vectors that go through molecular voids are returned as
    # an analysed list with the increment along the vector's path that has
    # the largest included sphere, plus the coordinates of this narrow
    # channel point. Vectors that find a molecule on their path are
    # returned as NoneType objects.
    results = [
        vector_analysis_pore_shape(point, coordinates, elements_vdw)
        for point in points
    ]
    results_cleaned = [x for x in results if x is not None]
    ele = np.array(['X'] * len(results_cleaned))
    coor = np.array(results_cleaned)
    return coor
def function[calculate_pore_shape, parameter[elements, coordinates, adjust, increment]]: constant[Return average diameter for a molecule.] variable[coordinates] assign[=] call[name[deepcopy], parameter[name[coordinates]]] variable[origin] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da2047ea650>, <ast.Constant object at 0x7da2047eb160>, <ast.Constant object at 0x7da2047eac80>]]]] variable[initial_com] assign[=] call[name[center_of_mass], parameter[name[elements], name[coordinates]]] variable[coordinates] assign[=] call[name[shift_com], parameter[name[elements], name[coordinates]]] variable[elements_vdw] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da2047e9c60>]] variable[shpere_radius] assign[=] binary_operation[call[call[name[max_dim], parameter[name[elements], name[coordinates]]]][constant[2]] / constant[2]] variable[sphere_surface_area] assign[=] binary_operation[binary_operation[constant[4] * name[np].pi] * binary_operation[name[shpere_radius] ** constant[2]]] variable[number_of_points] assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[np].log10, parameter[name[sphere_surface_area]]] * constant[250]] * name[adjust]]]] variable[golden_angle] assign[=] binary_operation[name[np].pi * binary_operation[constant[3] - call[name[np].sqrt, parameter[constant[5]]]]] variable[theta] assign[=] binary_operation[name[golden_angle] * call[name[np].arange, parameter[name[number_of_points]]]] variable[z] assign[=] call[name[np].linspace, parameter[binary_operation[constant[1] - binary_operation[constant[1.0] / name[number_of_points]]], binary_operation[binary_operation[constant[1.0] / name[number_of_points]] - constant[1.0]], name[number_of_points]]] variable[radius] assign[=] call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[z] * name[z]]]]] variable[points] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da20e9b2200>, <ast.Constant object at 0x7da20e9b3100>]]]] call[name[points]][tuple[[<ast.Slice object at 0x7da20e9b34c0>, <ast.Constant object at 0x7da20e9b0280>]]] assign[=] binary_operation[binary_operation[name[radius] * call[name[np].cos, parameter[name[theta]]]] * name[shpere_radius]] call[name[points]][tuple[[<ast.Slice object at 0x7da20e9b2ad0>, <ast.Constant object at 0x7da20e9b2e60>]]] assign[=] binary_operation[binary_operation[name[radius] * call[name[np].sin, parameter[name[theta]]]] * name[shpere_radius]] call[name[points]][tuple[[<ast.Slice object at 0x7da20e9b26e0>, <ast.Constant object at 0x7da20e9b1360>]]] assign[=] binary_operation[name[z] * name[shpere_radius]] variable[values] assign[=] list[[]] variable[tree] assign[=] call[name[KDTree], parameter[name[points]]] for taget[name[i]] in starred[name[points]] begin[:] <ast.Tuple object at 0x7da20e749720> assign[=] call[name[tree].query, parameter[call[name[i].reshape, parameter[constant[1], <ast.UnaryOp object at 0x7da20c9919c0>]]]] call[name[values].extend, parameter[name[dist]]] variable[mean_distance] assign[=] call[name[np].mean, parameter[name[values]]] variable[eps] assign[=] binary_operation[name[mean_distance] + binary_operation[name[mean_distance] ** constant[0.5]]] variable[results] assign[=] <ast.ListComp object at 0x7da20c991cf0> variable[results_cleaned] assign[=] <ast.ListComp object at 0x7da20c990940> variable[ele] assign[=] call[name[np].array, parameter[binary_operation[list[[<ast.Constant object at 0x7da20c9918a0>]] * call[name[len], parameter[name[results_cleaned]]]]]] 
variable[coor] assign[=] call[name[np].array, parameter[name[results_cleaned]]] return[name[coor]]
keyword[def] identifier[calculate_pore_shape] ( identifier[elements] , identifier[coordinates] , identifier[adjust] = literal[int] , identifier[increment] = literal[int] , ** identifier[kwargs] ): literal[string] identifier[coordinates] = identifier[deepcopy] ( identifier[coordinates] ) identifier[origin] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]) identifier[initial_com] = identifier[center_of_mass] ( identifier[elements] , identifier[coordinates] ) identifier[coordinates] = identifier[shift_com] ( identifier[elements] , identifier[coordinates] ) identifier[elements_vdw] = identifier[np] . identifier[array] ([[ identifier[atomic_vdw_radius] [ identifier[x] . identifier[upper] ()]] keyword[for] identifier[x] keyword[in] identifier[elements] ]) identifier[shpere_radius] = identifier[max_dim] ( identifier[elements] , identifier[coordinates] )[ literal[int] ]/ literal[int] identifier[sphere_surface_area] = literal[int] * identifier[np] . identifier[pi] * identifier[shpere_radius] ** literal[int] identifier[number_of_points] = identifier[int] ( identifier[np] . identifier[log10] ( identifier[sphere_surface_area] )* literal[int] * identifier[adjust] ) identifier[golden_angle] = identifier[np] . identifier[pi] *( literal[int] - identifier[np] . identifier[sqrt] ( literal[int] )) identifier[theta] = identifier[golden_angle] * identifier[np] . identifier[arange] ( identifier[number_of_points] ) identifier[z] = identifier[np] . identifier[linspace] ( literal[int] - literal[int] / identifier[number_of_points] , literal[int] / identifier[number_of_points] - literal[int] , identifier[number_of_points] ) identifier[radius] = identifier[np] . identifier[sqrt] ( literal[int] - identifier[z] * identifier[z] ) identifier[points] = identifier[np] . identifier[zeros] (( identifier[number_of_points] , literal[int] )) identifier[points] [:, literal[int] ]= identifier[radius] * identifier[np] . identifier[cos] ( identifier[theta] )* identifier[shpere_radius] identifier[points] [:, literal[int] ]= identifier[radius] * identifier[np] . identifier[sin] ( identifier[theta] )* identifier[shpere_radius] identifier[points] [:, literal[int] ]= identifier[z] * identifier[shpere_radius] identifier[values] =[] identifier[tree] = identifier[KDTree] ( identifier[points] ) keyword[for] identifier[i] keyword[in] identifier[points] : identifier[dist] , identifier[ind] = identifier[tree] . identifier[query] ( identifier[i] . identifier[reshape] ( literal[int] ,- literal[int] ), identifier[k] = literal[int] ) identifier[values] . identifier[extend] ( identifier[dist] ) identifier[mean_distance] = identifier[np] . identifier[mean] ( identifier[values] ) identifier[eps] = identifier[mean_distance] + identifier[mean_distance] ** literal[int] identifier[results] =[ identifier[vector_analysis_pore_shape] ( identifier[point] , identifier[coordinates] , identifier[elements_vdw] ) keyword[for] identifier[point] keyword[in] identifier[points] ] identifier[results_cleaned] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[results] keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] ] identifier[ele] = identifier[np] . identifier[array] ([ literal[string] ]* identifier[len] ( identifier[results_cleaned] )) identifier[coor] = identifier[np] . identifier[array] ( identifier[results_cleaned] ) keyword[return] identifier[coor]
def calculate_pore_shape(elements, coordinates, adjust=1, increment=0.1, **kwargs):
    """Return average diameter for a molecule."""
    # Copy the coordinates, as we will perform many operations on them
    coordinates = deepcopy(coordinates)
    # The center of our Cartesian system is always at the origin
    origin = np.array([0, 0, 0])
    # Initial center of mass, to reverse the translation at the end
    initial_com = center_of_mass(elements, coordinates)
    # We just shift the cage to the origin.
    coordinates = shift_com(elements, coordinates)
    # We create an array of vdw radii of elements.
    elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements])
    # We calculate the maximum diameter of the molecule to determine the
    # radius of a sampling sphere necessary to enclose the whole molecule.
    shpere_radius = max_dim(elements, coordinates)[2] / 2
    sphere_surface_area = 4 * np.pi * shpere_radius ** 2
    # Here we determine the number of sampling points necessary for a fine
    # sampling. Smaller molecules require a finer density of sampling
    # points on the sampling sphere's surface, whereas larger ones require
    # less. This formula was created so that larger molecules do not take
    # much longer to analyse, as number_sampling_points*length_of_sampling_vectors
    # results in a quadratic increase of sampling time. The 250 factor was
    # specifically determined to produce close to 1 sampling point /Angstrom^2
    # for a sphere of radius ~ 24 Angstrom. We can adjust how fine the
    # sampling is by changing the adjust factor.
    number_of_points = int(np.log10(sphere_surface_area) * 250 * adjust)
    # Here I use code by Alexandre Devert for spreading points on a sphere:
    # http://blog.marmakoide.org/?p=1
    golden_angle = np.pi * (3 - np.sqrt(5))
    theta = golden_angle * np.arange(number_of_points)
    z = np.linspace(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0, number_of_points)
    radius = np.sqrt(1 - z * z)
    points = np.zeros((number_of_points, 3))
    points[:, 0] = radius * np.cos(theta) * shpere_radius
    points[:, 1] = radius * np.sin(theta) * shpere_radius
    points[:, 2] = z * shpere_radius
    # Here we compute the eps parameter for sklearn.cluster.DBSCAN
    # (a 3-dimensional spatial clustering algorithm): the mean distance
    # to the closest points, taken over all points.
    values = []
    tree = KDTree(points)
    for i in points:
        (dist, ind) = tree.query(i.reshape(1, -1), k=10)
        values.extend(dist) # depends on [control=['for'], data=['i']]
    mean_distance = np.mean(values)
    # The best eps is parametrized as the mean distance plus its square root.
    eps = mean_distance + mean_distance ** 0.5
    # Here we run the sampling point vector analysis, either in serial
    # or in parallel. Vectors that pass through molecular voids return an
    # analysed result: the increment along the vector's path with the
    # largest included sphere, i.e. the coordinates of this narrow channel
    # point. Vectors that find a molecule on their path are returned as
    # NoneType objects.
    results = [vector_analysis_pore_shape(point, coordinates, elements_vdw) for point in points]
    results_cleaned = [x for x in results if x is not None]
    ele = np.array(['X'] * len(results_cleaned))
    coor = np.array(results_cleaned)
    return coor
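The entry above leans on Alexandre Devert's golden-angle spiral to spread near-uniform sampling points over a sphere. A minimal, self-contained sketch of just that step, assuming only numpy; the point count and radius are illustrative values, not taken from the code above:

import numpy as np

# Successive points rotate by the golden angle, pi * (3 - sqrt(5)),
# while z descends linearly, giving near-uniform sphere coverage.
number_of_points = 500
sphere_radius = 12.0  # placeholder value

golden_angle = np.pi * (3 - np.sqrt(5))
theta = golden_angle * np.arange(number_of_points)
z = np.linspace(1 - 1.0 / number_of_points,
                1.0 / number_of_points - 1.0,
                number_of_points)
radius = np.sqrt(1 - z * z)

points = np.zeros((number_of_points, 3))
points[:, 0] = radius * np.cos(theta) * sphere_radius
points[:, 1] = radius * np.sin(theta) * sphere_radius
points[:, 2] = z * sphere_radius

# Every point sits on the sphere's surface (up to floating point error).
assert np.allclose(np.linalg.norm(points, axis=1), sphere_radius)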
def _get_stmt_by_group(self, stmt_type, stmts_this_type, eh): """Group Statements of `stmt_type` by their hierarchical relations.""" # Dict of stmt group key tuples, indexed by their first Agent stmt_by_first = collections.defaultdict(lambda: []) # Dict of stmt group key tuples, indexed by their second Agent stmt_by_second = collections.defaultdict(lambda: []) # Dict of statements with None first, with second Agent as keys none_first = collections.defaultdict(lambda: []) # Dict of statements with None second, with first Agent as keys none_second = collections.defaultdict(lambda: []) # The dict of all statement groups, with tuples of components # or entity_matches_keys as keys stmt_by_group = collections.defaultdict(lambda: []) # Here we group Statements according to the hierarchy graph # components that their agents are part of for stmt_tuple in stmts_this_type: _, stmt = stmt_tuple entities = self._get_entities(stmt, stmt_type, eh) # At this point we have an entity list # If we're dealing with Complexes, sort the entities and use # as dict key if stmt_type == Complex: # There shouldn't be any statements of the type # e.g., Complex([Foo, None, Bar]) assert None not in entities assert len(entities) > 0 entities.sort() key = tuple(entities) if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) elif stmt_type == Conversion: assert len(entities) > 0 key = (entities[0], tuple(sorted(entities[1:len(stmt.obj_from)+1])), tuple(sorted(entities[-len(stmt.obj_to):]))) if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) # Now look at all other statement types # All other statements will have one or two entities elif len(entities) == 1: # If only one entity, we only need the one key # It should not be None! assert None not in entities key = tuple(entities) if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) else: # Make sure we only have two entities, and they are not both # None key = tuple(entities) assert len(key) == 2 assert key != (None, None) # First agent is None; add in the statements, indexed by # 2nd if key[0] is None and stmt_tuple not in none_first[key[1]]: none_first[key[1]].append(stmt_tuple) # Second agent is None; add in the statements, indexed by # 1st elif key[1] is None and stmt_tuple not in none_second[key[0]]: none_second[key[0]].append(stmt_tuple) # Neither entity is None! elif None not in key: if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) if key not in stmt_by_first[key[0]]: stmt_by_first[key[0]].append(key) if key not in stmt_by_second[key[1]]: stmt_by_second[key[1]].append(key) # When we've gotten here, we should have stmt_by_group entries, and # we may or may not have stmt_by_first/second dicts filled out # (depending on the statement type). if none_first: # Get the keys associated with stmts having a None first # argument for second_arg, stmts in none_first.items(): # Look for any statements with this second arg second_arg_keys = stmt_by_second[second_arg] # If there are no more specific statements matching this # set of statements with a None first arg, then the # statements with the None first arg deserve to be in # their own group. 
if not second_arg_keys: stmt_by_group[(None, second_arg)] = stmts # On the other hand, if there are statements with a matching # second arg component, we need to add the None first # statements to all groups with the matching second arg for second_arg_key in second_arg_keys: stmt_by_group[second_arg_key] += stmts # Now do the corresponding steps for the statements with None as the # second argument: if none_second: for first_arg, stmts in none_second.items(): # Look for any statements with this first arg first_arg_keys = stmt_by_first[first_arg] # If there are no more specific statements matching this # set of statements with a None second arg, then the # statements with the None second arg deserve to be in # their own group. if not first_arg_keys: stmt_by_group[(first_arg, None)] = stmts # On the other hand, if there are statements with a matching # first arg component, we need to add the None second # statements to all groups with the matching first arg for first_arg_key in first_arg_keys: stmt_by_group[first_arg_key] += stmts return stmt_by_group
def function[_get_stmt_by_group, parameter[self, stmt_type, stmts_this_type, eh]]: constant[Group Statements of `stmt_type` by their hierarchical relations.] variable[stmt_by_first] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b2347550>]] variable[stmt_by_second] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b2346b90>]] variable[none_first] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b23474f0>]] variable[none_second] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b2347340>]] variable[stmt_by_group] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b2346500>]] for taget[name[stmt_tuple]] in starred[name[stmts_this_type]] begin[:] <ast.Tuple object at 0x7da1b2346cb0> assign[=] name[stmt_tuple] variable[entities] assign[=] call[name[self]._get_entities, parameter[name[stmt], name[stmt_type], name[eh]]] if compare[name[stmt_type] equal[==] name[Complex]] begin[:] assert[compare[constant[None] <ast.NotIn object at 0x7da2590d7190> name[entities]]] assert[compare[call[name[len], parameter[name[entities]]] greater[>] constant[0]]] call[name[entities].sort, parameter[]] variable[key] assign[=] call[name[tuple], parameter[name[entities]]] if compare[name[stmt_tuple] <ast.NotIn object at 0x7da2590d7190> call[name[stmt_by_group]][name[key]]] begin[:] call[call[name[stmt_by_group]][name[key]].append, parameter[name[stmt_tuple]]] if name[none_first] begin[:] for taget[tuple[[<ast.Name object at 0x7da18ede4e20>, <ast.Name object at 0x7da18ede4a60>]]] in starred[call[name[none_first].items, parameter[]]] begin[:] variable[second_arg_keys] assign[=] call[name[stmt_by_second]][name[second_arg]] if <ast.UnaryOp object at 0x7da18ede5ab0> begin[:] call[name[stmt_by_group]][tuple[[<ast.Constant object at 0x7da18ede6740>, <ast.Name object at 0x7da18ede6f80>]]] assign[=] name[stmts] for taget[name[second_arg_key]] in starred[name[second_arg_keys]] begin[:] <ast.AugAssign object at 0x7da18ede5c60> if name[none_second] begin[:] for taget[tuple[[<ast.Name object at 0x7da18ede6560>, <ast.Name object at 0x7da18ede4280>]]] in starred[call[name[none_second].items, parameter[]]] begin[:] variable[first_arg_keys] assign[=] call[name[stmt_by_first]][name[first_arg]] if <ast.UnaryOp object at 0x7da18ede6a10> begin[:] call[name[stmt_by_group]][tuple[[<ast.Name object at 0x7da18ede71c0>, <ast.Constant object at 0x7da18ede5150>]]] assign[=] name[stmts] for taget[name[first_arg_key]] in starred[name[first_arg_keys]] begin[:] <ast.AugAssign object at 0x7da18ede5f90> return[name[stmt_by_group]]
keyword[def] identifier[_get_stmt_by_group] ( identifier[self] , identifier[stmt_type] , identifier[stmts_this_type] , identifier[eh] ): literal[string] identifier[stmt_by_first] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] :[]) identifier[stmt_by_second] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] :[]) identifier[none_first] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] :[]) identifier[none_second] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] :[]) identifier[stmt_by_group] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] :[]) keyword[for] identifier[stmt_tuple] keyword[in] identifier[stmts_this_type] : identifier[_] , identifier[stmt] = identifier[stmt_tuple] identifier[entities] = identifier[self] . identifier[_get_entities] ( identifier[stmt] , identifier[stmt_type] , identifier[eh] ) keyword[if] identifier[stmt_type] == identifier[Complex] : keyword[assert] keyword[None] keyword[not] keyword[in] identifier[entities] keyword[assert] identifier[len] ( identifier[entities] )> literal[int] identifier[entities] . identifier[sort] () identifier[key] = identifier[tuple] ( identifier[entities] ) keyword[if] identifier[stmt_tuple] keyword[not] keyword[in] identifier[stmt_by_group] [ identifier[key] ]: identifier[stmt_by_group] [ identifier[key] ]. identifier[append] ( identifier[stmt_tuple] ) keyword[elif] identifier[stmt_type] == identifier[Conversion] : keyword[assert] identifier[len] ( identifier[entities] )> literal[int] identifier[key] =( identifier[entities] [ literal[int] ], identifier[tuple] ( identifier[sorted] ( identifier[entities] [ literal[int] : identifier[len] ( identifier[stmt] . identifier[obj_from] )+ literal[int] ])), identifier[tuple] ( identifier[sorted] ( identifier[entities] [- identifier[len] ( identifier[stmt] . identifier[obj_to] ):]))) keyword[if] identifier[stmt_tuple] keyword[not] keyword[in] identifier[stmt_by_group] [ identifier[key] ]: identifier[stmt_by_group] [ identifier[key] ]. identifier[append] ( identifier[stmt_tuple] ) keyword[elif] identifier[len] ( identifier[entities] )== literal[int] : keyword[assert] keyword[None] keyword[not] keyword[in] identifier[entities] identifier[key] = identifier[tuple] ( identifier[entities] ) keyword[if] identifier[stmt_tuple] keyword[not] keyword[in] identifier[stmt_by_group] [ identifier[key] ]: identifier[stmt_by_group] [ identifier[key] ]. identifier[append] ( identifier[stmt_tuple] ) keyword[else] : identifier[key] = identifier[tuple] ( identifier[entities] ) keyword[assert] identifier[len] ( identifier[key] )== literal[int] keyword[assert] identifier[key] !=( keyword[None] , keyword[None] ) keyword[if] identifier[key] [ literal[int] ] keyword[is] keyword[None] keyword[and] identifier[stmt_tuple] keyword[not] keyword[in] identifier[none_first] [ identifier[key] [ literal[int] ]]: identifier[none_first] [ identifier[key] [ literal[int] ]]. identifier[append] ( identifier[stmt_tuple] ) keyword[elif] identifier[key] [ literal[int] ] keyword[is] keyword[None] keyword[and] identifier[stmt_tuple] keyword[not] keyword[in] identifier[none_second] [ identifier[key] [ literal[int] ]]: identifier[none_second] [ identifier[key] [ literal[int] ]]. identifier[append] ( identifier[stmt_tuple] ) keyword[elif] keyword[None] keyword[not] keyword[in] identifier[key] : keyword[if] identifier[stmt_tuple] keyword[not] keyword[in] identifier[stmt_by_group] [ identifier[key] ]: identifier[stmt_by_group] [ identifier[key] ]. 
identifier[append] ( identifier[stmt_tuple] ) keyword[if] identifier[key] keyword[not] keyword[in] identifier[stmt_by_first] [ identifier[key] [ literal[int] ]]: identifier[stmt_by_first] [ identifier[key] [ literal[int] ]]. identifier[append] ( identifier[key] ) keyword[if] identifier[key] keyword[not] keyword[in] identifier[stmt_by_second] [ identifier[key] [ literal[int] ]]: identifier[stmt_by_second] [ identifier[key] [ literal[int] ]]. identifier[append] ( identifier[key] ) keyword[if] identifier[none_first] : keyword[for] identifier[second_arg] , identifier[stmts] keyword[in] identifier[none_first] . identifier[items] (): identifier[second_arg_keys] = identifier[stmt_by_second] [ identifier[second_arg] ] keyword[if] keyword[not] identifier[second_arg_keys] : identifier[stmt_by_group] [( keyword[None] , identifier[second_arg] )]= identifier[stmts] keyword[for] identifier[second_arg_key] keyword[in] identifier[second_arg_keys] : identifier[stmt_by_group] [ identifier[second_arg_key] ]+= identifier[stmts] keyword[if] identifier[none_second] : keyword[for] identifier[first_arg] , identifier[stmts] keyword[in] identifier[none_second] . identifier[items] (): identifier[first_arg_keys] = identifier[stmt_by_first] [ identifier[first_arg] ] keyword[if] keyword[not] identifier[first_arg_keys] : identifier[stmt_by_group] [( identifier[first_arg] , keyword[None] )]= identifier[stmts] keyword[for] identifier[first_arg_key] keyword[in] identifier[first_arg_keys] : identifier[stmt_by_group] [ identifier[first_arg_key] ]+= identifier[stmts] keyword[return] identifier[stmt_by_group]
def _get_stmt_by_group(self, stmt_type, stmts_this_type, eh): """Group Statements of `stmt_type` by their hierarchical relations.""" # Dict of stmt group key tuples, indexed by their first Agent stmt_by_first = collections.defaultdict(lambda : []) # Dict of stmt group key tuples, indexed by their second Agent stmt_by_second = collections.defaultdict(lambda : []) # Dict of statements with None first, with second Agent as keys none_first = collections.defaultdict(lambda : []) # Dict of statements with None second, with first Agent as keys none_second = collections.defaultdict(lambda : []) # The dict of all statement groups, with tuples of components # or entity_matches_keys as keys stmt_by_group = collections.defaultdict(lambda : []) # Here we group Statements according to the hierarchy graph # components that their agents are part of for stmt_tuple in stmts_this_type: (_, stmt) = stmt_tuple entities = self._get_entities(stmt, stmt_type, eh) # At this point we have an entity list # If we're dealing with Complexes, sort the entities and use # as dict key if stmt_type == Complex: # There shouldn't be any statements of the type # e.g., Complex([Foo, None, Bar]) assert None not in entities assert len(entities) > 0 entities.sort() key = tuple(entities) if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) # depends on [control=['if'], data=['stmt_tuple']] # depends on [control=['if'], data=[]] elif stmt_type == Conversion: assert len(entities) > 0 key = (entities[0], tuple(sorted(entities[1:len(stmt.obj_from) + 1])), tuple(sorted(entities[-len(stmt.obj_to):]))) if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) # depends on [control=['if'], data=['stmt_tuple']] # depends on [control=['if'], data=[]] # Now look at all other statement types # All other statements will have one or two entities elif len(entities) == 1: # If only one entity, we only need the one key # It should not be None! assert None not in entities key = tuple(entities) if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) # depends on [control=['if'], data=['stmt_tuple']] # depends on [control=['if'], data=[]] else: # Make sure we only have two entities, and they are not both # None key = tuple(entities) assert len(key) == 2 assert key != (None, None) # First agent is None; add in the statements, indexed by # 2nd if key[0] is None and stmt_tuple not in none_first[key[1]]: none_first[key[1]].append(stmt_tuple) # depends on [control=['if'], data=[]] # Second agent is None; add in the statements, indexed by # 1st elif key[1] is None and stmt_tuple not in none_second[key[0]]: none_second[key[0]].append(stmt_tuple) # depends on [control=['if'], data=[]] # Neither entity is None! elif None not in key: if stmt_tuple not in stmt_by_group[key]: stmt_by_group[key].append(stmt_tuple) # depends on [control=['if'], data=['stmt_tuple']] if key not in stmt_by_first[key[0]]: stmt_by_first[key[0]].append(key) # depends on [control=['if'], data=['key']] if key not in stmt_by_second[key[1]]: stmt_by_second[key[1]].append(key) # depends on [control=['if'], data=['key']] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['stmt_tuple']] # When we've gotten here, we should have stmt_by_group entries, and # we may or may not have stmt_by_first/second dicts filled out # (depending on the statement type). 
if none_first: # Get the keys associated with stmts having a None first # argument for (second_arg, stmts) in none_first.items(): # Look for any statements with this second arg second_arg_keys = stmt_by_second[second_arg] # If there are no more specific statements matching this # set of statements with a None first arg, then the # statements with the None first arg deserve to be in # their own group. if not second_arg_keys: stmt_by_group[None, second_arg] = stmts # depends on [control=['if'], data=[]] # On the other hand, if there are statements with a matching # second arg component, we need to add the None first # statements to all groups with the matching second arg for second_arg_key in second_arg_keys: stmt_by_group[second_arg_key] += stmts # depends on [control=['for'], data=['second_arg_key']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # Now do the corresponding steps for the statements with None as the # second argument: if none_second: for (first_arg, stmts) in none_second.items(): # Look for any statements with this first arg first_arg_keys = stmt_by_first[first_arg] # If there are no more specific statements matching this # set of statements with a None second arg, then the # statements with the None second arg deserve to be in # their own group. if not first_arg_keys: stmt_by_group[first_arg, None] = stmts # depends on [control=['if'], data=[]] # On the other hand, if there are statements with a matching # first arg component, we need to add the None second # statements to all groups with the matching first arg for first_arg_key in first_arg_keys: stmt_by_group[first_arg_key] += stmts # depends on [control=['for'], data=['first_arg_key']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return stmt_by_group
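The None-handling at the end of _get_stmt_by_group is easy to miss: a statement with one unspecified agent either folds into every existing group that shares its known agent, or forms a group of its own. A stripped-down sketch of that merge step, with plain strings standing in for Statements (the data here is illustrative, not INDRA's API):

import collections

stmt_by_group = collections.defaultdict(list)
stmt_by_second = collections.defaultdict(list)

# Fully specified statements, keyed by (first, second) agent.
for key, stmt in [(('A', 'B'), 's1'), (('C', 'B'), 's2')]:
    stmt_by_group[key].append(stmt)
    stmt_by_second[key[1]].append(key)

# Statements whose first agent is None, indexed by their second agent.
none_first = {'B': ['s3'], 'D': ['s4']}
for second_arg, stmts in none_first.items():
    second_arg_keys = stmt_by_second[second_arg]
    if not second_arg_keys:
        # No more specific group exists; keep them in their own group.
        stmt_by_group[(None, second_arg)] = stmts
    for second_arg_key in second_arg_keys:
        # Otherwise fold them into every matching group.
        stmt_by_group[second_arg_key] += stmts

# ('A', 'B') and ('C', 'B') both gain 's3'; (None, 'D') holds 's4'.
print(dict(stmt_by_group))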
def iter_instances(self): """Iterate over the stored objects Yields: wrkey: The two-tuple key used to store the object obj: The instance or function object """ for wrkey in set(self.keys()): obj = self.get(wrkey) if obj is None: continue yield wrkey, obj
def function[iter_instances, parameter[self]]: constant[Iterate over the stored objects Yields: wrkey: The two-tuple key used to store the object obj: The instance or function object ] for taget[name[wrkey]] in starred[call[name[set], parameter[call[name[self].keys, parameter[]]]]] begin[:] variable[obj] assign[=] call[name[self].get, parameter[name[wrkey]]] if compare[name[obj] is constant[None]] begin[:] continue <ast.Yield object at 0x7da20c794940>
keyword[def] identifier[iter_instances] ( identifier[self] ): literal[string] keyword[for] identifier[wrkey] keyword[in] identifier[set] ( identifier[self] . identifier[keys] ()): identifier[obj] = identifier[self] . identifier[get] ( identifier[wrkey] ) keyword[if] identifier[obj] keyword[is] keyword[None] : keyword[continue] keyword[yield] identifier[wrkey] , identifier[obj]
def iter_instances(self): """Iterate over the stored objects Yields: wrkey: The two-tuple key used to store the object obj: The instance or function object """ for wrkey in set(self.keys()): obj = self.get(wrkey) if obj is None: continue # depends on [control=['if'], data=[]] yield (wrkey, obj) # depends on [control=['for'], data=['wrkey']]
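iter_instances assumes its host class is dict-like (it calls self.keys() and self.get()) and snapshots the keys with set(), so entries may be removed while iterating; None values, such as dead weak references, are skipped. A hypothetical dict subclass illustrates the behaviour:

class Registry(dict):
    def iter_instances(self):
        for wrkey in set(self.keys()):
            obj = self.get(wrkey)
            if obj is None:
                continue
            yield wrkey, obj

reg = Registry({('f', 1): print, ('g', 2): None})
# The None entry is silently skipped.
assert list(reg.iter_instances()) == [(('f', 1), print)]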
def colorlogs(format="short"): """Append a rainbow logging handler and a formatter to the root logger""" try: from rainbow_logging_handler import RainbowLoggingHandler import sys # setup `RainbowLoggingHandler` logger = logging.root # same as default if format == "short": fmt = "%(message)s " else: fmt = "[%(asctime)s] %(name)s %(funcName)s():%(lineno)d\t%(message)s [%(levelname)s]" formatter = logging.Formatter(fmt) handler = RainbowLoggingHandler(sys.stderr, color_funcName=('black', 'gray', True)) handler.setFormatter(formatter) logger.addHandler(handler) except ImportError: # rainbow logger not found, that's ok pass
def function[colorlogs, parameter[format]]: constant[Append a rainbow logging handler and a formatter to the root logger] <ast.Try object at 0x7da1b26a2ad0>
keyword[def] identifier[colorlogs] ( identifier[format] = literal[string] ): literal[string] keyword[try] : keyword[from] identifier[rainbow_logging_handler] keyword[import] identifier[RainbowLoggingHandler] keyword[import] identifier[sys] identifier[logger] = identifier[logging] . identifier[root] keyword[if] identifier[format] == literal[string] : identifier[fmt] = literal[string] keyword[else] : identifier[fmt] = literal[string] identifier[formatter] = identifier[logging] . identifier[Formatter] ( identifier[fmt] ) identifier[handler] = identifier[RainbowLoggingHandler] ( identifier[sys] . identifier[stderr] , identifier[color_funcName] =( literal[string] , literal[string] , keyword[True] )) identifier[handler] . identifier[setFormatter] ( identifier[formatter] ) identifier[logger] . identifier[addHandler] ( identifier[handler] ) keyword[except] identifier[ImportError] : keyword[pass]
def colorlogs(format='short'): """Append a rainbow logging handler and a formatter to the root logger""" try: from rainbow_logging_handler import RainbowLoggingHandler import sys # setup `RainbowLoggingHandler` logger = logging.root # same as default if format == 'short': fmt = '%(message)s ' # depends on [control=['if'], data=[]] else: fmt = '[%(asctime)s] %(name)s %(funcName)s():%(lineno)d\t%(message)s [%(levelname)s]' formatter = logging.Formatter(fmt) handler = RainbowLoggingHandler(sys.stderr, color_funcName=('black', 'gray', True)) handler.setFormatter(formatter) logger.addHandler(handler) # depends on [control=['try'], data=[]] except ImportError: # rainbow logger not found, that's ok pass # depends on [control=['except'], data=[]]
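Usage is a one-liner: any format value other than "short" selects the verbose pattern, and the function degrades to a silent no-op when rainbow_logging_handler is not installed. A sketch, assuming colorlogs is importable from its defining module:

import logging

colorlogs(format="verbose")  # attach the rainbow handler to the root logger
logging.root.setLevel(logging.DEBUG)
logging.getLogger(__name__).info("colorized if rainbow_logging_handler is installed")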
def query(self, q, data=None, union=True, limit=None): """ Query your database with a raw string. Parameters ---------- q: str Query string to execute data: list, dict Optional argument for handlebars-queries. Data will be passed to the template and rendered using handlebars. union: bool Whether or not "UNION ALL" handlebars templates. This will return any handlebars queries as a single data frame. limit: int Number of records to return Examples -------- >>> from db import DemoDB >>> db = DemoDB() db.query("select * from Track").head(2) TrackId Name AlbumId MediaTypeId \\\r 0 1 For Those About To Rock (We Salute You) 1 1 1 2 Balls to the Wall 2 2 <BLANKLINE> GenreId Composer Milliseconds Bytes \\\r 0 1 Angus Young, Malcolm Young, Brian Johnson 343719 11170334 1 1 None 342562 5510424 <BLANKLINE> UnitPrice 0 0.99 1 0.99 db.query("select * from Track", limit=10) TrackId Name AlbumId MediaTypeId \ 0 1 For Those About To Rock (We Salute You) 1 1 1 2 Balls to the Wall 2 2 2 3 Fast As a Shark 3 2 3 4 Restless and Wild 3 2 4 5 Princess of the Dawn 3 2 5 6 Put The Finger On You 1 1 6 7 Let's Get It Up 1 1 7 8 Inject The Venom 1 1 8 9 Snowballed 1 1 9 10 Evil Walks 1 1 GenreId Composer Milliseconds \ 0 1 Angus Young, Malcolm Young, Brian Johnson 343719 1 1 None 342562 2 1 F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho... 230619 3 1 F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D... 252051 4 1 Deaffy & R.A. Smith-Diesel 375418 5 1 Angus Young, Malcolm Young, Brian Johnson 205662 6 1 Angus Young, Malcolm Young, Brian Johnson 233926 7 1 Angus Young, Malcolm Young, Brian Johnson 210834 8 1 Angus Young, Malcolm Young, Brian Johnson 203102 9 1 Angus Young, Malcolm Young, Brian Johnson 263497 Bytes UnitPrice 0 11170334 0.99 1 5510424 0.99 2 3990994 0.99 3 4331779 0.99 4 6290521 0.99 5 6713451 0.99 6 7636561 0.99 7 6852860 0.99 8 6599424 0.99 9 8611245 0.99 >>> q = ''' ... SELECT ... a.Title, ... t.Name, ... t.UnitPrice ... FROM ... Album a ... INNER JOIN ... Track t ... on a.AlbumId = t.AlbumId; ... ''' >>> len(db.query(q)) 3503 db.query(q, limit=10) Title \ 0 For Those About To Rock We Salute You 1 Balls to the Wall 2 Restless and Wild 3 Restless and Wild 4 Restless and Wild 5 For Those About To Rock We Salute You 6 For Those About To Rock We Salute You 7 For Those About To Rock We Salute You 8 For Those About To Rock We Salute You 9 For Those About To Rock We Salute You Name UnitPrice 0 For Those About To Rock (We Salute You) 0.99 1 Balls to the Wall 0.99 2 Fast As a Shark 0.99 3 Restless and Wild 0.99 4 Princess of the Dawn 0.99 5 Put The Finger On You 0.99 6 Let's Get It Up 0.99 7 Inject The Venom 0.99 8 Snowballed 0.99 9 Evil Walks 0.99 >>> template = ''' ... SELECT ... '{{ name }}' as table_name, ... COUNT(*) as cnt ... FROM ... {{ name }} ... GROUP BY ... table_name ... ''' >>> data = [ ... {"name": "Album"}, ... {"name": "Artist"}, ... {"name": "Track"} ... ] >>> db.query(q, data=data) table_name cnt 0 Album 347 1 Artist 275 2 Track 3503 >>> q = ''' ... SELECT ... {{#cols}} ... {{#if @last}} ... {{ . }} ... {{else}} ... {{ . }} , ... {{/if}} ... {{/cols}} ... FROM ... Album; ... ''' >>> data = {"cols": ["AlbumId", "Title", "ArtistId"]} >>> len(db.query(q, data=data, union=False)) 347 db.query(q, data=data, union=False) AlbumId Title ArtistId 0 1 For Those About To Rock We Salute You 1 1 2 Balls to the Wall 2 2 3 Restless and Wild 2 3 4 Let There Be Rock 1 4 5 Big Ones 3 """ if data: q = self._apply_handlebars(q, data, union) if limit: q = self._assign_limit(q, limit) return pd.read_sql(q, self.con)
def function[query, parameter[self, q, data, union, limit]]: constant[ Query your database with a raw string. Parameters ---------- q: str Query string to execute data: list, dict Optional argument for handlebars-queries. Data will be passed to the template and rendered using handlebars. union: bool Whether or not "UNION ALL" handlebars templates. This will return any handlebars queries as a single data frame. limit: int Number of records to return Examples -------- >>> from db import DemoDB >>> db = DemoDB() db.query("select * from Track").head(2) TrackId Name AlbumId MediaTypeId \ 0 1 For Those About To Rock (We Salute You) 1 1 1 2 Balls to the Wall 2 2 <BLANKLINE> GenreId Composer Milliseconds Bytes \ 0 1 Angus Young, Malcolm Young, Brian Johnson 343719 11170334 1 1 None 342562 5510424 <BLANKLINE> UnitPrice 0 0.99 1 0.99 db.query("select * from Track", limit=10) TrackId Name AlbumId MediaTypeId 0 1 For Those About To Rock (We Salute You) 1 1 1 2 Balls to the Wall 2 2 2 3 Fast As a Shark 3 2 3 4 Restless and Wild 3 2 4 5 Princess of the Dawn 3 2 5 6 Put The Finger On You 1 1 6 7 Let's Get It Up 1 1 7 8 Inject The Venom 1 1 8 9 Snowballed 1 1 9 10 Evil Walks 1 1 GenreId Composer Milliseconds 0 1 Angus Young, Malcolm Young, Brian Johnson 343719 1 1 None 342562 2 1 F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho... 230619 3 1 F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D... 252051 4 1 Deaffy & R.A. Smith-Diesel 375418 5 1 Angus Young, Malcolm Young, Brian Johnson 205662 6 1 Angus Young, Malcolm Young, Brian Johnson 233926 7 1 Angus Young, Malcolm Young, Brian Johnson 210834 8 1 Angus Young, Malcolm Young, Brian Johnson 203102 9 1 Angus Young, Malcolm Young, Brian Johnson 263497 Bytes UnitPrice 0 11170334 0.99 1 5510424 0.99 2 3990994 0.99 3 4331779 0.99 4 6290521 0.99 5 6713451 0.99 6 7636561 0.99 7 6852860 0.99 8 6599424 0.99 9 8611245 0.99 >>> q = ''' ... SELECT ... a.Title, ... t.Name, ... t.UnitPrice ... FROM ... Album a ... INNER JOIN ... Track t ... on a.AlbumId = t.AlbumId; ... ''' >>> len(db.query(q)) 3503 db.query(q, limit=10) Title 0 For Those About To Rock We Salute You 1 Balls to the Wall 2 Restless and Wild 3 Restless and Wild 4 Restless and Wild 5 For Those About To Rock We Salute You 6 For Those About To Rock We Salute You 7 For Those About To Rock We Salute You 8 For Those About To Rock We Salute You 9 For Those About To Rock We Salute You Name UnitPrice 0 For Those About To Rock (We Salute You) 0.99 1 Balls to the Wall 0.99 2 Fast As a Shark 0.99 3 Restless and Wild 0.99 4 Princess of the Dawn 0.99 5 Put The Finger On You 0.99 6 Let's Get It Up 0.99 7 Inject The Venom 0.99 8 Snowballed 0.99 9 Evil Walks 0.99 >>> template = ''' ... SELECT ... '{{ name }}' as table_name, ... COUNT(*) as cnt ... FROM ... {{ name }} ... GROUP BY ... table_name ... ''' >>> data = [ ... {"name": "Album"}, ... {"name": "Artist"}, ... {"name": "Track"} ... ] >>> db.query(q, data=data) table_name cnt 0 Album 347 1 Artist 275 2 Track 3503 >>> q = ''' ... SELECT ... {{#cols}} ... {{#if @last}} ... {{ . }} ... {{else}} ... {{ . }} , ... {{/if}} ... {{/cols}} ... FROM ... Album; ... 
''' >>> data = {"cols": ["AlbumId", "Title", "ArtistId"]} >>> len(db.query(q, data=data, union=False)) 347 db.query(q, data=data, union=False) AlbumId Title ArtistId 0 1 For Those About To Rock We Salute You 1 1 2 Balls to the Wall 2 2 3 Restless and Wild 2 3 4 Let There Be Rock 1 4 5 Big Ones 3 ] if name[data] begin[:] variable[q] assign[=] call[name[self]._apply_handlebars, parameter[name[q], name[data], name[union]]] if name[limit] begin[:] variable[q] assign[=] call[name[self]._assign_limit, parameter[name[q], name[limit]]] return[call[name[pd].read_sql, parameter[name[q], name[self].con]]]
keyword[def] identifier[query] ( identifier[self] , identifier[q] , identifier[data] = keyword[None] , identifier[union] = keyword[True] , identifier[limit] = keyword[None] ): literal[string] keyword[if] identifier[data] : identifier[q] = identifier[self] . identifier[_apply_handlebars] ( identifier[q] , identifier[data] , identifier[union] ) keyword[if] identifier[limit] : identifier[q] = identifier[self] . identifier[_assign_limit] ( identifier[q] , identifier[limit] ) keyword[return] identifier[pd] . identifier[read_sql] ( identifier[q] , identifier[self] . identifier[con] )
def query(self, q, data=None, union=True, limit=None): """ Query your database with a raw string. Parameters ---------- q: str Query string to execute data: list, dict Optional argument for handlebars-queries. Data will be passed to the template and rendered using handlebars. union: bool Whether or not "UNION ALL" handlebars templates. This will return any handlebars queries as a single data frame. limit: int Number of records to return Examples -------- >>> from db import DemoDB >>> db = DemoDB() db.query("select * from Track").head(2) TrackId Name AlbumId MediaTypeId \\\r 0 1 For Those About To Rock (We Salute You) 1 1 1 2 Balls to the Wall 2 2 <BLANKLINE> GenreId Composer Milliseconds Bytes \\\r 0 1 Angus Young, Malcolm Young, Brian Johnson 343719 11170334 1 1 None 342562 5510424 <BLANKLINE> UnitPrice 0 0.99 1 0.99 db.query("select * from Track", limit=10) TrackId Name AlbumId MediaTypeId 0 1 For Those About To Rock (We Salute You) 1 1 1 2 Balls to the Wall 2 2 2 3 Fast As a Shark 3 2 3 4 Restless and Wild 3 2 4 5 Princess of the Dawn 3 2 5 6 Put The Finger On You 1 1 6 7 Let's Get It Up 1 1 7 8 Inject The Venom 1 1 8 9 Snowballed 1 1 9 10 Evil Walks 1 1 GenreId Composer Milliseconds 0 1 Angus Young, Malcolm Young, Brian Johnson 343719 1 1 None 342562 2 1 F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho... 230619 3 1 F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D... 252051 4 1 Deaffy & R.A. Smith-Diesel 375418 5 1 Angus Young, Malcolm Young, Brian Johnson 205662 6 1 Angus Young, Malcolm Young, Brian Johnson 233926 7 1 Angus Young, Malcolm Young, Brian Johnson 210834 8 1 Angus Young, Malcolm Young, Brian Johnson 203102 9 1 Angus Young, Malcolm Young, Brian Johnson 263497 Bytes UnitPrice 0 11170334 0.99 1 5510424 0.99 2 3990994 0.99 3 4331779 0.99 4 6290521 0.99 5 6713451 0.99 6 7636561 0.99 7 6852860 0.99 8 6599424 0.99 9 8611245 0.99 >>> q = ''' ... SELECT ... a.Title, ... t.Name, ... t.UnitPrice ... FROM ... Album a ... INNER JOIN ... Track t ... on a.AlbumId = t.AlbumId; ... ''' >>> len(db.query(q)) 3503 db.query(q, limit=10) Title 0 For Those About To Rock We Salute You 1 Balls to the Wall 2 Restless and Wild 3 Restless and Wild 4 Restless and Wild 5 For Those About To Rock We Salute You 6 For Those About To Rock We Salute You 7 For Those About To Rock We Salute You 8 For Those About To Rock We Salute You 9 For Those About To Rock We Salute You Name UnitPrice 0 For Those About To Rock (We Salute You) 0.99 1 Balls to the Wall 0.99 2 Fast As a Shark 0.99 3 Restless and Wild 0.99 4 Princess of the Dawn 0.99 5 Put The Finger On You 0.99 6 Let's Get It Up 0.99 7 Inject The Venom 0.99 8 Snowballed 0.99 9 Evil Walks 0.99 >>> template = ''' ... SELECT ... '{{ name }}' as table_name, ... COUNT(*) as cnt ... FROM ... {{ name }} ... GROUP BY ... table_name ... ''' >>> data = [ ... {"name": "Album"}, ... {"name": "Artist"}, ... {"name": "Track"} ... ] >>> db.query(q, data=data) table_name cnt 0 Album 347 1 Artist 275 2 Track 3503 >>> q = ''' ... SELECT ... {{#cols}} ... {{#if @last}} ... {{ . }} ... {{else}} ... {{ . }} , ... {{/if}} ... {{/cols}} ... FROM ... Album; ... 
''' >>> data = {"cols": ["AlbumId", "Title", "ArtistId"]} >>> len(db.query(q, data=data, union=False)) 347 db.query(q, data=data, union=False) AlbumId Title ArtistId 0 1 For Those About To Rock We Salute You 1 1 2 Balls to the Wall 2 2 3 Restless and Wild 2 3 4 Let There Be Rock 1 4 5 Big Ones 3 """ if data: q = self._apply_handlebars(q, data, union) # depends on [control=['if'], data=[]] if limit: q = self._assign_limit(q, limit) # depends on [control=['if'], data=[]] return pd.read_sql(q, self.con)
def _get_client(self, server): """ Get the pyrad client for a given server. RADIUS server is described by a 3-tuple: (<hostname>, <port>, <secret>). """ return Client( server=server[0], authport=server[1], secret=server[2], dict=self._get_dictionary(), )
def function[_get_client, parameter[self, server]]: constant[ Get the pyrad client for a given server. RADIUS server is described by a 3-tuple: (<hostname>, <port>, <secret>). ] return[call[name[Client], parameter[]]]
keyword[def] identifier[_get_client] ( identifier[self] , identifier[server] ): literal[string] keyword[return] identifier[Client] ( identifier[server] = identifier[server] [ literal[int] ], identifier[authport] = identifier[server] [ literal[int] ], identifier[secret] = identifier[server] [ literal[int] ], identifier[dict] = identifier[self] . identifier[_get_dictionary] (), )
def _get_client(self, server): """ Get the pyrad client for a given server. RADIUS server is described by a 3-tuple: (<hostname>, <port>, <secret>). """ return Client(server=server[0], authport=server[1], secret=server[2], dict=self._get_dictionary())
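For context, a hedged sketch of how such a pyrad client is typically driven; the host, port, secret, and dictionary path are placeholders, while CreateAuthPacket, PwCrypt, and SendPacket are standard pyrad calls:

from pyrad.client import Client
from pyrad.dictionary import Dictionary
from pyrad import packet

# (<hostname>, <port>, <secret>) mirrors the 3-tuple described above.
server = ('radius.example.com', 1812, b's3cret')
client = Client(server=server[0], authport=server[1],
                secret=server[2], dict=Dictionary('dictionary'))

req = client.CreateAuthPacket(code=packet.AccessRequest, User_Name='alice')
req['User-Password'] = req.PwCrypt('correct-horse')
reply = client.SendPacket(req)
print(reply.code == packet.AccessAccept)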
def _transform_abstract(plpy, module_ident): """Transform abstract, bi-directionally. Transforms an abstract using one of content columns ('abstract' or 'html') to determine which direction the transform will go (cnxml->html or html->cnxml). A transform is done on either one of them to make the other value. If no value is supplied, the trigger raises an error. If both values are supplied, the trigger will skip. """ plan = plpy.prepare("""\ SELECT a.abstractid, a.abstract, a.html FROM modules AS m NATURAL JOIN abstracts AS a WHERE m.module_ident = $1""", ('integer',)) result = plpy.execute(plan, (module_ident,), 1)[0] abstractid, cnxml, html = ( result['abstractid'], result['abstract'], result['html']) if cnxml is not None and html is not None: return # skip # TODO Prevent blank abstracts (abstract = null & html = null). msg = "produce {}->{} for abstractid={}" if cnxml is None: # Transform html->cnxml msg = msg.format('html', 'cnxml', abstractid) content = html column = 'abstract' transform_func = transform_abstract_to_cnxml else: # Transform cnxml->html msg = msg.format('cnxml', 'html', abstractid) content = cnxml column = 'html' transform_func = transform_abstract_to_html content, messages = transform_func(content, module_ident, plpy) plan = plpy.prepare( "UPDATE abstracts SET {} = $1 WHERE abstractid = $2".format(column), ('text', 'integer')) plpy.execute(plan, (content, abstractid,)) return msg
def function[_transform_abstract, parameter[plpy, module_ident]]: constant[Transform abstract, bi-directionally. Transforms an abstract using one of content columns ('abstract' or 'html') to determine which direction the transform will go (cnxml->html or html->cnxml). A transform is done on either one of them to make the other value. If no value is supplied, the trigger raises an error. If both values are supplied, the trigger will skip. ] variable[plan] assign[=] call[name[plpy].prepare, parameter[constant[SELECT a.abstractid, a.abstract, a.html FROM modules AS m NATURAL JOIN abstracts AS a WHERE m.module_ident = $1], tuple[[<ast.Constant object at 0x7da1b198ef80>]]]] variable[result] assign[=] call[call[name[plpy].execute, parameter[name[plan], tuple[[<ast.Name object at 0x7da1b198c550>]], constant[1]]]][constant[0]] <ast.Tuple object at 0x7da1b198dde0> assign[=] tuple[[<ast.Subscript object at 0x7da1b198f5e0>, <ast.Subscript object at 0x7da1b198fa60>, <ast.Subscript object at 0x7da1b198c7f0>]] if <ast.BoolOp object at 0x7da1b198e680> begin[:] return[None] variable[msg] assign[=] constant[produce {}->{} for abstractid={}] if compare[name[cnxml] is constant[None]] begin[:] variable[msg] assign[=] call[name[msg].format, parameter[constant[html], constant[cnxml], name[abstractid]]] variable[content] assign[=] name[html] variable[column] assign[=] constant[abstract] variable[transform_func] assign[=] name[transform_abstract_to_cnxml] <ast.Tuple object at 0x7da1b197f0a0> assign[=] call[name[transform_func], parameter[name[content], name[module_ident], name[plpy]]] variable[plan] assign[=] call[name[plpy].prepare, parameter[call[constant[UPDATE abstracts SET {} = $1 WHERE abstractid = $2].format, parameter[name[column]]], tuple[[<ast.Constant object at 0x7da1b197ee90>, <ast.Constant object at 0x7da1b197db70>]]]] call[name[plpy].execute, parameter[name[plan], tuple[[<ast.Name object at 0x7da1b1803eb0>, <ast.Name object at 0x7da1b1802b30>]]]] return[name[msg]]
keyword[def] identifier[_transform_abstract] ( identifier[plpy] , identifier[module_ident] ): literal[string] identifier[plan] = identifier[plpy] . identifier[prepare] ( literal[string] ,( literal[string] ,)) identifier[result] = identifier[plpy] . identifier[execute] ( identifier[plan] ,( identifier[module_ident] ,), literal[int] )[ literal[int] ] identifier[abstractid] , identifier[cnxml] , identifier[html] =( identifier[result] [ literal[string] ], identifier[result] [ literal[string] ], identifier[result] [ literal[string] ]) keyword[if] identifier[cnxml] keyword[is] keyword[not] keyword[None] keyword[and] identifier[html] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[msg] = literal[string] keyword[if] identifier[cnxml] keyword[is] keyword[None] : identifier[msg] = identifier[msg] . identifier[format] ( literal[string] , literal[string] , identifier[abstractid] ) identifier[content] = identifier[html] identifier[column] = literal[string] identifier[transform_func] = identifier[transform_abstract_to_cnxml] keyword[else] : identifier[msg] = identifier[msg] . identifier[format] ( literal[string] , literal[string] , identifier[abstractid] ) identifier[content] = identifier[cnxml] identifier[column] = literal[string] identifier[transform_func] = identifier[transform_abstract_to_html] identifier[content] , identifier[messages] = identifier[transform_func] ( identifier[content] , identifier[module_ident] , identifier[plpy] ) identifier[plan] = identifier[plpy] . identifier[prepare] ( literal[string] . identifier[format] ( identifier[column] ), ( literal[string] , literal[string] )) identifier[plpy] . identifier[execute] ( identifier[plan] ,( identifier[content] , identifier[abstractid] ,)) keyword[return] identifier[msg]
def _transform_abstract(plpy, module_ident): """Transform abstract, bi-directionally. Transforms an abstract using one of content columns ('abstract' or 'html') to determine which direction the transform will go (cnxml->html or html->cnxml). A transform is done on either one of them to make the other value. If no value is supplied, the trigger raises an error. If both values are supplied, the trigger will skip. """ plan = plpy.prepare('SELECT a.abstractid, a.abstract, a.html\nFROM modules AS m NATURAL JOIN abstracts AS a\nWHERE m.module_ident = $1', ('integer',)) result = plpy.execute(plan, (module_ident,), 1)[0] (abstractid, cnxml, html) = (result['abstractid'], result['abstract'], result['html']) if cnxml is not None and html is not None: return # skip # depends on [control=['if'], data=[]] # TODO Prevent blank abstracts (abstract = null & html = null). msg = 'produce {}->{} for abstractid={}' if cnxml is None: # Transform html->cnxml msg = msg.format('html', 'cnxml', abstractid) content = html column = 'abstract' transform_func = transform_abstract_to_cnxml # depends on [control=['if'], data=[]] else: # Transform cnxml->html msg = msg.format('cnxml', 'html', abstractid) content = cnxml column = 'html' transform_func = transform_abstract_to_html (content, messages) = transform_func(content, module_ident, plpy) plan = plpy.prepare('UPDATE abstracts SET {} = $1 WHERE abstractid = $2'.format(column), ('text', 'integer')) plpy.execute(plan, (content, abstractid)) return msg
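The plpy.prepare/plpy.execute pair above is PL/Python's parametrised-query API, so this trigger helper only runs inside PostgreSQL. A minimal sketch of the same pattern; the table and id are illustrative, and plpy is injected by the plpython3u runtime rather than imported:

# Body of a plpython3u function; plpy is provided by PostgreSQL.
plan = plpy.prepare(
    "SELECT abstract, html FROM abstracts WHERE abstractid = $1",
    ('integer',))
rows = plpy.execute(plan, (1,), 1)  # the third argument caps the row count
if rows:
    plpy.notice('cnxml present: %s, html present: %s' % (
        rows[0]['abstract'] is not None, rows[0]['html'] is not None))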
def add(name, gid=None, **kwargs): ''' Add the specified group CLI Example: .. code-block:: bash salt '*' group.add foo 3456 ''' ### NOTE: **kwargs isn't used here but needs to be included in this ### function for compatibility with the group.present state if info(name): raise CommandExecutionError( 'Group \'{0}\' already exists'.format(name) ) if salt.utils.stringutils.contains_whitespace(name): raise SaltInvocationError('Group name cannot contain whitespace') if name.startswith('_'): raise SaltInvocationError( 'Salt will not create groups beginning with underscores' ) if gid is not None and not isinstance(gid, int): raise SaltInvocationError('gid must be an integer') # check if gid is already in use gid_list = _list_gids() if six.text_type(gid) in gid_list: raise CommandExecutionError( 'gid \'{0}\' already exists'.format(gid) ) cmd = ['dseditgroup', '-o', 'create'] if gid: cmd.extend(['-i', gid]) cmd.append(name) return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
def function[add, parameter[name, gid]]: constant[ Add the specified group CLI Example: .. code-block:: bash salt '*' group.add foo 3456 ] if call[name[info], parameter[name[name]]] begin[:] <ast.Raise object at 0x7da18ede47c0> if call[name[salt].utils.stringutils.contains_whitespace, parameter[name[name]]] begin[:] <ast.Raise object at 0x7da18ede4d90> if call[name[name].startswith, parameter[constant[_]]] begin[:] <ast.Raise object at 0x7da18ede5c00> if <ast.BoolOp object at 0x7da18ede6800> begin[:] <ast.Raise object at 0x7da18ede49d0> variable[gid_list] assign[=] call[name[_list_gids], parameter[]] if compare[call[name[six].text_type, parameter[name[gid]]] in name[gid_list]] begin[:] <ast.Raise object at 0x7da18ede4190> variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18ede4fd0>, <ast.Constant object at 0x7da18ede58a0>, <ast.Constant object at 0x7da18ede50c0>]] if name[gid] begin[:] call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da18ede4c10>, <ast.Name object at 0x7da18ede7ca0>]]]] call[name[cmd].append, parameter[name[name]]] return[compare[call[call[name[__salt__]][constant[cmd.retcode]], parameter[name[cmd]]] equal[==] constant[0]]]
keyword[def] identifier[add] ( identifier[name] , identifier[gid] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[info] ( identifier[name] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[name] ) ) keyword[if] identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[contains_whitespace] ( identifier[name] ): keyword[raise] identifier[SaltInvocationError] ( literal[string] ) keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): keyword[raise] identifier[SaltInvocationError] ( literal[string] ) keyword[if] identifier[gid] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[gid] , identifier[int] ): keyword[raise] identifier[SaltInvocationError] ( literal[string] ) identifier[gid_list] = identifier[_list_gids] () keyword[if] identifier[six] . identifier[text_type] ( identifier[gid] ) keyword[in] identifier[gid_list] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[gid] ) ) identifier[cmd] =[ literal[string] , literal[string] , literal[string] ] keyword[if] identifier[gid] : identifier[cmd] . identifier[extend] ([ literal[string] , identifier[gid] ]) identifier[cmd] . identifier[append] ( identifier[name] ) keyword[return] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )== literal[int]
def add(name, gid=None, **kwargs): """ Add the specified group CLI Example: .. code-block:: bash salt '*' group.add foo 3456 """ ### NOTE: **kwargs isn't used here but needs to be included in this ### function for compatibility with the group.present state if info(name): raise CommandExecutionError("Group '{0}' already exists".format(name)) # depends on [control=['if'], data=[]] if salt.utils.stringutils.contains_whitespace(name): raise SaltInvocationError('Group name cannot contain whitespace') # depends on [control=['if'], data=[]] if name.startswith('_'): raise SaltInvocationError('Salt will not create groups beginning with underscores') # depends on [control=['if'], data=[]] if gid is not None and (not isinstance(gid, int)): raise SaltInvocationError('gid must be an integer') # depends on [control=['if'], data=[]] # check if gid is already in use gid_list = _list_gids() if six.text_type(gid) in gid_list: raise CommandExecutionError("gid '{0}' already exists".format(gid)) # depends on [control=['if'], data=[]] cmd = ['dseditgroup', '-o', 'create'] if gid: cmd.extend(['-i', gid]) # depends on [control=['if'], data=[]] cmd.append(name) return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
def asynchronous(self, fun, low, user='UNKNOWN', pub=None): ''' Execute the function in a multiprocess and return the event tag to use to watch for the return ''' async_pub = pub if pub is not None else self._gen_async_pub() proc = salt.utils.process.SignalHandlingMultiprocessingProcess( target=self._proc_function, args=(fun, low, user, async_pub['tag'], async_pub['jid'])) with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): # Reset current signals before starting the process in # order not to inherit the current signal handlers proc.start() proc.join() # MUST join, otherwise we leave zombies all over return async_pub
def function[asynchronous, parameter[self, fun, low, user, pub]]: constant[ Execute the function in a multiprocess and return the event tag to use to watch for the return ] variable[async_pub] assign[=] <ast.IfExp object at 0x7da18bccb8e0> variable[proc] assign[=] call[name[salt].utils.process.SignalHandlingMultiprocessingProcess, parameter[]] with call[name[salt].utils.process.default_signals, parameter[name[signal].SIGINT, name[signal].SIGTERM]] begin[:] call[name[proc].start, parameter[]] call[name[proc].join, parameter[]] return[name[async_pub]]
keyword[def] identifier[asynchronous] ( identifier[self] , identifier[fun] , identifier[low] , identifier[user] = literal[string] , identifier[pub] = keyword[None] ): literal[string] identifier[async_pub] = identifier[pub] keyword[if] identifier[pub] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[_gen_async_pub] () identifier[proc] = identifier[salt] . identifier[utils] . identifier[process] . identifier[SignalHandlingMultiprocessingProcess] ( identifier[target] = identifier[self] . identifier[_proc_function] , identifier[args] =( identifier[fun] , identifier[low] , identifier[user] , identifier[async_pub] [ literal[string] ], identifier[async_pub] [ literal[string] ])) keyword[with] identifier[salt] . identifier[utils] . identifier[process] . identifier[default_signals] ( identifier[signal] . identifier[SIGINT] , identifier[signal] . identifier[SIGTERM] ): identifier[proc] . identifier[start] () identifier[proc] . identifier[join] () keyword[return] identifier[async_pub]
def asynchronous(self, fun, low, user='UNKNOWN', pub=None): """ Execute the function in a multiprocess and return the event tag to use to watch for the return """ async_pub = pub if pub is not None else self._gen_async_pub() proc = salt.utils.process.SignalHandlingMultiprocessingProcess(target=self._proc_function, args=(fun, low, user, async_pub['tag'], async_pub['jid'])) with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): # Reset current signals before starting the process in # order not to inherit the current signal handlers proc.start() # depends on [control=['with'], data=[]] proc.join() # MUST join, otherwise we leave zombies all over return async_pub
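The reset-signals-then-join pattern above can be sketched with the standard library alone; this stand-in replaces Salt's helpers with plain `multiprocessing` and `signal` calls, just to show why the default handlers are restored before `start()`:

import signal
import multiprocessing

def _worker(tag):
    print('running job', tag)

def run_job(tag):
    # Restore default handlers so the child does not inherit the
    # parent's custom SIGINT/SIGTERM handlers across the fork.
    old_int = signal.signal(signal.SIGINT, signal.SIG_DFL)
    old_term = signal.signal(signal.SIGTERM, signal.SIG_DFL)
    try:
        proc = multiprocessing.Process(target=_worker, args=(tag,))
        proc.start()
    finally:
        signal.signal(signal.SIGINT, old_int)
        signal.signal(signal.SIGTERM, old_term)
    proc.join()  # MUST join, or the finished child lingers as a zombie
    return tag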
def _add_group(self, group, attrs): """ :param group: group_name :param attrs: :return: """ self.handle.create_group(group) if attrs is not None: self._add_attributes(group, attrs)
def function[_add_group, parameter[self, group, attrs]]: constant[ :param group: group_name :param attrs: :return: ] call[name[self].handle.create_group, parameter[name[group]]] if compare[name[attrs] is_not constant[None]] begin[:] call[name[self]._add_attributes, parameter[name[group], name[attrs]]]
keyword[def] identifier[_add_group] ( identifier[self] , identifier[group] , identifier[attrs] ): literal[string] identifier[self] . identifier[handle] . identifier[create_group] ( identifier[group] ) keyword[if] identifier[attrs] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_add_attributes] ( identifier[group] , identifier[attrs] )
def _add_group(self, group, attrs): """ :param group: group_name :param attrs: :return: """ self.handle.create_group(group) if attrs is not None: self._add_attributes(group, attrs) # depends on [control=['if'], data=['attrs']]
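`handle.create_group` matches the h5py API, so a plausible stand-alone version of the same pattern (assuming `self.handle` is an `h5py.File` and `_add_attributes` writes into `group.attrs`) is:

import h5py

with h5py.File('store.h5', 'w') as handle:
    group = handle.create_group('raw_data')
    attrs = {'sample_rate': 30000, 'units': 'uV'}  # hypothetical metadata
    if attrs is not None:
        for key, value in attrs.items():
            group.attrs[key] = value  # what _add_attributes presumably does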
def get_type_string(item): """Return type string of an object.""" if isinstance(item, DataFrame): return "DataFrame" if isinstance(item, Index): return type(item).__name__ if isinstance(item, Series): return "Series" found = re.findall(r"<(?:type|class) '(\S*)'>", to_text_string(type(item))) if found: return found[0]
def function[get_type_string, parameter[item]]: constant[Return type string of an object.] if call[name[isinstance], parameter[name[item], name[DataFrame]]] begin[:] return[constant[DataFrame]] if call[name[isinstance], parameter[name[item], name[Index]]] begin[:] return[call[name[type], parameter[name[item]]].__name__] if call[name[isinstance], parameter[name[item], name[Series]]] begin[:] return[constant[Series]] variable[found] assign[=] call[name[re].findall, parameter[constant[<(?:type|class) '(\S*)'>], call[name[to_text_string], parameter[call[name[type], parameter[name[item]]]]]]] if name[found] begin[:] return[call[name[found]][constant[0]]]
keyword[def] identifier[get_type_string] ( identifier[item] ): literal[string] keyword[if] identifier[isinstance] ( identifier[item] , identifier[DataFrame] ): keyword[return] literal[string] keyword[if] identifier[isinstance] ( identifier[item] , identifier[Index] ): keyword[return] identifier[type] ( identifier[item] ). identifier[__name__] keyword[if] identifier[isinstance] ( identifier[item] , identifier[Series] ): keyword[return] literal[string] identifier[found] = identifier[re] . identifier[findall] ( literal[string] , identifier[to_text_string] ( identifier[type] ( identifier[item] ))) keyword[if] identifier[found] : keyword[return] identifier[found] [ literal[int] ]
def get_type_string(item): """Return type string of an object.""" if isinstance(item, DataFrame): return 'DataFrame' # depends on [control=['if'], data=[]] if isinstance(item, Index): return type(item).__name__ # depends on [control=['if'], data=[]] if isinstance(item, Series): return 'Series' # depends on [control=['if'], data=[]] found = re.findall("<(?:type|class) '(\\S*)'>", to_text_string(type(item))) if found: return found[0] # depends on [control=['if'], data=[]]
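Behaviour sketch for the helper above: pandas containers short-circuit, everything else falls through to the regex over the type's repr, which accepts both the Python 2 `<type '...'>` and Python 3 `<class '...'>` spellings:

import pandas as pd

print(get_type_string(pd.DataFrame()))          # 'DataFrame'
print(get_type_string(pd.Series(dtype=float)))  # 'Series'
print(get_type_string(pd.RangeIndex(3)))        # 'RangeIndex' (Index subclass)
print(get_type_string(42))                      # 'int', via the regex branch
print(get_type_string(object()))                # 'object'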
def getResourceValue(self,ep,res,cbfn="",noResp=False,cacheOnly=False): """ Get value of a specific resource on a specific endpoint. :param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback function to be called on completion :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: value of the resource, usually a string :rtype: asyncResult """ q = {} result = asyncResult(callback=cbfn) #set callback fn for use in async handler result.endpoint = ep result.resource = res if noResp or cacheOnly: q['noResp'] = 'true' if noResp == True else 'false' q['cacheOnly'] = 'true' if cacheOnly == True else 'false' # make query data = self._getURL("/endpoints/"+ep+res, query=q) result.fill(data) if data.status_code == 200: # immediate success result.error = False result.is_done = True if cbfn: cbfn(result) return result elif data.status_code == 202: self.database['async-responses'][json.loads(data.content)["async-response-id"]]= result else: # fail result.error = response_codes("resource",data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result
def function[getResourceValue, parameter[self, ep, res, cbfn, noResp, cacheOnly]]: constant[ Get value of a specific resource on a specific endpoint. :param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback function to be called on completion :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: value of the resource, usually a string :rtype: asyncResult ] variable[q] assign[=] dictionary[[], []] variable[result] assign[=] call[name[asyncResult], parameter[]] name[result].endpoint assign[=] name[ep] name[result].resource assign[=] name[res] if <ast.BoolOp object at 0x7da204344a00> begin[:] call[name[q]][constant[noResp]] assign[=] <ast.IfExp object at 0x7da204345db0> call[name[q]][constant[cacheOnly]] assign[=] <ast.IfExp object at 0x7da204347ca0> variable[data] assign[=] call[name[self]._getURL, parameter[binary_operation[binary_operation[constant[/endpoints/] + name[ep]] + name[res]]]] call[name[result].fill, parameter[name[data]]] if compare[name[data].status_code equal[==] constant[200]] begin[:] name[result].error assign[=] constant[False] name[result].is_done assign[=] constant[True] if name[cbfn] begin[:] call[name[cbfn], parameter[name[result]]] return[name[result]] name[result].raw_data assign[=] name[data].content name[result].status_code assign[=] name[data].status_code return[name[result]]
keyword[def] identifier[getResourceValue] ( identifier[self] , identifier[ep] , identifier[res] , identifier[cbfn] = literal[string] , identifier[noResp] = keyword[False] , identifier[cacheOnly] = keyword[False] ): literal[string] identifier[q] ={} identifier[result] = identifier[asyncResult] ( identifier[callback] = identifier[cbfn] ) identifier[result] . identifier[endpoint] = identifier[ep] identifier[result] . identifier[resource] = identifier[res] keyword[if] identifier[noResp] keyword[or] identifier[cacheOnly] : identifier[q] [ literal[string] ]= literal[string] keyword[if] identifier[noResp] == keyword[True] keyword[else] literal[string] identifier[q] [ literal[string] ]= literal[string] keyword[if] identifier[cacheOnly] == keyword[True] keyword[else] literal[string] identifier[data] = identifier[self] . identifier[_getURL] ( literal[string] + identifier[ep] + identifier[res] , identifier[query] = identifier[q] ) identifier[result] . identifier[fill] ( identifier[data] ) keyword[if] identifier[data] . identifier[status_code] == literal[int] : identifier[result] . identifier[error] = keyword[False] identifier[result] . identifier[is_done] = keyword[True] keyword[if] identifier[cbfn] : identifier[cbfn] ( identifier[result] ) keyword[return] identifier[result] keyword[elif] identifier[data] . identifier[status_code] == literal[int] : identifier[self] . identifier[database] [ literal[string] ][ identifier[json] . identifier[loads] ( identifier[data] . identifier[content] )[ literal[string] ]]= identifier[result] keyword[else] : identifier[result] . identifier[error] = identifier[response_codes] ( literal[string] , identifier[data] . identifier[status_code] ) identifier[result] . identifier[is_done] = keyword[True] identifier[result] . identifier[raw_data] = identifier[data] . identifier[content] identifier[result] . identifier[status_code] = identifier[data] . identifier[status_code] keyword[return] identifier[result]
def getResourceValue(self, ep, res, cbfn='', noResp=False, cacheOnly=False): """ Get value of a specific resource on a specific endpoint. :param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback function to be called on completion :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: value of the resource, usually a string :rtype: asyncResult """ q = {} result = asyncResult(callback=cbfn) #set callback fn for use in async handler result.endpoint = ep result.resource = res if noResp or cacheOnly: q['noResp'] = 'true' if noResp == True else 'false' q['cacheOnly'] = 'true' if cacheOnly == True else 'false' # depends on [control=['if'], data=[]] # make query data = self._getURL('/endpoints/' + ep + res, query=q) result.fill(data) if data.status_code == 200: # immediate success result.error = False result.is_done = True if cbfn: cbfn(result) # depends on [control=['if'], data=[]] return result # depends on [control=['if'], data=[]] elif data.status_code == 202: self.database['async-responses'][json.loads(data.content)['async-response-id']] = result # depends on [control=['if'], data=[]] else: # fail result.error = response_codes('resource', data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result
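Caller-side sketch, assuming `api` is an instance of the connector class this method belongs to (the endpoint name and resource path are invented):

def on_value(result):
    if result.error:
        print('lookup failed:', result.error)
    else:
        print(result.endpoint, result.resource, '=', result.raw_data)

# cacheOnly avoids waking the endpoint: a 200 fires the callback at once,
# a 202 parks the asyncResult until its async-response-id comes back.
result = api.getResourceValue('device-001', '/3/0/0', cbfn=on_value, cacheOnly=True)
if result.is_done and not result.error:
    print('value:', result.raw_data)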
def single_side_pathway_enrichment(pathway_definitions, gene_signature, n_genes): """Identify overrepresented pathways using the Fisher's exact test for significance on a given pathway definition and gene signature. (FDR correction for multiple testing is applied in `_significant_pathways_dataframe`). Parameters ----------- pathway_definitions : dict(str -> set(str)) Pathway definitions, *post*-overlap-correction if this function is called from `pathway_enrichment_with_overlap_correction`. A pathway (key) is defined by a set of genes (value). gene_signature : set(str) The set of genes we consider to be enriched in a feature. n_genes : int The total number of genes for which we have assigned weights in the features of an unsupervised model. Returns ----------- pandas.Series, for each pathway, the p-value from applying the Fisher's exact test. """ if not gene_signature: return pd.Series(name="p-value") pvalues_list = [] for pathway, definition in pathway_definitions.items(): if isinstance(definition, tuple): definition = set.union(*definition) both_definition_and_signature = len(definition & gene_signature) in_definition_not_signature = (len(definition) - both_definition_and_signature) in_signature_not_definition = (len(gene_signature) - both_definition_and_signature) neither_definition_nor_signature = (n_genes - both_definition_and_signature - in_definition_not_signature - in_signature_not_definition) contingency_table = np.array( [[both_definition_and_signature, in_signature_not_definition], [in_definition_not_signature, neither_definition_nor_signature]]) try: _, pvalue = stats.fisher_exact( contingency_table, alternative="greater") pvalues_list.append(pvalue) # FPE can occur when `neither_definition_nor_signature` is very # large and `both_definition_and_signature` is very small (near zero) except FloatingPointError: pvalues_list.append(1.0) pvalues_series = pd.Series( pvalues_list, index=pathway_definitions.keys(), name="p-value") return pvalues_series
def function[single_side_pathway_enrichment, parameter[pathway_definitions, gene_signature, n_genes]]: constant[Identify overrepresented pathways using the Fisher's exact test for significance on a given pathway definition and gene signature. (FDR correction for multiple testing is applied in `_significant_pathways_dataframe`). Parameters ----------- pathway_definitions : dict(str -> set(str)) Pathway definitions, *post*-overlap-correction if this function is called from `pathway_enrichment_with_overlap_correction`. A pathway (key) is defined by a set of genes (value). gene_signature : set(str) The set of genes we consider to be enriched in a feature. n_genes : int The total number of genes for which we have assigned weights in the features of an unsupervised model. Returns ----------- pandas.Series, for each pathway, the p-value from applying the Fisher's exact test. ] if <ast.UnaryOp object at 0x7da18dc9b400> begin[:] return[call[name[pd].Series, parameter[]]] variable[pvalues_list] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18dc9b1c0>, <ast.Name object at 0x7da18dc98ee0>]]] in starred[call[name[pathway_definitions].items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[definition], name[tuple]]] begin[:] variable[definition] assign[=] call[name[set].union, parameter[<ast.Starred object at 0x7da18dc9a020>]] variable[both_definition_and_signature] assign[=] call[name[len], parameter[binary_operation[name[definition] <ast.BitAnd object at 0x7da2590d6b60> name[gene_signature]]]] variable[in_definition_not_signature] assign[=] binary_operation[call[name[len], parameter[name[definition]]] - name[both_definition_and_signature]] variable[in_signature_not_definition] assign[=] binary_operation[call[name[len], parameter[name[gene_signature]]] - name[both_definition_and_signature]] variable[neither_definition_nor_signature] assign[=] binary_operation[binary_operation[binary_operation[name[n_genes] - name[both_definition_and_signature]] - name[in_definition_not_signature]] - name[in_signature_not_definition]] variable[contingency_table] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da18dc99ae0>, <ast.List object at 0x7da18dc991b0>]]]] <ast.Try object at 0x7da18dc9bc10> variable[pvalues_series] assign[=] call[name[pd].Series, parameter[name[pvalues_list]]] return[name[pvalues_series]]
keyword[def] identifier[single_side_pathway_enrichment] ( identifier[pathway_definitions] , identifier[gene_signature] , identifier[n_genes] ): literal[string] keyword[if] keyword[not] identifier[gene_signature] : keyword[return] identifier[pd] . identifier[Series] ( identifier[name] = literal[string] ) identifier[pvalues_list] =[] keyword[for] identifier[pathway] , identifier[definition] keyword[in] identifier[pathway_definitions] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[definition] , identifier[tuple] ): identifier[definition] = identifier[set] . identifier[union] (* identifier[definition] ) identifier[both_definition_and_signature] = identifier[len] ( identifier[definition] & identifier[gene_signature] ) identifier[in_definition_not_signature] =( identifier[len] ( identifier[definition] )- identifier[both_definition_and_signature] ) identifier[in_signature_not_definition] =( identifier[len] ( identifier[gene_signature] )- identifier[both_definition_and_signature] ) identifier[neither_definition_nor_signature] =( identifier[n_genes] - identifier[both_definition_and_signature] - identifier[in_definition_not_signature] - identifier[in_signature_not_definition] ) identifier[contingency_table] = identifier[np] . identifier[array] ( [[ identifier[both_definition_and_signature] , identifier[in_signature_not_definition] ], [ identifier[in_definition_not_signature] , identifier[neither_definition_nor_signature] ]]) keyword[try] : identifier[_] , identifier[pvalue] = identifier[stats] . identifier[fisher_exact] ( identifier[contingency_table] , identifier[alternative] = literal[string] ) identifier[pvalues_list] . identifier[append] ( identifier[pvalue] ) keyword[except] identifier[FloatingPointError] : identifier[pvalues_list] . identifier[append] ( literal[int] ) identifier[pvalues_series] = identifier[pd] . identifier[Series] ( identifier[pvalues_list] , identifier[index] = identifier[pathway_definitions] . identifier[keys] (), identifier[name] = literal[string] ) keyword[return] identifier[pvalues_series]
def single_side_pathway_enrichment(pathway_definitions, gene_signature, n_genes): """Identify overrepresented pathways using the Fisher's exact test for significance on a given pathway definition and gene signature. (FDR correction for multiple testing is applied in `_significant_pathways_dataframe`). Parameters ----------- pathway_definitions : dict(str -> set(str)) Pathway definitions, *post*-overlap-correction if this function is called from `pathway_enrichment_with_overlap_correction`. A pathway (key) is defined by a set of genes (value). gene_signature : set(str) The set of genes we consider to be enriched in a feature. n_genes : int The total number of genes for which we have assigned weights in the features of an unsupervised model. Returns ----------- pandas.Series, for each pathway, the p-value from applying the Fisher's exact test. """ if not gene_signature: return pd.Series(name='p-value') # depends on [control=['if'], data=[]] pvalues_list = [] for (pathway, definition) in pathway_definitions.items(): if isinstance(definition, tuple): definition = set.union(*definition) # depends on [control=['if'], data=[]] both_definition_and_signature = len(definition & gene_signature) in_definition_not_signature = len(definition) - both_definition_and_signature in_signature_not_definition = len(gene_signature) - both_definition_and_signature neither_definition_nor_signature = n_genes - both_definition_and_signature - in_definition_not_signature - in_signature_not_definition contingency_table = np.array([[both_definition_and_signature, in_signature_not_definition], [in_definition_not_signature, neither_definition_nor_signature]]) try: (_, pvalue) = stats.fisher_exact(contingency_table, alternative='greater') pvalues_list.append(pvalue) # depends on [control=['try'], data=[]] # FPE can occur when `neither_definition_nor_signature` is very # large and `both_definition_and_signature` is very small (near zero) except FloatingPointError: pvalues_list.append(1.0) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] pvalues_series = pd.Series(pvalues_list, index=pathway_definitions.keys(), name='p-value') return pvalues_series
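A worked instance of the 2x2 contingency table the function builds, with toy sets (gene names and counts are fabricated) and the same `scipy.stats.fisher_exact` call:

from scipy import stats

definition = {'g1', 'g2', 'g3', 'g4'}            # genes in the pathway
gene_signature = {'g2', 'g3', 'g9'}              # genes enriched in a feature
n_genes = 100                                    # background universe

both = len(definition & gene_signature)          # 2
def_only = len(definition) - both                # 2
sig_only = len(gene_signature) - both            # 1
neither = n_genes - both - def_only - sig_only   # 95

table = [[both, sig_only], [def_only, neither]]
_, pvalue = stats.fisher_exact(table, alternative='greater')
print(pvalue)  # one-sided p-value for over-representation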
def cluster_node_present(name, node, extra_args=None): ''' Add a node to the Pacemaker cluster via PCS Should be run on one cluster node only (there may be races) Can only be run on an already setup/added node name Irrelevant, not used (recommended: pcs_setup__node_add_{{node}}) node node that should be added extra_args list of extra args for the \'pcs cluster node add\' command Example: .. code-block:: yaml pcs_setup__node_add_node1.example.com: pcs.cluster_node_present: - node: node1.example.com - extra_args: - '--start' - '--enable' ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} node_add_required = True current_nodes = [] is_member_cmd = ['pcs', 'status', 'nodes', 'corosync'] is_member = __salt__['cmd.run_all'](is_member_cmd, output_loglevel='trace', python_shell=False) log.trace('Output of pcs status nodes corosync: %s', is_member) for line in is_member['stdout'].splitlines(): try: key, value = [x.strip() for x in line.split(':')] except ValueError: continue else: if not value or key not in ('Offline', 'Online'): continue values = value.split(':') if node in values: node_add_required = False ret['comment'] += 'Node {0} is already member of the cluster\n'.format(node) else: current_nodes += values if not node_add_required: return ret if __opts__['test']: ret['result'] = None ret['comment'] += 'Node {0} is set to be added to the cluster\n'.format(node) return ret if not isinstance(extra_args, (list, tuple)): extra_args = [] node_add = __salt__['pcs.cluster_node_add'](node=node, extra_args=extra_args) log.trace('Output of pcs.cluster_node_add: %s', node_add) node_add_dict = {} for line in node_add['stdout'].splitlines(): log.trace('line: %s', line) log.trace('line.split(:).len: %s', len(line.split(':'))) if len(line.split(':')) in [2]: current_node = line.split(':')[0].strip() current_node_add_state = line.split(':')[1].strip() if current_node in current_nodes + [node]: node_add_dict.update({current_node: current_node_add_state}) log.trace('node_add_dict: %s', node_add_dict) for current_node in current_nodes: if current_node in node_add_dict: if node_add_dict[current_node] not in ['Corosync updated']: ret['result'] = False ret['comment'] += 'Failed to update corosync.conf on node {0}\n'.format(current_node) ret['comment'] += '{0}: node_add_dict: {1}\n'.format(current_node, node_add_dict[current_node]) else: ret['result'] = False ret['comment'] += 'Failed to update corosync.conf on node {0}\n'.format(current_node) if node in node_add_dict and node_add_dict[node] in ['Succeeded', 'Success']: ret['comment'] += 'Added node {0}\n'.format(node) ret['changes'].update({node: {'old': '', 'new': 'Added'}}) else: ret['result'] = False ret['comment'] += 'Failed to add node {0}\n'.format(node) if node in node_add_dict: ret['comment'] += '{0}: node_add_dict: {1}\n'.format(node, node_add_dict[node]) ret['comment'] += six.text_type(node_add) log.trace('ret: %s', ret) return ret
def function[cluster_node_present, parameter[name, node, extra_args]]: constant[ Add a node to the Pacemaker cluster via PCS Should be run on one cluster node only (there may be races) Can only be run on an already setup/added node name Irrelevant, not used (recommended: pcs_setup__node_add_{{node}}) node node that should be added extra_args list of extra args for the 'pcs cluster node add' command Example: .. code-block:: yaml pcs_setup__node_add_node1.example.com: pcs.cluster_node_present: - node: node1.example.com - extra_args: - '--start' - '--enable' ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ad7b0>, <ast.Constant object at 0x7da1b26aefb0>, <ast.Constant object at 0x7da1b26adf30>, <ast.Constant object at 0x7da1b26ad4e0>], [<ast.Name object at 0x7da1b26ad930>, <ast.Constant object at 0x7da1b26ae8f0>, <ast.Constant object at 0x7da1b26af250>, <ast.Dict object at 0x7da1b26ac550>]] variable[node_add_required] assign[=] constant[True] variable[current_nodes] assign[=] list[[]] variable[is_member_cmd] assign[=] list[[<ast.Constant object at 0x7da1b26acf70>, <ast.Constant object at 0x7da1b26af340>, <ast.Constant object at 0x7da1b26ae530>, <ast.Constant object at 0x7da1b26ac910>]] variable[is_member] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[is_member_cmd]]] call[name[log].trace, parameter[constant[Output of pcs status nodes corosync: %s], name[is_member]]] for taget[name[line]] in starred[call[call[name[is_member]][constant[stdout]].splitlines, parameter[]]] begin[:] <ast.Try object at 0x7da1b26ae7a0> if <ast.UnaryOp object at 0x7da207f03490> begin[:] return[name[ret]] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[result]] assign[=] constant[None] <ast.AugAssign object at 0x7da207f02740> return[name[ret]] if <ast.UnaryOp object at 0x7da207f03c10> begin[:] variable[extra_args] assign[=] list[[]] variable[node_add] assign[=] call[call[name[__salt__]][constant[pcs.cluster_node_add]], parameter[]] call[name[log].trace, parameter[constant[Output of pcs.cluster_node_add: %s], name[node_add]]] variable[node_add_dict] assign[=] dictionary[[], []] for taget[name[line]] in starred[call[call[name[node_add]][constant[stdout]].splitlines, parameter[]]] begin[:] call[name[log].trace, parameter[constant[line: %s], name[line]]] call[name[log].trace, parameter[constant[line.split(:).len: %s], call[name[len], parameter[call[name[line].split, parameter[constant[:]]]]]]] if compare[call[name[len], parameter[call[name[line].split, parameter[constant[:]]]]] in list[[<ast.Constant object at 0x7da18dc9a650>]]] begin[:] variable[current_node] assign[=] call[call[call[name[line].split, parameter[constant[:]]]][constant[0]].strip, parameter[]] variable[current_node_add_state] assign[=] call[call[call[name[line].split, parameter[constant[:]]]][constant[1]].strip, parameter[]] if compare[name[current_node] in binary_operation[name[current_nodes] + list[[<ast.Name object at 0x7da18dc9af80>]]]] begin[:] call[name[node_add_dict].update, parameter[dictionary[[<ast.Name object at 0x7da18dc99b10>], [<ast.Name object at 0x7da18dc9bd60>]]]] call[name[log].trace, parameter[constant[node_add_dict: %s], name[node_add_dict]]] for taget[name[current_node]] in starred[name[current_nodes]] begin[:] if compare[name[current_node] in name[node_add_dict]] begin[:] if compare[call[name[node_add_dict]][name[current_node]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da18dc98d90>]]] begin[:] call[name[ret]][constant[result]] assign[=] constant[False] <ast.AugAssign object at 0x7da18dc99ab0> <ast.AugAssign object at 0x7da18dc9a3e0> if <ast.BoolOp object at 0x7da18dc989a0> begin[:] <ast.AugAssign object at 0x7da18dc9a1a0> call[call[name[ret]][constant[changes]].update, parameter[dictionary[[<ast.Name object at 0x7da18dc9baf0>], [<ast.Dict object at 0x7da18dc9af50>]]]] call[name[log].trace, parameter[constant[ret: %s], name[ret]]] return[name[ret]]
keyword[def] identifier[cluster_node_present] ( identifier[name] , identifier[node] , identifier[extra_args] = keyword[None] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] :{}} identifier[node_add_required] = keyword[True] identifier[current_nodes] =[] identifier[is_member_cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] ] identifier[is_member] = identifier[__salt__] [ literal[string] ]( identifier[is_member_cmd] , identifier[output_loglevel] = literal[string] , identifier[python_shell] = keyword[False] ) identifier[log] . identifier[trace] ( literal[string] , identifier[is_member] ) keyword[for] identifier[line] keyword[in] identifier[is_member] [ literal[string] ]. identifier[splitlines] (): keyword[try] : identifier[key] , identifier[value] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[line] . identifier[split] ( literal[string] )] keyword[except] identifier[ValueError] : keyword[continue] keyword[else] : keyword[if] keyword[not] identifier[value] keyword[or] identifier[key] keyword[not] keyword[in] ( literal[string] , literal[string] ): keyword[continue] identifier[values] = identifier[value] . identifier[split] ( literal[string] ) keyword[if] identifier[node] keyword[in] identifier[values] : identifier[node_add_required] = keyword[False] identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[node] ) keyword[else] : identifier[current_nodes] += identifier[values] keyword[if] keyword[not] identifier[node_add_required] : keyword[return] identifier[ret] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[node] ) keyword[return] identifier[ret] keyword[if] keyword[not] identifier[isinstance] ( identifier[extra_args] ,( identifier[list] , identifier[tuple] )): identifier[extra_args] =[] identifier[node_add] = identifier[__salt__] [ literal[string] ]( identifier[node] = identifier[node] , identifier[extra_args] = identifier[extra_args] ) identifier[log] . identifier[trace] ( literal[string] , identifier[node_add] ) identifier[node_add_dict] ={} keyword[for] identifier[line] keyword[in] identifier[node_add] [ literal[string] ]. identifier[splitlines] (): identifier[log] . identifier[trace] ( literal[string] , identifier[line] ) identifier[log] . identifier[trace] ( literal[string] , identifier[len] ( identifier[line] . identifier[split] ( literal[string] ))) keyword[if] identifier[len] ( identifier[line] . identifier[split] ( literal[string] )) keyword[in] [ literal[int] ]: identifier[current_node] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] () identifier[current_node_add_state] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] () keyword[if] identifier[current_node] keyword[in] identifier[current_nodes] +[ identifier[node] ]: identifier[node_add_dict] . identifier[update] ({ identifier[current_node] : identifier[current_node_add_state] }) identifier[log] . identifier[trace] ( literal[string] , identifier[node_add_dict] ) keyword[for] identifier[current_node] keyword[in] identifier[current_nodes] : keyword[if] identifier[current_node] keyword[in] identifier[node_add_dict] : keyword[if] identifier[node_add_dict] [ identifier[current_node] ] keyword[not] keyword[in] [ literal[string] ]: identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[current_node] ) identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[current_node] , identifier[node_add_dict] [ identifier[current_node] ]) keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[current_node] ) keyword[if] identifier[node] keyword[in] identifier[node_add_dict] keyword[and] identifier[node_add_dict] [ identifier[node] ] keyword[in] [ literal[string] , literal[string] ]: identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[node] ) identifier[ret] [ literal[string] ]. identifier[update] ({ identifier[node] :{ literal[string] : literal[string] , literal[string] : literal[string] }}) keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[node] ) keyword[if] identifier[node] keyword[in] identifier[node_add_dict] : identifier[ret] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[node] , identifier[node_add_dict] [ identifier[node] ]) identifier[ret] [ literal[string] ]+= identifier[six] . identifier[text_type] ( identifier[node_add] ) identifier[log] . identifier[trace] ( literal[string] , identifier[ret] ) keyword[return] identifier[ret]
def cluster_node_present(name, node, extra_args=None): """ Add a node to the Pacemaker cluster via PCS Should be run on one cluster node only (there may be races) Can only be run on an already setup/added node name Irrelevant, not used (recommended: pcs_setup__node_add_{{node}}) node node that should be added extra_args list of extra args for the 'pcs cluster node add' command Example: .. code-block:: yaml pcs_setup__node_add_node1.example.com: pcs.cluster_node_present: - node: node1.example.com - extra_args: - '--start' - '--enable' """ ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} node_add_required = True current_nodes = [] is_member_cmd = ['pcs', 'status', 'nodes', 'corosync'] is_member = __salt__['cmd.run_all'](is_member_cmd, output_loglevel='trace', python_shell=False) log.trace('Output of pcs status nodes corosync: %s', is_member) for line in is_member['stdout'].splitlines(): try: (key, value) = [x.strip() for x in line.split(':')] # depends on [control=['try'], data=[]] except ValueError: continue # depends on [control=['except'], data=[]] else: if not value or key not in ('Offline', 'Online'): continue # depends on [control=['if'], data=[]] values = value.split(':') if node in values: node_add_required = False ret['comment'] += 'Node {0} is already member of the cluster\n'.format(node) # depends on [control=['if'], data=['node']] else: current_nodes += values # depends on [control=['for'], data=['line']] if not node_add_required: return ret # depends on [control=['if'], data=[]] if __opts__['test']: ret['result'] = None ret['comment'] += 'Node {0} is set to be added to the cluster\n'.format(node) return ret # depends on [control=['if'], data=[]] if not isinstance(extra_args, (list, tuple)): extra_args = [] # depends on [control=['if'], data=[]] node_add = __salt__['pcs.cluster_node_add'](node=node, extra_args=extra_args) log.trace('Output of pcs.cluster_node_add: %s', node_add) node_add_dict = {} for line in node_add['stdout'].splitlines(): log.trace('line: %s', line) log.trace('line.split(:).len: %s', len(line.split(':'))) if len(line.split(':')) in [2]: current_node = line.split(':')[0].strip() current_node_add_state = line.split(':')[1].strip() if current_node in current_nodes + [node]: node_add_dict.update({current_node: current_node_add_state}) # depends on [control=['if'], data=['current_node']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] log.trace('node_add_dict: %s', node_add_dict) for current_node in current_nodes: if current_node in node_add_dict: if node_add_dict[current_node] not in ['Corosync updated']: ret['result'] = False ret['comment'] += 'Failed to update corosync.conf on node {0}\n'.format(current_node) ret['comment'] += '{0}: node_add_dict: {1}\n'.format(current_node, node_add_dict[current_node]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['current_node', 'node_add_dict']] else: ret['result'] = False ret['comment'] += 'Failed to update corosync.conf on node {0}\n'.format(current_node) # depends on [control=['for'], data=['current_node']] if node in node_add_dict and node_add_dict[node] in ['Succeeded', 'Success']: ret['comment'] += 'Added node {0}\n'.format(node) ret['changes'].update({node: {'old': '', 'new': 'Added'}}) # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] += 'Failed to add node {0}\n'.format(node) if node in node_add_dict: ret['comment'] += '{0}: node_add_dict: {1}\n'.format(node, node_add_dict[node]) # depends on [control=['if'], data=['node', 'node_add_dict']] ret['comment'] += six.text_type(node_add) log.trace('ret: %s', ret) return ret
def scan_list(self, start_time=None, end_time=None, **kwargs): """List scans stored in Security Center in a given time range. Time is given in UNIX timestamps, assumed to be UTC. If a `datetime` is passed it is converted. If `end_time` is not specified it is NOW. If `start_time` is not specified it is 30 days previous from `end_time`. :param start_time: start of range to filter :type start_time: date, datetime, int :param end_time: end of range to filter :type end_time: date, datetime, int :return: list of dictionaries representing scans """ try: end_time = datetime.utcfromtimestamp(int(end_time)) except TypeError: if end_time is None: end_time = datetime.utcnow() try: start_time = datetime.utcfromtimestamp(int(start_time)) except TypeError: if start_time is None: start_time = end_time - timedelta(days=30) data = {"startTime": calendar.timegm(start_time.utctimetuple()), "endTime": calendar.timegm(end_time.utctimetuple())} data.update(kwargs) result = self.raw_query("scanResult", "getRange", data=data) return result["scanResults"]
def function[scan_list, parameter[self, start_time, end_time]]: constant[List scans stored in Security Center in a given time range. Time is given in UNIX timestamps, assumed to be UTC. If a `datetime` is passed it is converted. If `end_time` is not specified it is NOW. If `start_time` is not specified it is 30 days previous from `end_time`. :param start_time: start of range to filter :type start_time: date, datetime, int :param end_time: end of range to filter :type end_time: date, datetime, int :return: list of dictionaries representing scans ] <ast.Try object at 0x7da1b28c5bd0> <ast.Try object at 0x7da1b28c59f0> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b28c6290>, <ast.Constant object at 0x7da1b28c73d0>], [<ast.Call object at 0x7da1b28c66b0>, <ast.Call object at 0x7da1b28d6b60>]] call[name[data].update, parameter[name[kwargs]]] variable[result] assign[=] call[name[self].raw_query, parameter[constant[scanResult], constant[getRange]]] return[call[name[result]][constant[scanResults]]]
keyword[def] identifier[scan_list] ( identifier[self] , identifier[start_time] = keyword[None] , identifier[end_time] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[end_time] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[int] ( identifier[end_time] )) keyword[except] identifier[TypeError] : keyword[if] identifier[end_time] keyword[is] keyword[None] : identifier[end_time] = identifier[datetime] . identifier[utcnow] () keyword[try] : identifier[start_time] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[int] ( identifier[start_time] )) keyword[except] identifier[TypeError] : keyword[if] identifier[start_time] keyword[is] keyword[None] : identifier[start_time] = identifier[end_time] - identifier[timedelta] ( identifier[days] = literal[int] ) identifier[data] ={ literal[string] : identifier[calendar] . identifier[timegm] ( identifier[start_time] . identifier[utctimetuple] ()), literal[string] : identifier[calendar] . identifier[timegm] ( identifier[end_time] . identifier[utctimetuple] ())} identifier[data] . identifier[update] ( identifier[kwargs] ) identifier[result] = identifier[self] . identifier[raw_query] ( literal[string] , literal[string] , identifier[data] = identifier[data] ) keyword[return] identifier[result] [ literal[string] ]
def scan_list(self, start_time=None, end_time=None, **kwargs): """List scans stored in Security Center in a given time range. Time is given in UNIX timestamps, assumed to be UTC. If a `datetime` is passed it is converted. If `end_time` is not specified it is NOW. If `start_time` is not specified it is 30 days previous from `end_time`. :param start_time: start of range to filter :type start_time: date, datetime, int :param end_time: end of range to filter :type end_time: date, datetime, int :return: list of dictionaries representing scans """ try: end_time = datetime.utcfromtimestamp(int(end_time)) # depends on [control=['try'], data=[]] except TypeError: if end_time is None: end_time = datetime.utcnow() # depends on [control=['if'], data=['end_time']] # depends on [control=['except'], data=[]] try: start_time = datetime.utcfromtimestamp(int(start_time)) # depends on [control=['try'], data=[]] except TypeError: if start_time is None: start_time = end_time - timedelta(days=30) # depends on [control=['if'], data=['start_time']] # depends on [control=['except'], data=[]] data = {'startTime': calendar.timegm(start_time.utctimetuple()), 'endTime': calendar.timegm(end_time.utctimetuple())} data.update(kwargs) result = self.raw_query('scanResult', 'getRange', data=data) return result['scanResults']
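The UTC round trip that `scan_list` depends on, in isolation: `calendar.timegm` is the UTC inverse of `datetime.utcfromtimestamp`, unlike `time.mktime`, which assumes local time:

import calendar
from datetime import datetime, timedelta

end_time = datetime.utcnow()
start_time = end_time - timedelta(days=30)

data = {'startTime': calendar.timegm(start_time.utctimetuple()),
        'endTime': calendar.timegm(end_time.utctimetuple())}

# datetime -> epoch seconds -> datetime, UTC throughout
# (utctimetuple() drops microseconds, hence the replace())
ts = calendar.timegm(end_time.utctimetuple())
assert datetime.utcfromtimestamp(ts) == end_time.replace(microsecond=0)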
def Vm(self): r'''Molar volume of the chemical at its current phase and temperature and pressure, in units of [m^3/mol]. Utilizes the object oriented interfaces :obj:`thermo.volume.VolumeSolid`, :obj:`thermo.volume.VolumeLiquid`, and :obj:`thermo.volume.VolumeGas` to perform the actual calculation of each property. Examples -------- >>> Chemical('ethylbenzene', T=550, P=3E6).Vm 0.00017758024401627633 ''' return phase_select_property(phase=self.phase, s=self.Vms, l=self.Vml, g=self.Vmg)
def function[Vm, parameter[self]]: constant[Molar volume of the chemical at its current phase and temperature and pressure, in units of [m^3/mol]. Utilizes the object oriented interfaces :obj:`thermo.volume.VolumeSolid`, :obj:`thermo.volume.VolumeLiquid`, and :obj:`thermo.volume.VolumeGas` to perform the actual calculation of each property. Examples -------- >>> Chemical('ethylbenzene', T=550, P=3E6).Vm 0.00017758024401627633 ] return[call[name[phase_select_property], parameter[]]]
keyword[def] identifier[Vm] ( identifier[self] ): literal[string] keyword[return] identifier[phase_select_property] ( identifier[phase] = identifier[self] . identifier[phase] , identifier[s] = identifier[self] . identifier[Vms] , identifier[l] = identifier[self] . identifier[Vml] , identifier[g] = identifier[self] . identifier[Vmg] )
def Vm(self): """Molar volume of the chemical at its current phase and temperature and pressure, in units of [m^3/mol]. Utilizes the object oriented interfaces :obj:`thermo.volume.VolumeSolid`, :obj:`thermo.volume.VolumeLiquid`, and :obj:`thermo.volume.VolumeGas` to perform the actual calculation of each property. Examples -------- >>> Chemical('ethylbenzene', T=550, P=3E6).Vm 0.00017758024401627633 """ return phase_select_property(phase=self.phase, s=self.Vms, l=self.Vml, g=self.Vmg)
def read(self, size=None): """ read([size]) -> read at most size bytes, returned as a string. If the size argument is negative or None, read until EOF is reached. Return an empty string at EOF. """ if size is None or size < 0: return "".join(list(self)) else: data_chunks = [] data_readed = 0 try: while data_readed < size: chunk = self.next_chunk() data_chunks.append(chunk) data_readed += len(chunk) except StopIteration: pass if data_readed > size: last_chunk = data_chunks.pop() extra_length = data_readed - size last_chunk, extra_data = last_chunk[:-extra_length], last_chunk[-extra_length:] self.unshift(extra_data) data_chunks.append(last_chunk) return "".join(data_chunks)
def function[read, parameter[self, size]]: constant[ read([size]) -> read at most size bytes, returned as a string. If the size argument is negative or None, read until EOF is reached. Return an empty string at EOF. ] if <ast.BoolOp object at 0x7da1b1190cd0> begin[:] return[call[constant[].join, parameter[call[name[list], parameter[name[self]]]]]]
keyword[def] identifier[read] ( identifier[self] , identifier[size] = keyword[None] ): literal[string] keyword[if] identifier[size] keyword[is] keyword[None] keyword[or] identifier[size] < literal[int] : keyword[return] literal[string] . identifier[join] ( identifier[list] ( identifier[self] )) keyword[else] : identifier[data_chunks] =[] identifier[data_readed] = literal[int] keyword[try] : keyword[while] identifier[data_readed] < identifier[size] : identifier[chunk] = identifier[self] . identifier[next_chunk] () identifier[data_chunks] . identifier[append] ( identifier[chunk] ) identifier[data_readed] += identifier[len] ( identifier[chunk] ) keyword[except] identifier[StopIteration] : keyword[pass] keyword[if] identifier[data_readed] > identifier[size] : identifier[last_chunk] = identifier[data_chunks] . identifier[pop] () identifier[extra_length] = identifier[data_readed] - identifier[size] identifier[last_chunk] , identifier[extra_data] = identifier[last_chunk] [:- identifier[extra_length] ], identifier[last_chunk] [- identifier[extra_length] :] identifier[self] . identifier[unshift] ( identifier[extra_data] ) identifier[data_chunks] . identifier[append] ( identifier[last_chunk] ) keyword[return] literal[string] . identifier[join] ( identifier[data_chunks] )
def read(self, size=None): """ read([size]) -> read at most size bytes, returned as a string. If the size argument is negative or None, read until EOF is reached. Return an empty string at EOF. """ if size is None or size < 0: return ''.join(list(self)) # depends on [control=['if'], data=[]] else: data_chunks = [] data_readed = 0 try: while data_readed < size: chunk = self.next_chunk() data_chunks.append(chunk) data_readed += len(chunk) # depends on [control=['while'], data=['data_readed']] # depends on [control=['try'], data=[]] except StopIteration: pass # depends on [control=['except'], data=[]] if data_readed > size: last_chunk = data_chunks.pop() extra_length = data_readed - size (last_chunk, extra_data) = (last_chunk[:-extra_length], last_chunk[-extra_length:]) self.unshift(extra_data) data_chunks.append(last_chunk) # depends on [control=['if'], data=['data_readed', 'size']] return ''.join(data_chunks)
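A self-contained toy makes the slicing at the end of `read` visible: when the last chunk overshoots `size`, the surplus is pushed back with `unshift` for the next call (the chunk source here is a simplified stand-in for the real class):

class ChunkReader:
    '''Toy stand-in with next_chunk()/unshift() plus the read() logic above.'''
    def __init__(self, chunks):
        self._chunks = list(chunks)

    def next_chunk(self):
        if not self._chunks:
            raise StopIteration
        return self._chunks.pop(0)

    def unshift(self, data):
        self._chunks.insert(0, data)   # leftover goes back to the front

    def read(self, size):
        data_chunks, data_readed = [], 0
        try:
            while data_readed < size:
                chunk = self.next_chunk()
                data_chunks.append(chunk)
                data_readed += len(chunk)
        except StopIteration:
            pass
        if data_readed > size:         # last chunk overshot: split it
            last_chunk = data_chunks.pop()
            extra = data_readed - size
            data_chunks.append(last_chunk[:-extra])
            self.unshift(last_chunk[-extra:])
        return ''.join(data_chunks)

r = ChunkReader(['hello ', 'world'])
print(r.read(8))   # 'hello wo' -- 'rld' was unshifted
print(r.read(8))   # 'rld'      -- EOF reached before 8 bytes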
def send(self, picture, *args): """ Send a 'picture' message to the socket (or actor). The picture is a string that defines the type of each frame. This makes it easy to send a complex multiframe message in one call. The picture can contain any of these characters, each corresponding to one or two arguments: i = int (signed) 1 = uint8_t 2 = uint16_t 4 = uint32_t 8 = uint64_t s = char * b = byte *, size_t (2 arguments) c = zchunk_t * f = zframe_t * h = zhashx_t * U = zuuid_t * p = void * (sends the pointer value, only meaningful over inproc) m = zmsg_t * (sends all frames in the zmsg) z = sends zero-sized frame (0 arguments) u = uint (deprecated) Note that s, b, c, and f are encoded the same way and the choice is offered as a convenience to the sender, which may or may not already have data in a zchunk or zframe. Does not change or take ownership of any arguments. Returns 0 if successful, -1 if sending failed for any reason. """ return lib.zsock_send(self._as_parameter_, picture, *args)
def function[send, parameter[self, picture]]: constant[ Send a 'picture' message to the socket (or actor). The picture is a string that defines the type of each frame. This makes it easy to send a complex multiframe message in one call. The picture can contain any of these characters, each corresponding to one or two arguments: i = int (signed) 1 = uint8_t 2 = uint16_t 4 = uint32_t 8 = uint64_t s = char * b = byte *, size_t (2 arguments) c = zchunk_t * f = zframe_t * h = zhashx_t * U = zuuid_t * p = void * (sends the pointer value, only meaningful over inproc) m = zmsg_t * (sends all frames in the zmsg) z = sends zero-sized frame (0 arguments) u = uint (deprecated) Note that s, b, c, and f are encoded the same way and the choice is offered as a convenience to the sender, which may or may not already have data in a zchunk or zframe. Does not change or take ownership of any arguments. Returns 0 if successful, -1 if sending failed for any reason. ] return[call[name[lib].zsock_send, parameter[name[self]._as_parameter_, name[picture], <ast.Starred object at 0x7da1b09d2b00>]]]
keyword[def] identifier[send] ( identifier[self] , identifier[picture] ,* identifier[args] ): literal[string] keyword[return] identifier[lib] . identifier[zsock_send] ( identifier[self] . identifier[_as_parameter_] , identifier[picture] ,* identifier[args] )
def send(self, picture, *args): """ Send a 'picture' message to the socket (or actor). The picture is a string that defines the type of each frame. This makes it easy to send a complex multiframe message in one call. The picture can contain any of these characters, each corresponding to one or two arguments: i = int (signed) 1 = uint8_t 2 = uint16_t 4 = uint32_t 8 = uint64_t s = char * b = byte *, size_t (2 arguments) c = zchunk_t * f = zframe_t * h = zhashx_t * U = zuuid_t * p = void * (sends the pointer value, only meaningful over inproc) m = zmsg_t * (sends all frames in the zmsg) z = sends zero-sized frame (0 arguments) u = uint (deprecated) Note that s, b, c, and f are encoded the same way and the choice is offered as a convenience to the sender, which may or may not already have data in a zchunk or zframe. Does not change or take ownership of any arguments. Returns 0 if successful, -1 if sending failed for any reason. """ return lib.zsock_send(self._as_parameter_, picture, *args)
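Usage sketch for the picture API above, assuming the ctypes-style binding passes `s` arguments as byte strings and `i` as a C int (socket construction is elided):

# One call sends a three-frame message: a string frame, a signed-int
# frame, and a zero-sized delimiter frame.
rc = sock.send(b'siz', b'status/update', 42)
assert rc == 0  # -1 would mean the send failed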
def get_transform(offset, scale): ''' Parameters ---------- offset : pandas.Series Cartesian ``(x, y)`` coordinate of offset origin. scale : pandas.Series Scaling factor for ``x`` and ``y`` dimensions. Returns ------- pandas.DataFrame 3x3 transformation matrix resulting in specified `x/y` offset and scale. **Note that third row label is ``w`` and not ``z``.** ''' return pd.DataFrame([[scale, 0, offset.x], [0, scale, offset.y], [0, 0, 1]], index=['x', 'y', 'w'])
def function[get_transform, parameter[offset, scale]]: constant[ Parameters ---------- offset : pandas.Series Cartesian ``(x, y)`` coordinate of offset origin. scale : pandas.Series Scaling factor for ``x`` and ``y`` dimensions. Returns ------- pandas.DataFrame 3x3 transformation matrix resulting in specified `x/y` offset and scale. **Note that third row label is ``w`` and not ``z``.** ] return[call[name[pd].DataFrame, parameter[list[[<ast.List object at 0x7da18c4cc3d0>, <ast.List object at 0x7da18c4cc430>, <ast.List object at 0x7da18c4ce170>]]]]]
keyword[def] identifier[get_transform] ( identifier[offset] , identifier[scale] ): literal[string] keyword[return] identifier[pd] . identifier[DataFrame] ([[ identifier[scale] , literal[int] , identifier[offset] . identifier[x] ],[ literal[int] , identifier[scale] , identifier[offset] . identifier[y] ], [ literal[int] , literal[int] , literal[int] ]], identifier[index] =[ literal[string] , literal[string] , literal[string] ])
def get_transform(offset, scale): """ Parameters ---------- offset : pandas.Series Cartesian ``(x, y)`` coordinate of offset origin. scale : pandas.Series Scaling factor for ``x`` and ``y`` dimensions. Returns ------- pandas.DataFrame 3x3 transformation matrix resulting in specified `x/y` offset and scale. **Note that third row label is ``w`` and not ``z``.** """ return pd.DataFrame([[scale, 0, offset.x], [0, scale, offset.y], [0, 0, 1]], index=['x', 'y', 'w'])
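Applying the matrix to a point in homogeneous coordinates (offset and scale values are arbitrary examples):

import numpy as np
import pandas as pd

offset = pd.Series({'x': 10.0, 'y': -5.0})
T = get_transform(offset, scale=2.0)

point = np.array([3.0, 4.0, 1.0])  # (x, y, 1) in homogeneous form
x, y, w = T.values @ point
print(x, y, w)  # 16.0 3.0 1.0 -> scaled by 2, then offset by (10, -5)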
def list_subadressen_by_huisnummer(self, huisnummer): ''' List all `subadressen` for a :class:`Huisnummer`. :param huisnummer: The :class:`Huisnummer` for which the \ `subadressen` are wanted. OR A huisnummer id. :rtype: A :class:`list` of :class:`Subadres` ''' try: id = huisnummer.id except AttributeError: id = huisnummer def creator(): res = crab_gateway_request( self.client, 'ListSubadressenWithStatusByHuisnummerId', id ) try: return [ Subadres( r.SubadresId, r.Subadres, r.StatusSubadres )for r in res.SubadresWithStatusItem ] except AttributeError: return [] if self.caches['short'].is_configured: key = 'ListSubadressenWithStatusByHuisnummerId#%s' % (id) subadressen = self.caches['short'].get_or_create(key, creator) else: subadressen = creator() for s in subadressen: s.set_gateway(self) return subadressen
def function[list_subadressen_by_huisnummer, parameter[self, huisnummer]]: constant[ List all `subadressen` for a :class:`Huisnummer`. :param huisnummer: The :class:`Huisnummer` for which the `subadressen` are wanted. OR A huisnummer id. :rtype: A :class:`list` of :class:`Subadres` ] <ast.Try object at 0x7da1b0aee020> def function[creator, parameter[]]: variable[res] assign[=] call[name[crab_gateway_request], parameter[name[self].client, constant[ListSubadressenWithStatusByHuisnummerId], name[id]]] <ast.Try object at 0x7da1b0aeeef0> if call[name[self].caches][constant[short]].is_configured begin[:] variable[key] assign[=] binary_operation[constant[ListSubadressenWithStatusByHuisnummerId#%s] <ast.Mod object at 0x7da2590d6920> name[id]] variable[subadressen] assign[=] call[call[name[self].caches][constant[short]].get_or_create, parameter[name[key], name[creator]]] for taget[name[s]] in starred[name[subadressen]] begin[:] call[name[s].set_gateway, parameter[name[self]]] return[name[subadressen]]
keyword[def] identifier[list_subadressen_by_huisnummer] ( identifier[self] , identifier[huisnummer] ): literal[string] keyword[try] : identifier[id] = identifier[huisnummer] . identifier[id] keyword[except] identifier[AttributeError] : identifier[id] = identifier[huisnummer] keyword[def] identifier[creator] (): identifier[res] = identifier[crab_gateway_request] ( identifier[self] . identifier[client] , literal[string] , identifier[id] ) keyword[try] : keyword[return] [ identifier[Subadres] ( identifier[r] . identifier[SubadresId] , identifier[r] . identifier[Subadres] , identifier[r] . identifier[StatusSubadres] ) keyword[for] identifier[r] keyword[in] identifier[res] . identifier[SubadresWithStatusItem] ] keyword[except] identifier[AttributeError] : keyword[return] [] keyword[if] identifier[self] . identifier[caches] [ literal[string] ]. identifier[is_configured] : identifier[key] = literal[string] %( identifier[id] ) identifier[subadressen] = identifier[self] . identifier[caches] [ literal[string] ]. identifier[get_or_create] ( identifier[key] , identifier[creator] ) keyword[else] : identifier[subadressen] = identifier[creator] () keyword[for] identifier[s] keyword[in] identifier[subadressen] : identifier[s] . identifier[set_gateway] ( identifier[self] ) keyword[return] identifier[subadressen]
def list_subadressen_by_huisnummer(self, huisnummer): """ List all `subadressen` for a :class:`Huisnummer`. :param huisnummer: The :class:`Huisnummer` for which the `subadressen` are wanted. OR A huisnummer id. :rtype: A :class:`list` of :class:`Subadres` """ try: id = huisnummer.id # depends on [control=['try'], data=[]] except AttributeError: id = huisnummer # depends on [control=['except'], data=[]] def creator(): res = crab_gateway_request(self.client, 'ListSubadressenWithStatusByHuisnummerId', id) try: return [Subadres(r.SubadresId, r.Subadres, r.StatusSubadres) for r in res.SubadresWithStatusItem] # depends on [control=['try'], data=[]] except AttributeError: return [] # depends on [control=['except'], data=[]] if self.caches['short'].is_configured: key = 'ListSubadressenWithStatusByHuisnummerId#%s' % id subadressen = self.caches['short'].get_or_create(key, creator) # depends on [control=['if'], data=[]] else: subadressen = creator() for s in subadressen: s.set_gateway(self) # depends on [control=['for'], data=['s']] return subadressen
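The `caches['short'].get_or_create(key, creator)` call follows the dogpile.cache region API; a minimal stand-alone sketch of the same cache-aside shape (backend and expiry picked arbitrarily):

from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory', expiration_time=300)

def creator():
    print('cache miss -- querying the gateway')
    return ['subadres A', 'subadres B']  # placeholder payload

key = 'ListSubadressenWithStatusByHuisnummerId#%s' % 42
print(region.get_or_create(key, creator))  # miss: creator runs
print(region.get_or_create(key, creator))  # hit: served from cache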
def find_connection(self): '''find an antenna tracker connection if possible''' if self.connection is not None: return self.connection for m in self.mpstate.mav_master: if 'HEARTBEAT' in m.messages: if m.messages['HEARTBEAT'].type == mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER: return m return None
def function[find_connection, parameter[self]]: constant[find an antenna tracker connection if possible] if compare[name[self].connection is_not constant[None]] begin[:] return[name[self].connection] for taget[name[m]] in starred[name[self].mpstate.mav_master] begin[:] if compare[constant[HEARTBEAT] in name[m].messages] begin[:] if compare[call[name[m].messages][constant[HEARTBEAT]].type equal[==] name[mavutil].mavlink.MAV_TYPE_ANTENNA_TRACKER] begin[:] return[name[m]] return[constant[None]]
keyword[def] identifier[find_connection] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[connection] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[connection] keyword[for] identifier[m] keyword[in] identifier[self] . identifier[mpstate] . identifier[mav_master] : keyword[if] literal[string] keyword[in] identifier[m] . identifier[messages] : keyword[if] identifier[m] . identifier[messages] [ literal[string] ]. identifier[type] == identifier[mavutil] . identifier[mavlink] . identifier[MAV_TYPE_ANTENNA_TRACKER] : keyword[return] identifier[m] keyword[return] keyword[None]
def find_connection(self): """find an antenna tracker connection if possible""" if self.connection is not None: return self.connection # depends on [control=['if'], data=[]] for m in self.mpstate.mav_master: if 'HEARTBEAT' in m.messages: if m.messages['HEARTBEAT'].type == mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER: return m # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']] return None
def _get_os_properties(): """Retrieve distribution properties. Note that platform.linux_distribution and platform.dist are deprecated and will be removed in Python 3.7. By that time, distro will become mandatory. """ if IS_DISTRO_INSTALLED: return distro.linux_distribution(full_distribution_name=False) return platform.linux_distribution(full_distribution_name=False)
def function[_get_os_properties, parameter[]]: constant[Retrieve distribution properties. Note that platform.linux_distribution and platform.dist are deprecated and will be removed in Python 3.7. By that time, distro will become mandatory. ] if name[IS_DISTRO_INSTALLED] begin[:] return[call[name[distro].linux_distribution, parameter[]]] return[call[name[platform].linux_distribution, parameter[]]]
keyword[def] identifier[_get_os_properties] (): literal[string] keyword[if] identifier[IS_DISTRO_INSTALLED] : keyword[return] identifier[distro] . identifier[linux_distribution] ( identifier[full_distribution_name] = keyword[False] ) keyword[return] identifier[platform] . identifier[linux_distribution] ( identifier[full_distribution_name] = keyword[False] )
def _get_os_properties(): """Retrieve distribution properties. Note that platform.linux_distribution and platform.dist are deprecated and will be removed in Python 3.7. By that time, distro will become mandatory. """ if IS_DISTRO_INSTALLED: return distro.linux_distribution(full_distribution_name=False) # depends on [control=['if'], data=[]] return platform.linux_distribution(full_distribution_name=False)
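`IS_DISTRO_INSTALLED` is presumably set by an import probe; a stand-alone version of the whole fallback looks like this (note that `platform.linux_distribution` was ultimately removed in Python 3.8, so on modern interpreters only the `distro` branch remains usable):

import platform

try:
    import distro
    IS_DISTRO_INSTALLED = True
except ImportError:
    IS_DISTRO_INSTALLED = False

def _get_os_properties():
    # Both branches return an (id, version, codename) tuple,
    # e.g. ('ubuntu', '22.04', 'jammy').
    if IS_DISTRO_INSTALLED:
        return distro.linux_distribution(full_distribution_name=False)
    return platform.linux_distribution(full_distribution_name=False)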
def get_security_group_id(self, name): """ Take name string, give back security group ID. To get around VPC's API being stupid. """ # Memoize entire list of groups if not hasattr(self, '_security_groups'): self._security_groups = {} for group in self.get_all_security_groups(): self._security_groups[group.name] = group.id return self._security_groups[name]
def function[get_security_group_id, parameter[self, name]]: constant[ Take name string, give back security group ID. To get around VPC's API being stupid. ] if <ast.UnaryOp object at 0x7da18bc729b0> begin[:] name[self]._security_groups assign[=] dictionary[[], []] for taget[name[group]] in starred[call[name[self].get_all_security_groups, parameter[]]] begin[:] call[name[self]._security_groups][name[group].name] assign[=] name[group].id return[call[name[self]._security_groups][name[name]]]
keyword[def] identifier[get_security_group_id] ( identifier[self] , identifier[name] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_security_groups] ={} keyword[for] identifier[group] keyword[in] identifier[self] . identifier[get_all_security_groups] (): identifier[self] . identifier[_security_groups] [ identifier[group] . identifier[name] ]= identifier[group] . identifier[id] keyword[return] identifier[self] . identifier[_security_groups] [ identifier[name] ]
def get_security_group_id(self, name): """ Take name string, give back security group ID. To get around VPC's API being stupid. """ # Memoize entire list of groups if not hasattr(self, '_security_groups'): self._security_groups = {} for group in self.get_all_security_groups(): self._security_groups[group.name] = group.id # depends on [control=['for'], data=['group']] # depends on [control=['if'], data=[]] return self._security_groups[name]
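The same lazy-memoization shape, re-run against a made-up data source to show the group list is fetched only once; GroupLookup and _fetch are illustrative names, not part of the original API.

class GroupLookup:
    def get_group_id(self, name):
        # Populate the cache lazily on first use, exactly as above.
        if not hasattr(self, '_groups'):
            self._groups = {g['name']: g['id'] for g in self._fetch()}
        return self._groups[name]

    def _fetch(self):
        print('fetching once')
        return [{'name': 'web', 'id': 'sg-1'}, {'name': 'db', 'id': 'sg-2'}]

lk = GroupLookup()
print(lk.get_group_id('web'))  # prints 'fetching once', then sg-1
print(lk.get_group_id('db'))   # sg-2, served from the cache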
def _save_group_field(self, field_type, group): """This method packs a group field""" if field_type == 0x0000: # Ignored (comment block) pass elif field_type == 0x0001: if group.id_ is not None: return (4, struct.pack('<I', group.id_)) elif field_type == 0x0002: if group.title is not None: return (len(group.title.encode())+1, (group.title+'\0').encode()) elif field_type == 0x0003: if group.creation is not None: return (5, self._pack_date(group.creation)) elif field_type == 0x0004: if group.last_mod is not None: return (5, self._pack_date(group.last_mod)) elif field_type == 0x0005: if group.last_access is not None: return (5, self._pack_date(group.last_access)) elif field_type == 0x0006: if group.expire is not None: return (5, self._pack_date(group.expire)) elif field_type == 0x0007: if group.image is not None: return (4, struct.pack('<I', group.image)) elif field_type == 0x0008: if group.level is not None: return (2, struct.pack('<H', group.level)) elif field_type == 0x0009: if group.flags is not None: return (4, struct.pack('<I', group.flags)) return False
def function[_save_group_field, parameter[self, field_type, group]]: constant[This method packs a group field] if compare[name[field_type] equal[==] constant[0]] begin[:] pass return[constant[False]]
keyword[def] identifier[_save_group_field] ( identifier[self] , identifier[field_type] , identifier[group] ): literal[string] keyword[if] identifier[field_type] == literal[int] : keyword[pass] keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[id_] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[struct] . identifier[pack] ( literal[string] , identifier[group] . identifier[id_] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[title] keyword[is] keyword[not] keyword[None] : keyword[return] ( identifier[len] ( identifier[group] . identifier[title] . identifier[encode] ())+ literal[int] , ( identifier[group] . identifier[title] + literal[string] ). identifier[encode] ()) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[creation] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[self] . identifier[_pack_date] ( identifier[group] . identifier[creation] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[last_mod] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[self] . identifier[_pack_date] ( identifier[group] . identifier[last_mod] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[last_access] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[self] . identifier[_pack_date] ( identifier[group] . identifier[last_access] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[expire] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[self] . identifier[_pack_date] ( identifier[group] . identifier[expire] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[image] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[struct] . identifier[pack] ( literal[string] , identifier[group] . identifier[image] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[level] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[struct] . identifier[pack] ( literal[string] , identifier[group] . identifier[level] )) keyword[elif] identifier[field_type] == literal[int] : keyword[if] identifier[group] . identifier[flags] keyword[is] keyword[not] keyword[None] : keyword[return] ( literal[int] , identifier[struct] . identifier[pack] ( literal[string] , identifier[group] . identifier[flags] )) keyword[return] keyword[False]
def _save_group_field(self, field_type, group): """This method packs a group field""" if field_type == 0: # Ignored (comment block) pass # depends on [control=['if'], data=[]] elif field_type == 1: if group.id_ is not None: return (4, struct.pack('<I', group.id_)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 2: if group.title is not None: return (len(group.title.encode()) + 1, (group.title + '\x00').encode()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 3: if group.creation is not None: return (5, self._pack_date(group.creation)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 4: if group.last_mod is not None: return (5, self._pack_date(group.last_mod)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 5: if group.last_access is not None: return (5, self._pack_date(group.last_access)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 6: if group.expire is not None: return (5, self._pack_date(group.expire)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 7: if group.image is not None: return (4, struct.pack('<I', group.image)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 8: if group.level is not None: return (2, struct.pack('<H', group.level)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif field_type == 9: if group.flags is not None: return (4, struct.pack('<I', group.flags)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return False
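A small demo of the two packing patterns the field writer above relies on: little-endian fixed-width integers via struct, and a null-terminated encoded title whose reported length is len(encoded) + 1.

import struct

packed = struct.pack('<I', 1234)                 # little-endian unsigned 32-bit
print(packed, struct.unpack('<I', packed)[0])    # b'\xd2\x04\x00\x00' 1234

title = 'Internet'
payload = (title + '\0').encode()                # null-terminated, as for field 0x0002
print(len(title.encode()) + 1 == len(payload))   # True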
def get_consumers(self, consumer_cls, channel): """ Kombu callback to set up consumers. Called after any (re)connection to the broker. """ _log.debug('setting up consumers %s', self) for provider in self._providers: callbacks = [partial(self.handle_message, provider)] consumer = consumer_cls( queues=[provider.queue], callbacks=callbacks, accept=self.accept ) consumer.qos(prefetch_count=self.prefetch_count) self._consumers[provider] = consumer return self._consumers.values()
def function[get_consumers, parameter[self, consumer_cls, channel]]: constant[ Kombu callback to set up consumers. Called after any (re)connection to the broker. ] call[name[_log].debug, parameter[constant[setting up consumers %s], name[self]]] for taget[name[provider]] in starred[name[self]._providers] begin[:] variable[callbacks] assign[=] list[[<ast.Call object at 0x7da18fe93dc0>]] variable[consumer] assign[=] call[name[consumer_cls], parameter[]] call[name[consumer].qos, parameter[]] call[name[self]._consumers][name[provider]] assign[=] name[consumer] return[call[name[self]._consumers.values, parameter[]]]
keyword[def] identifier[get_consumers] ( identifier[self] , identifier[consumer_cls] , identifier[channel] ): literal[string] identifier[_log] . identifier[debug] ( literal[string] , identifier[self] ) keyword[for] identifier[provider] keyword[in] identifier[self] . identifier[_providers] : identifier[callbacks] =[ identifier[partial] ( identifier[self] . identifier[handle_message] , identifier[provider] )] identifier[consumer] = identifier[consumer_cls] ( identifier[queues] =[ identifier[provider] . identifier[queue] ], identifier[callbacks] = identifier[callbacks] , identifier[accept] = identifier[self] . identifier[accept] ) identifier[consumer] . identifier[qos] ( identifier[prefetch_count] = identifier[self] . identifier[prefetch_count] ) identifier[self] . identifier[_consumers] [ identifier[provider] ]= identifier[consumer] keyword[return] identifier[self] . identifier[_consumers] . identifier[values] ()
def get_consumers(self, consumer_cls, channel): """ Kombu callback to set up consumers. Called after any (re)connection to the broker. """ _log.debug('setting up consumers %s', self) for provider in self._providers: callbacks = [partial(self.handle_message, provider)] consumer = consumer_cls(queues=[provider.queue], callbacks=callbacks, accept=self.accept) consumer.qos(prefetch_count=self.prefetch_count) self._consumers[provider] = consumer # depends on [control=['for'], data=['provider']] return self._consumers.values()
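The callback wiring above leans on functools.partial to pre-bind each provider to the shared handler. A self-contained illustration; handle_message and the provider string are made up:

from functools import partial

def handle_message(provider, body):
    print('%s got %r' % (provider, body))

callback = partial(handle_message, 'provider-1')  # provider is pre-bound
callback('payload')                               # provider-1 got 'payload'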
def get_tty_size(self): """ Get the current terminal size without using a subprocess http://stackoverflow.com/questions/566746 I have no clue what-so-fucking ever over how this works or why it returns the size of the terminal in both cells and pixels. But hey, it does. """ if sys.platform == 'win32': # stdin, stdout, stderr = -10, -11, -12 ret = self._tty_size_windows(-10) ret = ret or self._tty_size_windows(-11) ret = ret or self._tty_size_windows(-12) else: # stdin, stdout, stderr = 0, 1, 2 ret = self._tty_size_linux(0) ret = ret or self._tty_size_linux(1) ret = ret or self._tty_size_linux(2) return ret or (25, 80)
def function[get_tty_size, parameter[self]]: constant[ Get the current terminal size without using a subprocess http://stackoverflow.com/questions/566746 I have no clue what-so-fucking ever over how this works or why it returns the size of the terminal in both cells and pixels. But hey, it does. ] if compare[name[sys].platform equal[==] constant[win32]] begin[:] variable[ret] assign[=] call[name[self]._tty_size_windows, parameter[<ast.UnaryOp object at 0x7da1b0b621a0>]] variable[ret] assign[=] <ast.BoolOp object at 0x7da1b0b62c80> variable[ret] assign[=] <ast.BoolOp object at 0x7da1b0b62b00> return[<ast.BoolOp object at 0x7da1b0b60070>]
keyword[def] identifier[get_tty_size] ( identifier[self] ): literal[string] keyword[if] identifier[sys] . identifier[platform] == literal[string] : identifier[ret] = identifier[self] . identifier[_tty_size_windows] (- literal[int] ) identifier[ret] = identifier[ret] keyword[or] identifier[self] . identifier[_tty_size_windows] (- literal[int] ) identifier[ret] = identifier[ret] keyword[or] identifier[self] . identifier[_tty_size_windows] (- literal[int] ) keyword[else] : identifier[ret] = identifier[self] . identifier[_tty_size_linux] ( literal[int] ) identifier[ret] = identifier[ret] keyword[or] identifier[self] . identifier[_tty_size_linux] ( literal[int] ) identifier[ret] = identifier[ret] keyword[or] identifier[self] . identifier[_tty_size_linux] ( literal[int] ) keyword[return] identifier[ret] keyword[or] ( literal[int] , literal[int] )
def get_tty_size(self): """ Get the current terminal size without using a subprocess http://stackoverflow.com/questions/566746 I have no clue what-so-fucking ever over how this works or why it returns the size of the terminal in both cells and pixels. But hey, it does. """ if sys.platform == 'win32': # stdin, stdout, stderr = -10, -11, -12 ret = self._tty_size_windows(-10) ret = ret or self._tty_size_windows(-11) ret = ret or self._tty_size_windows(-12) # depends on [control=['if'], data=[]] else: # stdin, stdout, stderr = 0, 1, 2 ret = self._tty_size_linux(0) ret = ret or self._tty_size_linux(1) ret = ret or self._tty_size_linux(2) return ret or (25, 80)
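For reference, a portable stdlib alternative (Python 3.3+) that avoids per-platform ioctl or Windows-handle probing; note shutil reports (columns, lines), the reverse of the (rows, cols) ordering implied by the (25, 80) fallback above.

import shutil

size = shutil.get_terminal_size(fallback=(80, 25))
print(size.columns, size.lines)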
def frange(x, y, jump=1): """ range for floats """ precision = get_sig_digits(jump) while x < y: yield round(x, precision) x += jump
def function[frange, parameter[x, y, jump]]: constant[ range for floats ] variable[precision] assign[=] call[name[get_sig_digits], parameter[name[jump]]] while compare[name[x] less[<] name[y]] begin[:] <ast.Yield object at 0x7da207f9ab30> <ast.AugAssign object at 0x7da207f99060>
keyword[def] identifier[frange] ( identifier[x] , identifier[y] , identifier[jump] = literal[int] ): literal[string] identifier[precision] = identifier[get_sig_digits] ( identifier[jump] ) keyword[while] identifier[x] < identifier[y] : keyword[yield] identifier[round] ( identifier[x] , identifier[precision] ) identifier[x] += identifier[jump]
def frange(x, y, jump=1): """ range for floats """ precision = get_sig_digits(jump) while x < y: yield round(x, precision) x += jump # depends on [control=['while'], data=['x']]
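frange depends on a get_sig_digits helper that is not shown; below is a hypothetical stand-in that counts the decimal places of the step, plus a usage run (assuming frange above is in scope).

from decimal import Decimal

def get_sig_digits(jump):
    # Hypothetical helper: decimal places of the step, e.g. 0.25 -> 2, 1 -> 0.
    return max(0, -Decimal(str(jump)).as_tuple().exponent)

print(list(frange(0, 1, 0.25)))  # [0, 0.25, 0.5, 0.75]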
def scan_file(pym, filename, sentinel, installed): '''Entry point scan that creates a PyModule instance if needed. ''' if not utils.is_python_script(filename): return if not pym: # This is for finding a previously created instance, not finding an # installed module with the same name. Might need to base the name # on the actual paths to reduce ambiguity in the printed scan results. module = os.path.basename(filename) pym = utils.find_package(module, installed) if not pym: pym = PyModule(module, 'SCRIPT', os.path.abspath(filename)) installed.insert(0, pym) else: pym.is_scan = True for imp_type, import_path, extra_file_scan in _scan_file(filename, sentinel): dep = utils.find_package(import_path, installed) if dep: dep.add_dependant(pym) pym.add_dependency(dep) if imp_type != 'import': pym.add_framework(imp_type) if extra_file_scan: # extra_file_scan should be a directory or file containing the # import name scan_filename = utils.file_containing_import(import_path, extra_file_scan) log.info('Related scan: %s - %s', import_path, scan_filename) if scan_filename.endswith('__init__.py'): scan_directory(pym, os.path.dirname(scan_filename), sentinel, installed) else: scan_file(pym, scan_filename, sentinel, installed) return pym
def function[scan_file, parameter[pym, filename, sentinel, installed]]: constant[Entry point scan that creates a PyModule instance if needed. ] if <ast.UnaryOp object at 0x7da204344280> begin[:] return[None] if <ast.UnaryOp object at 0x7da18f09c430> begin[:] variable[module] assign[=] call[name[os].path.basename, parameter[name[filename]]] variable[pym] assign[=] call[name[utils].find_package, parameter[name[module], name[installed]]] if <ast.UnaryOp object at 0x7da18f09f940> begin[:] variable[pym] assign[=] call[name[PyModule], parameter[name[module], constant[SCRIPT], call[name[os].path.abspath, parameter[name[filename]]]]] call[name[installed].insert, parameter[constant[0], name[pym]]] for taget[tuple[[<ast.Name object at 0x7da1b229a2f0>, <ast.Name object at 0x7da1b2298e20>, <ast.Name object at 0x7da1b229bb20>]]] in starred[call[name[_scan_file], parameter[name[filename], name[sentinel]]]] begin[:] variable[dep] assign[=] call[name[utils].find_package, parameter[name[import_path], name[installed]]] if name[dep] begin[:] call[name[dep].add_dependant, parameter[name[pym]]] call[name[pym].add_dependency, parameter[name[dep]]] if compare[name[imp_type] not_equal[!=] constant[import]] begin[:] call[name[pym].add_framework, parameter[name[imp_type]]] if name[extra_file_scan] begin[:] variable[scan_filename] assign[=] call[name[utils].file_containing_import, parameter[name[import_path], name[extra_file_scan]]] call[name[log].info, parameter[constant[Related scan: %s - %s], name[import_path], name[scan_filename]]] if call[name[scan_filename].endswith, parameter[constant[__init__.py]]] begin[:] call[name[scan_directory], parameter[name[pym], call[name[os].path.dirname, parameter[name[scan_filename]]], name[sentinel], name[installed]]] return[name[pym]]
keyword[def] identifier[scan_file] ( identifier[pym] , identifier[filename] , identifier[sentinel] , identifier[installed] ): literal[string] keyword[if] keyword[not] identifier[utils] . identifier[is_python_script] ( identifier[filename] ): keyword[return] keyword[if] keyword[not] identifier[pym] : identifier[module] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ) identifier[pym] = identifier[utils] . identifier[find_package] ( identifier[module] , identifier[installed] ) keyword[if] keyword[not] identifier[pym] : identifier[pym] = identifier[PyModule] ( identifier[module] , literal[string] , identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] )) identifier[installed] . identifier[insert] ( literal[int] , identifier[pym] ) keyword[else] : identifier[pym] . identifier[is_scan] = keyword[True] keyword[for] identifier[imp_type] , identifier[import_path] , identifier[extra_file_scan] keyword[in] identifier[_scan_file] ( identifier[filename] , identifier[sentinel] ): identifier[dep] = identifier[utils] . identifier[find_package] ( identifier[import_path] , identifier[installed] ) keyword[if] identifier[dep] : identifier[dep] . identifier[add_dependant] ( identifier[pym] ) identifier[pym] . identifier[add_dependency] ( identifier[dep] ) keyword[if] identifier[imp_type] != literal[string] : identifier[pym] . identifier[add_framework] ( identifier[imp_type] ) keyword[if] identifier[extra_file_scan] : identifier[scan_filename] = identifier[utils] . identifier[file_containing_import] ( identifier[import_path] , identifier[extra_file_scan] ) identifier[log] . identifier[info] ( literal[string] , identifier[import_path] , identifier[scan_filename] ) keyword[if] identifier[scan_filename] . identifier[endswith] ( literal[string] ): identifier[scan_directory] ( identifier[pym] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[scan_filename] ), identifier[sentinel] , identifier[installed] ) keyword[else] : identifier[scan_file] ( identifier[pym] , identifier[scan_filename] , identifier[sentinel] , identifier[installed] ) keyword[return] identifier[pym]
def scan_file(pym, filename, sentinel, installed): """Entry point scan that creates a PyModule instance if needed. """ if not utils.is_python_script(filename): return # depends on [control=['if'], data=[]] if not pym: # This is for finding a previously created instance, not finding an # installed module with the same name. Might need to base the name # on the actual paths to reduce ambiguity in the printed scan results. module = os.path.basename(filename) pym = utils.find_package(module, installed) if not pym: pym = PyModule(module, 'SCRIPT', os.path.abspath(filename)) installed.insert(0, pym) # depends on [control=['if'], data=[]] else: pym.is_scan = True # depends on [control=['if'], data=[]] for (imp_type, import_path, extra_file_scan) in _scan_file(filename, sentinel): dep = utils.find_package(import_path, installed) if dep: dep.add_dependant(pym) pym.add_dependency(dep) if imp_type != 'import': pym.add_framework(imp_type) # depends on [control=['if'], data=['imp_type']] # depends on [control=['if'], data=[]] if extra_file_scan: # extra_file_scan should be a directory or file containing the # import name scan_filename = utils.file_containing_import(import_path, extra_file_scan) log.info('Related scan: %s - %s', import_path, scan_filename) if scan_filename.endswith('__init__.py'): scan_directory(pym, os.path.dirname(scan_filename), sentinel, installed) # depends on [control=['if'], data=[]] else: scan_file(pym, scan_filename, sentinel, installed) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return pym
def remove_empty_dirs(path): """ removes empty dirs under a given path """ for root, dirs, files in os.walk(path): for d in dirs: dir_path = os.path.join(root, d) if not os.listdir(dir_path): os.rmdir(dir_path)
def function[remove_empty_dirs, parameter[path]]: constant[ removes empty dirs under a given path ] for taget[tuple[[<ast.Name object at 0x7da1b26ad300>, <ast.Name object at 0x7da1b26acdc0>, <ast.Name object at 0x7da1b26afdc0>]]] in starred[call[name[os].walk, parameter[name[path]]]] begin[:] for taget[name[d]] in starred[name[dirs]] begin[:] variable[dir_path] assign[=] call[name[os].path.join, parameter[name[root], name[d]]] if <ast.UnaryOp object at 0x7da1b26adf00> begin[:] call[name[os].rmdir, parameter[name[dir_path]]]
keyword[def] identifier[remove_empty_dirs] ( identifier[path] ): literal[string] keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[path] ): keyword[for] identifier[d] keyword[in] identifier[dirs] : identifier[dir_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[d] ) keyword[if] keyword[not] identifier[os] . identifier[listdir] ( identifier[dir_path] ): identifier[os] . identifier[rmdir] ( identifier[dir_path] )
def remove_empty_dirs(path): """ removes empty dirs under a given path """ for (root, dirs, files) in os.walk(path): for d in dirs: dir_path = os.path.join(root, d) if not os.listdir(dir_path): os.rmdir(dir_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] # depends on [control=['for'], data=[]]
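One caveat with the top-down walk above: a directory that only becomes empty after its empty children are removed is left behind. Walking bottom-up fixes that; a sketch:

import os

def remove_empty_dirs_bottom_up(path):
    # topdown=False visits leaves first, so freshly emptied parents are caught too.
    for root, dirs, files in os.walk(path, topdown=False):
        for d in dirs:
            dir_path = os.path.join(root, d)
            if not os.listdir(dir_path):
                os.rmdir(dir_path)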
def random_square_mask(shape, fraction): """Create a numpy array with specified shape and masked fraction. Args: shape: tuple, shape of the mask to create. fraction: float, fraction of the mask area to zero out. Returns: numpy.array: A numpy array storing the mask. """ mask = np.ones(shape) patch_area = shape[0]*shape[1]*fraction patch_dim = int(math.floor(math.sqrt(patch_area))) if patch_area == 0 or patch_dim == 0: return mask x = np.random.randint(shape[0] - patch_dim) y = np.random.randint(shape[1] - patch_dim) mask[x:(x + patch_dim), y:(y + patch_dim), :] = 0 return mask
def function[random_square_mask, parameter[shape, fraction]]: constant[Create a numpy array with specified shape and masked fraction. Args: shape: tuple, shape of the mask to create. fraction: float, fraction of the mask area to zero out. Returns: numpy.array: A numpy array storing the mask. ] variable[mask] assign[=] call[name[np].ones, parameter[name[shape]]] variable[patch_area] assign[=] binary_operation[binary_operation[call[name[shape]][constant[0]] * call[name[shape]][constant[1]]] * name[fraction]] variable[patch_dim] assign[=] call[name[int], parameter[call[name[math].floor, parameter[call[name[math].sqrt, parameter[name[patch_area]]]]]]] if <ast.BoolOp object at 0x7da1b201c730> begin[:] return[name[mask]] variable[x] assign[=] call[name[np].random.randint, parameter[binary_operation[call[name[shape]][constant[0]] - name[patch_dim]]]] variable[y] assign[=] call[name[np].random.randint, parameter[binary_operation[call[name[shape]][constant[1]] - name[patch_dim]]]] call[name[mask]][tuple[[<ast.Slice object at 0x7da1b208b2b0>, <ast.Slice object at 0x7da1b2088700>, <ast.Slice object at 0x7da1b20882b0>]]] assign[=] constant[0] return[name[mask]]
keyword[def] identifier[random_square_mask] ( identifier[shape] , identifier[fraction] ): literal[string] identifier[mask] = identifier[np] . identifier[ones] ( identifier[shape] ) identifier[patch_area] = identifier[shape] [ literal[int] ]* identifier[shape] [ literal[int] ]* identifier[fraction] identifier[patch_dim] = identifier[int] ( identifier[math] . identifier[floor] ( identifier[math] . identifier[sqrt] ( identifier[patch_area] ))) keyword[if] identifier[patch_area] == literal[int] keyword[or] identifier[patch_dim] == literal[int] : keyword[return] identifier[mask] identifier[x] = identifier[np] . identifier[random] . identifier[randint] ( identifier[shape] [ literal[int] ]- identifier[patch_dim] ) identifier[y] = identifier[np] . identifier[random] . identifier[randint] ( identifier[shape] [ literal[int] ]- identifier[patch_dim] ) identifier[mask] [ identifier[x] :( identifier[x] + identifier[patch_dim] ), identifier[y] :( identifier[y] + identifier[patch_dim] ),:]= literal[int] keyword[return] identifier[mask]
def random_square_mask(shape, fraction): """Create a numpy array with specified shape and masked fraction. Args: shape: tuple, shape of the mask to create. fraction: float, fraction of the mask area to zero out. Returns: numpy.array: A numpy array storing the mask. """ mask = np.ones(shape) patch_area = shape[0] * shape[1] * fraction patch_dim = int(math.floor(math.sqrt(patch_area))) if patch_area == 0 or patch_dim == 0: return mask # depends on [control=['if'], data=[]] x = np.random.randint(shape[0] - patch_dim) y = np.random.randint(shape[1] - patch_dim) mask[x:x + patch_dim, y:y + patch_dim, :] = 0 return mask
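A quick usage check (assuming random_square_mask above is in scope): with a 64x64x3 shape and fraction 0.25 the patch is exactly 32x32, so the zeroed share comes out at 0.25.

import numpy as np

np.random.seed(0)                       # reproducible patch placement
mask = random_square_mask((64, 64, 3), 0.25)
print(mask.shape)                       # (64, 64, 3)
print(1.0 - mask.mean())                # 0.25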
def extract_pdbid(string): """Use regular expressions to get a PDB ID from a string""" p = re.compile("[0-9][0-9a-z]{3}") m = p.search(string.lower()) try: return m.group() except AttributeError: return "UnknownProtein"
def function[extract_pdbid, parameter[string]]: constant[Use regular expressions to get a PDB ID from a string] variable[p] assign[=] call[name[re].compile, parameter[constant[[0-9][0-9a-z]{3}]]] variable[m] assign[=] call[name[p].search, parameter[call[name[string].lower, parameter[]]]] <ast.Try object at 0x7da207f9a860>
keyword[def] identifier[extract_pdbid] ( identifier[string] ): literal[string] identifier[p] = identifier[re] . identifier[compile] ( literal[string] ) identifier[m] = identifier[p] . identifier[search] ( identifier[string] . identifier[lower] ()) keyword[try] : keyword[return] identifier[m] . identifier[group] () keyword[except] identifier[AttributeError] : keyword[return] literal[string]
def extract_pdbid(string): """Use regular expressions to get a PDB ID from a string""" p = re.compile('[0-9][0-9a-z]{3}') m = p.search(string.lower()) try: return m.group() # depends on [control=['try'], data=[]] except AttributeError: return 'UnknownProtein' # depends on [control=['except'], data=[]]
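Usage of the regex extractor (assuming extract_pdbid above is in scope): PDB IDs are one digit followed by three alphanumerics, matched case-insensitively via lower().

print(extract_pdbid('pdb1ABC.ent'))  # 1abc
print(extract_pdbid('no id here'))   # UnknownProtein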
def list_exes(self): """List the installed executables by this project.""" return [path.join(self.env_bin, f) for f in os.listdir(self.env_bin)]
def function[list_exes, parameter[self]]: constant[List the installed executables by this project.] return[<ast.ListComp object at 0x7da18bcca050>]
keyword[def] identifier[list_exes] ( identifier[self] ): literal[string] keyword[return] [ identifier[path] . identifier[join] ( identifier[self] . identifier[env_bin] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[self] . identifier[env_bin] )]
def list_exes(self): """List the installed executables by this project.""" return [path.join(self.env_bin, f) for f in os.listdir(self.env_bin)]
def size_of_generator(generator, memory_efficient=True): """Get number of items in a generator function. - memory_efficient = True, 3 times slower, but memory efficient. - memory_efficient = False, faster, but costs more memory. Counting one by one (memory_efficient=True) avoids loading all of the generator's elements into memory, but is slightly slower than memory_efficient=False. """ if memory_efficient: counter = 0 for _ in generator: counter += 1 return counter else: return len(list(generator))
def function[size_of_generator, parameter[generator, memory_efficient]]: constant[Get number of items in a generator function. - memory_efficient = True, 3 times slower, but memory efficient. - memory_efficient = False, faster, but costs more memory. Counting one by one (memory_efficient=True) avoids loading all of the generator's elements into memory, but is slightly slower than memory_efficient=False. ] if name[memory_efficient] begin[:] variable[counter] assign[=] constant[0] for taget[name[_]] in starred[name[generator]] begin[:] <ast.AugAssign object at 0x7da1b23b2bf0> return[name[counter]]
keyword[def] identifier[size_of_generator] ( identifier[generator] , identifier[memory_efficient] = keyword[True] ): literal[string] keyword[if] identifier[memory_efficient] : identifier[counter] = literal[int] keyword[for] identifier[_] keyword[in] identifier[generator] : identifier[counter] += literal[int] keyword[return] identifier[counter] keyword[else] : keyword[return] identifier[len] ( identifier[list] ( identifier[generator] ))
def size_of_generator(generator, memory_efficient=True): """Get number of items in a generator function. - memory_efficient = True, 3 times slower, but memory efficient. - memory_efficient = False, faster, but costs more memory. Counting one by one (memory_efficient=True) avoids loading all of the generator's elements into memory, but is slightly slower than memory_efficient=False. """ if memory_efficient: counter = 0 for _ in generator: counter += 1 # depends on [control=['for'], data=[]] return counter # depends on [control=['if'], data=[]] else: return len(list(generator))
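Both modes counted against fresh generators (each call consumes its input, so a generator cannot be reused); assumes size_of_generator above is in scope.

print(size_of_generator((x * x for x in range(10))))                          # 10
print(size_of_generator((x * x for x in range(10)), memory_efficient=False))  # 10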
def _get_oauth_session(self): """Creates a new OAuth session :return: - OAuth2Session object """ return self._get_session( OAuth2Session( client_id=self.client_id, token=self.token, token_updater=self.token_updater, auto_refresh_url=self.token_url, auto_refresh_kwargs={ "client_id": self.client_id, "client_secret": self.client_secret } ) )
def function[_get_oauth_session, parameter[self]]: constant[Creates a new OAuth session :return: - OAuth2Session object ] return[call[name[self]._get_session, parameter[call[name[OAuth2Session], parameter[]]]]]
keyword[def] identifier[_get_oauth_session] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[_get_session] ( identifier[OAuth2Session] ( identifier[client_id] = identifier[self] . identifier[client_id] , identifier[token] = identifier[self] . identifier[token] , identifier[token_updater] = identifier[self] . identifier[token_updater] , identifier[auto_refresh_url] = identifier[self] . identifier[token_url] , identifier[auto_refresh_kwargs] ={ literal[string] : identifier[self] . identifier[client_id] , literal[string] : identifier[self] . identifier[client_secret] } ) )
def _get_oauth_session(self): """Creates a new OAuth session :return: - OAuth2Session object """ return self._get_session(OAuth2Session(client_id=self.client_id, token=self.token, token_updater=self.token_updater, auto_refresh_url=self.token_url, auto_refresh_kwargs={'client_id': self.client_id, 'client_secret': self.client_secret}))
def _set_box(self): """ Set the box size for the molecular assembly """ net_volume = 0.0 for idx, mol in enumerate(self.mols): length = max([np.max(mol.cart_coords[:, i])-np.min(mol.cart_coords[:, i]) for i in range(3)]) + 2.0 net_volume += (length**3.0) * float(self.param_list[idx]['number']) length = net_volume**(1.0/3.0) for idx, mol in enumerate(self.mols): self.param_list[idx]['inside box'] = '0.0 0.0 0.0 {} {} {}'.format( length, length, length)
def function[_set_box, parameter[self]]: constant[ Set the box size for the molecular assembly ] variable[net_volume] assign[=] constant[0.0] for taget[tuple[[<ast.Name object at 0x7da204567d30>, <ast.Name object at 0x7da2045674f0>]]] in starred[call[name[enumerate], parameter[name[self].mols]]] begin[:] variable[length] assign[=] binary_operation[call[name[max], parameter[<ast.ListComp object at 0x7da2045655a0>]] + constant[2.0]] <ast.AugAssign object at 0x7da2045663b0> variable[length] assign[=] binary_operation[name[net_volume] ** binary_operation[constant[1.0] / constant[3.0]]] for taget[tuple[[<ast.Name object at 0x7da2045652d0>, <ast.Name object at 0x7da204564550>]]] in starred[call[name[enumerate], parameter[name[self].mols]]] begin[:] call[call[name[self].param_list][name[idx]]][constant[inside box]] assign[=] call[constant[0.0 0.0 0.0 {} {} {}].format, parameter[name[length], name[length], name[length]]]
keyword[def] identifier[_set_box] ( identifier[self] ): literal[string] identifier[net_volume] = literal[int] keyword[for] identifier[idx] , identifier[mol] keyword[in] identifier[enumerate] ( identifier[self] . identifier[mols] ): identifier[length] = identifier[max] ([ identifier[np] . identifier[max] ( identifier[mol] . identifier[cart_coords] [:, identifier[i] ])- identifier[np] . identifier[min] ( identifier[mol] . identifier[cart_coords] [:, identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )])+ literal[int] identifier[net_volume] +=( identifier[length] ** literal[int] )* identifier[float] ( identifier[self] . identifier[param_list] [ identifier[idx] ][ literal[string] ]) identifier[length] = identifier[net_volume] **( literal[int] / literal[int] ) keyword[for] identifier[idx] , identifier[mol] keyword[in] identifier[enumerate] ( identifier[self] . identifier[mols] ): identifier[self] . identifier[param_list] [ identifier[idx] ][ literal[string] ]= literal[string] . identifier[format] ( identifier[length] , identifier[length] , identifier[length] )
def _set_box(self): """ Set the box size for the molecular assembly """ net_volume = 0.0 for (idx, mol) in enumerate(self.mols): length = max([np.max(mol.cart_coords[:, i]) - np.min(mol.cart_coords[:, i]) for i in range(3)]) + 2.0 net_volume += length ** 3.0 * float(self.param_list[idx]['number']) # depends on [control=['for'], data=[]] length = net_volume ** (1.0 / 3.0) for (idx, mol) in enumerate(self.mols): self.param_list[idx]['inside box'] = '0.0 0.0 0.0 {} {} {}'.format(length, length, length) # depends on [control=['for'], data=[]]
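A worked instance of the sizing rule above: the box edge is the cube root of the summed per-species volumes, each species contributing (padded bounding cube)^3 times its copy count. The molecule sizes and counts below are made up.

net_volume = 6.0 ** 3 * 10 + 8.0 ** 3 * 5     # 2160.0 + 2560.0 = 4720.0
length = net_volume ** (1.0 / 3.0)             # ~16.78
print('inside box 0.0 0.0 0.0 {0:.2f} {0:.2f} {0:.2f}'.format(length))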
def send_email(self, user, subject, msg): """Should be overridden in the setup""" print('To:', user) print('Subject:', subject) print(msg)
def function[send_email, parameter[self, user, subject, msg]]: constant[Should be overridden in the setup] call[name[print], parameter[constant[To:], name[user]]] call[name[print], parameter[constant[Subject:], name[subject]]] call[name[print], parameter[name[msg]]]
keyword[def] identifier[send_email] ( identifier[self] , identifier[user] , identifier[subject] , identifier[msg] ): literal[string] identifier[print] ( literal[string] , identifier[user] ) identifier[print] ( literal[string] , identifier[subject] ) identifier[print] ( identifier[msg] )
def send_email(self, user, subject, msg): """Should be overridden in the setup""" print('To:', user) print('Subject:', subject) print(msg)
def _set_nameserver_cos(self, v, load=False): """ Setter method for nameserver_cos, mapped from YANG variable /brocade_nameserver_rpc/get_nameserver_detail/output/show_nameserver/nameserver_cos (nameserver-cos-type) If this variable is read-only (config: false) in the source YANG file, then _set_nameserver_cos is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_nameserver_cos() directly. YANG Description: Indicates the Fibre Channel Class of Service supported by the device. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'F|1|2|3|,', 'length': [u'0..8']}), is_leaf=True, yang_name="nameserver-cos", rest_name="nameserver-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'class of service'}}, namespace='urn:brocade.com:mgmt:brocade-nameserver', defining_module='brocade-nameserver', yang_type='nameserver-cos-type', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """nameserver_cos must be of a type compatible with nameserver-cos-type""", 'defined-type': "brocade-nameserver:nameserver-cos-type", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'F|1|2|3|,', 'length': [u'0..8']}), is_leaf=True, yang_name="nameserver-cos", rest_name="nameserver-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'class of service'}}, namespace='urn:brocade.com:mgmt:brocade-nameserver', defining_module='brocade-nameserver', yang_type='nameserver-cos-type', is_config=True)""", }) self.__nameserver_cos = t if hasattr(self, '_set'): self._set()
def function[_set_nameserver_cos, parameter[self, v, load]]: constant[ Setter method for nameserver_cos, mapped from YANG variable /brocade_nameserver_rpc/get_nameserver_detail/output/show_nameserver/nameserver_cos (nameserver-cos-type) If this variable is read-only (config: false) in the source YANG file, then _set_nameserver_cos is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_nameserver_cos() directly. YANG Description: Indicates the Fibre Channel Class of Service supported by the device. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b24a3e80> name[self].__nameserver_cos assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_nameserver_cos] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_dict] ={ literal[string] : literal[string] , literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__nameserver_cos] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_nameserver_cos(self, v, load=False): """ Setter method for nameserver_cos, mapped from YANG variable /brocade_nameserver_rpc/get_nameserver_detail/output/show_nameserver/nameserver_cos (nameserver-cos-type) If this variable is read-only (config: false) in the source YANG file, then _set_nameserver_cos is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_nameserver_cos() directly. YANG Description: Indicates the Fibre Channel Class of Service supported by the device. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'F|1|2|3|,', 'length': [u'0..8']}), is_leaf=True, yang_name='nameserver-cos', rest_name='nameserver-cos', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'class of service'}}, namespace='urn:brocade.com:mgmt:brocade-nameserver', defining_module='brocade-nameserver', yang_type='nameserver-cos-type', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'nameserver_cos must be of a type compatible with nameserver-cos-type', 'defined-type': 'brocade-nameserver:nameserver-cos-type', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={\'pattern\': u\'F|1|2|3|,\', \'length\': [u\'0..8\']}), is_leaf=True, yang_name="nameserver-cos", rest_name="nameserver-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'info\': u\'class of service\'}}, namespace=\'urn:brocade.com:mgmt:brocade-nameserver\', defining_module=\'brocade-nameserver\', yang_type=\'nameserver-cos-type\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__nameserver_cos = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def create_service_network(self, tenant_name, network, subnet, dhcp_range=True): """Create network on the DCNM. :param tenant_name: name of tenant the network belongs to :param network: network parameters :param subnet: subnet parameters of the network """ network_info = {} subnet_ip_mask = subnet.cidr.split('/') if self._default_md is None: self._set_default_mobility_domain() vlan_id = '0' gw_ip = subnet.gateway_ip part_name = network.part_name if not part_name: part_name = self._part_name if network.vlan_id: vlan_id = str(network.vlan_id) if network.mob_domain_name is not None: mob_domain_name = network.mob_domain_name else: mob_domain_name = self._default_md else: mob_domain_name = None seg_id = str(network.segmentation_id) seg_str = "$segmentId=" + seg_id cfg_args = [ seg_str, "$netMaskLength=" + subnet_ip_mask[1], "$gatewayIpAddress=" + gw_ip, "$networkName=" + network.name, "$vlanId=" + vlan_id, "$vrfName=" + tenant_name + ':' + part_name ] cfg_args = ';'.join(cfg_args) ip_range = ','.join(["%s-%s" % (p['start'], p['end']) for p in subnet.allocation_pools]) dhcp_scopes = {'ipRange': ip_range, 'subnet': subnet.cidr, 'gateway': gw_ip} network_info = {"vlanId": vlan_id, "mobilityDomainId": mob_domain_name, "profileName": network.config_profile, "networkName": network.name, "configArg": cfg_args, "organizationName": tenant_name, "partitionName": part_name, "description": network.name, "netmaskLength": subnet_ip_mask[1], "gateway": gw_ip} if seg_id: network_info["segmentId"] = seg_id if dhcp_range: network_info["dhcpScope"] = dhcp_scopes if hasattr(subnet, 'secondary_gw'): network_info["secondaryGateway"] = subnet.secondary_gw if self._is_iplus: # Need to add the vrf name to the network info prof = self._config_profile_get(network.config_profile) if prof and prof.get('profileSubType') == 'network:universal': # For universal profile vrf has to be organization:partition network_info["vrfName"] = ':'.join((tenant_name, part_name)) else: # Otherwise, it should be left empty. network_info["vrfName"] = "" LOG.info("Creating %s network in DCNM.", network_info) res = self._create_network(network_info) if res and res.status_code in self._resp_ok: LOG.info("Created %s network in DCNM.", network_info) else: LOG.error("Failed to create %s network in DCNM.", network_info) raise dexc.DfaClientRequestFailed(reason=self._failure_msg(res))
def function[create_service_network, parameter[self, tenant_name, network, subnet, dhcp_range]]: constant[Create network on the DCNM. :param tenant_name: name of tenant the network belongs to :param network: network parameters :param subnet: subnet parameters of the network ] variable[network_info] assign[=] dictionary[[], []] variable[subnet_ip_mask] assign[=] call[name[subnet].cidr.split, parameter[constant[/]]] if compare[name[self]._default_md is constant[None]] begin[:] call[name[self]._set_default_mobility_domain, parameter[]] variable[vlan_id] assign[=] constant[0] variable[gw_ip] assign[=] name[subnet].gateway_ip variable[part_name] assign[=] name[network].part_name if <ast.UnaryOp object at 0x7da1b1a5fc70> begin[:] variable[part_name] assign[=] name[self]._part_name if name[network].vlan_id begin[:] variable[vlan_id] assign[=] call[name[str], parameter[name[network].vlan_id]] if compare[name[network].mob_domain_name is_not constant[None]] begin[:] variable[mob_domain_name] assign[=] name[network].mob_domain_name variable[seg_id] assign[=] call[name[str], parameter[name[network].segmentation_id]] variable[seg_str] assign[=] binary_operation[constant[$segmentId=] + name[seg_id]] variable[cfg_args] assign[=] list[[<ast.Name object at 0x7da1b1a5c5b0>, <ast.BinOp object at 0x7da1b1a5d870>, <ast.BinOp object at 0x7da1b1a5f100>, <ast.BinOp object at 0x7da1b1a5c4f0>, <ast.BinOp object at 0x7da1b1a5f370>, <ast.BinOp object at 0x7da1b1a5de40>]] variable[cfg_args] assign[=] call[constant[;].join, parameter[name[cfg_args]]] variable[ip_range] assign[=] call[constant[,].join, parameter[<ast.ListComp object at 0x7da1b1b151b0>]] variable[dhcp_scopes] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b14be0>, <ast.Constant object at 0x7da1b1b149d0>, <ast.Constant object at 0x7da1b1b14ca0>], [<ast.Name object at 0x7da1b1b14130>, <ast.Attribute object at 0x7da1b1b140a0>, <ast.Name object at 0x7da1b1b14b50>]] variable[network_info] assign[=] dictionary[[<ast.Constant object at 0x7da18dc989d0>, <ast.Constant object at 0x7da18dc99db0>, <ast.Constant object at 0x7da18dc99c60>, <ast.Constant object at 0x7da18dc9a740>, <ast.Constant object at 0x7da18dc9be50>, <ast.Constant object at 0x7da18dc99240>, <ast.Constant object at 0x7da18dc99f30>, <ast.Constant object at 0x7da18dc9b010>, <ast.Constant object at 0x7da18dc98280>, <ast.Constant object at 0x7da18dc9ab30>], [<ast.Name object at 0x7da18dc98c40>, <ast.Name object at 0x7da18dc988e0>, <ast.Attribute object at 0x7da18dc9ab00>, <ast.Attribute object at 0x7da18dc99420>, <ast.Name object at 0x7da18dc9bac0>, <ast.Name object at 0x7da18dc99c00>, <ast.Name object at 0x7da18dc9ae00>, <ast.Attribute object at 0x7da18dc9b4f0>, <ast.Subscript object at 0x7da18dc99480>, <ast.Name object at 0x7da18dc986a0>]] if name[seg_id] begin[:] call[name[network_info]][constant[segmentId]] assign[=] name[seg_id] if name[dhcp_range] begin[:] call[name[network_info]][constant[dhcpScope]] assign[=] name[dhcp_scopes] if call[name[hasattr], parameter[name[subnet], constant[secondary_gw]]] begin[:] call[name[network_info]][constant[secondaryGateway]] assign[=] name[subnet].secondary_gw if name[self]._is_iplus begin[:] variable[prof] assign[=] call[name[self]._config_profile_get, parameter[name[network].config_profile]] if <ast.BoolOp object at 0x7da1b1c62890> begin[:] call[name[network_info]][constant[vrfName]] assign[=] call[constant[:].join, parameter[tuple[[<ast.Name object at 0x7da1b1be5f30>, <ast.Name object at 0x7da1b1be5e10>]]]] call[name[LOG].info, parameter[constant[Creating %s network in DCNM.], name[network_info]]] variable[res] assign[=] call[name[self]._create_network, parameter[name[network_info]]] if <ast.BoolOp object at 0x7da1b1be7d90> begin[:] call[name[LOG].info, parameter[constant[Created %s network in DCNM.], name[network_info]]]
keyword[def] identifier[create_service_network] ( identifier[self] , identifier[tenant_name] , identifier[network] , identifier[subnet] , identifier[dhcp_range] = keyword[True] ): literal[string] identifier[network_info] ={} identifier[subnet_ip_mask] = identifier[subnet] . identifier[cidr] . identifier[split] ( literal[string] ) keyword[if] identifier[self] . identifier[_default_md] keyword[is] keyword[None] : identifier[self] . identifier[_set_default_mobility_domain] () identifier[vlan_id] = literal[string] identifier[gw_ip] = identifier[subnet] . identifier[gateway_ip] identifier[part_name] = identifier[network] . identifier[part_name] keyword[if] keyword[not] identifier[part_name] : identifier[part_name] = identifier[self] . identifier[_part_name] keyword[if] identifier[network] . identifier[vlan_id] : identifier[vlan_id] = identifier[str] ( identifier[network] . identifier[vlan_id] ) keyword[if] identifier[network] . identifier[mob_domain_name] keyword[is] keyword[not] keyword[None] : identifier[mob_domain_name] = identifier[network] . identifier[mob_domain_name] keyword[else] : identifier[mob_domain_name] = identifier[self] . identifier[_default_md] keyword[else] : identifier[mob_domain_name] = keyword[None] identifier[seg_id] = identifier[str] ( identifier[network] . identifier[segmentation_id] ) identifier[seg_str] = literal[string] + identifier[seg_id] identifier[cfg_args] =[ identifier[seg_str] , literal[string] + identifier[subnet_ip_mask] [ literal[int] ], literal[string] + identifier[gw_ip] , literal[string] + identifier[network] . identifier[name] , literal[string] + identifier[vlan_id] , literal[string] + identifier[tenant_name] + literal[string] + identifier[part_name] ] identifier[cfg_args] = literal[string] . identifier[join] ( identifier[cfg_args] ) identifier[ip_range] = literal[string] . identifier[join] ([ literal[string] %( identifier[p] [ literal[string] ], identifier[p] [ literal[string] ]) keyword[for] identifier[p] keyword[in] identifier[subnet] . identifier[allocation_pools] ]) identifier[dhcp_scopes] ={ literal[string] : identifier[ip_range] , literal[string] : identifier[subnet] . identifier[cidr] , literal[string] : identifier[gw_ip] } identifier[network_info] ={ literal[string] : identifier[vlan_id] , literal[string] : identifier[mob_domain_name] , literal[string] : identifier[network] . identifier[config_profile] , literal[string] : identifier[network] . identifier[name] , literal[string] : identifier[cfg_args] , literal[string] : identifier[tenant_name] , literal[string] : identifier[part_name] , literal[string] : identifier[network] . identifier[name] , literal[string] : identifier[subnet_ip_mask] [ literal[int] ], literal[string] : identifier[gw_ip] } keyword[if] identifier[seg_id] : identifier[network_info] [ literal[string] ]= identifier[seg_id] keyword[if] identifier[dhcp_range] : identifier[network_info] [ literal[string] ]= identifier[dhcp_scopes] keyword[if] identifier[hasattr] ( identifier[subnet] , literal[string] ): identifier[network_info] [ literal[string] ]= identifier[subnet] . identifier[secondary_gw] keyword[if] identifier[self] . identifier[_is_iplus] : identifier[prof] = identifier[self] . identifier[_config_profile_get] ( identifier[network] . identifier[config_profile] ) keyword[if] identifier[prof] keyword[and] identifier[prof] . identifier[get] ( literal[string] )== literal[string] : identifier[network_info] [ literal[string] ]= literal[string] . identifier[join] (( identifier[tenant_name] , identifier[part_name] )) keyword[else] : identifier[network_info] [ literal[string] ]= literal[string] identifier[LOG] . identifier[info] ( literal[string] , identifier[network_info] ) identifier[res] = identifier[self] . identifier[_create_network] ( identifier[network_info] ) keyword[if] identifier[res] keyword[and] identifier[res] . identifier[status_code] keyword[in] identifier[self] . identifier[_resp_ok] : identifier[LOG] . identifier[info] ( literal[string] , identifier[network_info] ) keyword[else] : identifier[LOG] . identifier[error] ( literal[string] , identifier[network_info] ) keyword[raise] identifier[dexc] . identifier[DfaClientRequestFailed] ( identifier[reason] = identifier[self] . identifier[_failure_msg] ( identifier[res] ))
def create_service_network(self, tenant_name, network, subnet, dhcp_range=True): """Create network on the DCNM. :param tenant_name: name of tenant the network belongs to :param network: network parameters :param subnet: subnet parameters of the network """ network_info = {} subnet_ip_mask = subnet.cidr.split('/') if self._default_md is None: self._set_default_mobility_domain() # depends on [control=['if'], data=[]] vlan_id = '0' gw_ip = subnet.gateway_ip part_name = network.part_name if not part_name: part_name = self._part_name # depends on [control=['if'], data=[]] if network.vlan_id: vlan_id = str(network.vlan_id) if network.mob_domain_name is not None: mob_domain_name = network.mob_domain_name # depends on [control=['if'], data=[]] else: mob_domain_name = self._default_md # depends on [control=['if'], data=[]] else: mob_domain_name = None seg_id = str(network.segmentation_id) seg_str = '$segmentId=' + seg_id cfg_args = [seg_str, '$netMaskLength=' + subnet_ip_mask[1], '$gatewayIpAddress=' + gw_ip, '$networkName=' + network.name, '$vlanId=' + vlan_id, '$vrfName=' + tenant_name + ':' + part_name] cfg_args = ';'.join(cfg_args) ip_range = ','.join(['%s-%s' % (p['start'], p['end']) for p in subnet.allocation_pools]) dhcp_scopes = {'ipRange': ip_range, 'subnet': subnet.cidr, 'gateway': gw_ip} network_info = {'vlanId': vlan_id, 'mobilityDomainId': mob_domain_name, 'profileName': network.config_profile, 'networkName': network.name, 'configArg': cfg_args, 'organizationName': tenant_name, 'partitionName': part_name, 'description': network.name, 'netmaskLength': subnet_ip_mask[1], 'gateway': gw_ip} if seg_id: network_info['segmentId'] = seg_id # depends on [control=['if'], data=[]] if dhcp_range: network_info['dhcpScope'] = dhcp_scopes # depends on [control=['if'], data=[]] if hasattr(subnet, 'secondary_gw'): network_info['secondaryGateway'] = subnet.secondary_gw # depends on [control=['if'], data=[]] if self._is_iplus: # Need to add the vrf name to the network info prof = self._config_profile_get(network.config_profile) if prof and prof.get('profileSubType') == 'network:universal': # For universal profile vrf has to be organization:partition network_info['vrfName'] = ':'.join((tenant_name, part_name)) # depends on [control=['if'], data=[]] else: # Otherwise, it should be left empty. network_info['vrfName'] = '' # depends on [control=['if'], data=[]] LOG.info('Creating %s network in DCNM.', network_info) res = self._create_network(network_info) if res and res.status_code in self._resp_ok: LOG.info('Created %s network in DCNM.', network_info) # depends on [control=['if'], data=[]] else: LOG.error('Failed to create %s network in DCNM.', network_info) raise dexc.DfaClientRequestFailed(reason=self._failure_msg(res))
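The pure string assembly from the method above, runnable on its own with made-up tenant values; this covers only the configArg/ipRange construction, not the DCNM call.

cfg_args = ';'.join([
    '$segmentId=12345',
    '$netMaskLength=24',
    '$gatewayIpAddress=10.0.0.1',
    '$networkName=demo-net',
    '$vlanId=0',
    '$vrfName=tenantA:partB',
])
pools = [{'start': '10.0.0.10', 'end': '10.0.0.50'}]
ip_range = ','.join('%s-%s' % (p['start'], p['end']) for p in pools)
print(cfg_args)
print(ip_range)   # 10.0.0.10-10.0.0.50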
def get_frame(self, in_data, frame_count, time_info, status): """ Callback function for the pyaudio stream. Don't use directly. """ while self.keep_listening: try: frame = self.queue.get(False, timeout=queue_timeout) return (frame, pyaudio.paContinue) except Empty: pass return (None, pyaudio.paComplete)
def function[get_frame, parameter[self, in_data, frame_count, time_info, status]]: constant[ Callback function for the pyaudio stream. Don't use directly. ] while name[self].keep_listening begin[:] <ast.Try object at 0x7da1b25d5a50> return[tuple[[<ast.Constant object at 0x7da1b25d5e40>, <ast.Attribute object at 0x7da1b25d5fc0>]]]
keyword[def] identifier[get_frame] ( identifier[self] , identifier[in_data] , identifier[frame_count] , identifier[time_info] , identifier[status] ): literal[string] keyword[while] identifier[self] . identifier[keep_listening] : keyword[try] : identifier[frame] = identifier[self] . identifier[queue] . identifier[get] ( keyword[False] , identifier[timeout] = identifier[queue_timeout] ) keyword[return] ( identifier[frame] , identifier[pyaudio] . identifier[paContinue] ) keyword[except] identifier[Empty] : keyword[pass] keyword[return] ( keyword[None] , identifier[pyaudio] . identifier[paComplete] )
def get_frame(self, in_data, frame_count, time_info, status): """ Callback function for the pyaudio stream. Don't use directly. """ while self.keep_listening: try: frame = self.queue.get(False, timeout=queue_timeout) return (frame, pyaudio.paContinue) # depends on [control=['try'], data=[]] except Empty: pass # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] return (None, pyaudio.paComplete)
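One detail worth noting in the callback above: Queue.get(False, timeout=...) is non-blocking, so the timeout is effectively ignored and Empty is raised immediately, which is why the loop spins until a frame shows up. A tiny demonstration:

from queue import Queue, Empty

q = Queue()
try:
    q.get(False, timeout=0.1)  # block=False: raises Empty at once, timeout unused
except Empty:
    print('queue empty, keep polling')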
def add_handler(self, message_type, handler): """Manage callbacks for message handlers.""" if message_type not in self._handlers: self._handlers[message_type] = [] if handler not in self._handlers[message_type]: self._handlers[message_type].append(handler)
def function[add_handler, parameter[self, message_type, handler]]: constant[Manage callbacks for message handlers.] if compare[name[message_type] <ast.NotIn object at 0x7da2590d7190> name[self]._handlers] begin[:] call[name[self]._handlers][name[message_type]] assign[=] list[[]] if compare[name[handler] <ast.NotIn object at 0x7da2590d7190> call[name[self]._handlers][name[message_type]]] begin[:] call[call[name[self]._handlers][name[message_type]].append, parameter[name[handler]]]
keyword[def] identifier[add_handler] ( identifier[self] , identifier[message_type] , identifier[handler] ): literal[string] keyword[if] identifier[message_type] keyword[not] keyword[in] identifier[self] . identifier[_handlers] : identifier[self] . identifier[_handlers] [ identifier[message_type] ]=[] keyword[if] identifier[handler] keyword[not] keyword[in] identifier[self] . identifier[_handlers] [ identifier[message_type] ]: identifier[self] . identifier[_handlers] [ identifier[message_type] ]. identifier[append] ( identifier[handler] )
def add_handler(self, message_type, handler): """Manage callbacks for message handlers.""" if message_type not in self._handlers: self._handlers[message_type] = [] # depends on [control=['if'], data=['message_type']] if handler not in self._handlers[message_type]: self._handlers[message_type].append(handler) # depends on [control=['if'], data=['handler']]
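A companion sketch showing how the registry that add_handler maintains would be consumed. The dispatch helper and the handler signature are illustrative assumptions, not part of the original class.

# Dispatching through the {message_type: [handlers]} registry shape.
def dispatch(handlers, message_type, payload):
    for handler in handlers.get(message_type, []):
        handler(payload)

handlers = {}
handlers.setdefault('status', [])
handlers['status'].append(print)          # add_handler also guards dedup
dispatch(handlers, 'status', 'link up')   # prints: link up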
def run(self): """run""" if self.callback: log.info(("{} - using callback={}") .format(self.name, self.callback)) self.callback(name=self.response_name, task_queue=self.task_queue, result_queue=self.result_queue, shutdown_msg=self.shutdown_msg) else: log.info("did not find a callback method " "- using default handler") proc_name = self.name while True: next_task = self.task_queue.get() if next_task: if str(next_task) == self.shutdown_msg: # Poison pill means shutdown log.info(("{}: Exiting msg={}") .format(self.name, next_task)) self.task_queue.task_done() break log.info(("Consumer: {} {}") .format(proc_name, next_task)) self.task_queue.task_done() if self.need_response: answer = "processed: {}".format(next_task()) self.result_queue.put(answer) # end of if custom callback handler or not return
def function[run, parameter[self]]: constant[run] if name[self].callback begin[:] call[name[log].info, parameter[call[constant[{} - using callback={}].format, parameter[name[self].name, name[self].callback]]]] call[name[self].callback, parameter[]] return[None]
keyword[def] identifier[run] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[callback] : identifier[log] . identifier[info] (( literal[string] ) . identifier[format] ( identifier[self] . identifier[name] , identifier[self] . identifier[callback] )) identifier[self] . identifier[callback] ( identifier[name] = identifier[self] . identifier[response_name] , identifier[task_queue] = identifier[self] . identifier[task_queue] , identifier[result_queue] = identifier[self] . identifier[result_queue] , identifier[shutdown_msg] = identifier[self] . identifier[shutdown_msg] ) keyword[else] : identifier[log] . identifier[info] ( literal[string] literal[string] ) identifier[proc_name] = identifier[self] . identifier[name] keyword[while] keyword[True] : identifier[next_task] = identifier[self] . identifier[task_queue] . identifier[get] () keyword[if] identifier[next_task] : keyword[if] identifier[str] ( identifier[next_task] )== identifier[self] . identifier[shutdown_msg] : identifier[log] . identifier[info] (( literal[string] ) . identifier[format] ( identifier[self] . identifier[name] , identifier[next_task] )) identifier[self] . identifier[task_queue] . identifier[task_done] () keyword[break] identifier[log] . identifier[info] (( literal[string] ) . identifier[format] ( identifier[proc_name] , identifier[next_task] )) identifier[self] . identifier[task_queue] . identifier[task_done] () keyword[if] identifier[self] . identifier[need_response] : identifier[answer] = literal[string] . identifier[format] ( identifier[next_task] ()) identifier[self] . identifier[result_queue] . identifier[put] ( identifier[answer] ) keyword[return]
def run(self): """run""" if self.callback: log.info('{} - using callback={}'.format(self.name, self.callback)) self.callback(name=self.response_name, task_queue=self.task_queue, result_queue=self.result_queue, shutdown_msg=self.shutdown_msg) # depends on [control=['if'], data=[]] else: log.info('did not find a callback method - using default handler') proc_name = self.name while True: next_task = self.task_queue.get() if next_task: if str(next_task) == self.shutdown_msg: # Poison pill means shutdown log.info('{}: Exiting msg={}'.format(self.name, next_task)) self.task_queue.task_done() break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] log.info('Consumer: {} {}'.format(proc_name, next_task)) self.task_queue.task_done() if self.need_response: answer = 'processed: {}'.format(next_task()) self.result_queue.put(answer) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # end of if custom callback handler or not return
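A runnable reduction of the poison-pill shutdown that run() implements. The worker function and the 'STOP' sentinel are illustrative stand-ins for the Consumer class and its shutdown_msg.

# Poison-pill pattern with a JoinableQueue: every get() is matched by a
# task_done(), so q.join() returns once the sentinel drains the queue.
import multiprocessing

def worker(task_queue, shutdown_msg):
    while True:
        task = task_queue.get()
        task_queue.task_done()
        if task == shutdown_msg:
            break

if __name__ == '__main__':
    q = multiprocessing.JoinableQueue()
    for item in ('a', 'b', 'c', 'STOP'):
        q.put(item)
    p = multiprocessing.Process(target=worker, args=(q, 'STOP'))
    p.start()
    q.join()   # blocks until all four task_done() calls have happened
    p.join()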
def get_processid(config): """Return process id of anycast-healthchecker. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. Returns: The process id found in the pid file Raises: ValueError in the following cases - pidfile option is missing from the configuration - pid is either -1 or 1 - stale pidfile, either with no data or invalid data - failure to read pidfile """ pidfile = config.get('daemon', 'pidfile', fallback=None) if pidfile is None: raise ValueError("Configuration doesn't have pidfile option!") try: with open(pidfile, 'r') as _file: pid = _file.read().rstrip() try: pid = int(pid) except ValueError: raise ValueError("stale pid file with invalid data:{}" .format(pid)) else: if pid in [-1, 1]: raise ValueError("invalid PID ({})".format(pid)) else: return pid except OSError as exc: if exc.errno == 2: print("CRITICAL: anycast-healthchecker could be down as pid file " "{} doesn't exist".format(pidfile)) sys.exit(2) else: raise ValueError("error while reading pid file:{}".format(exc))
def function[get_processid, parameter[config]]: constant[Return process id of anycast-healthchecker. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. Returns: The process id found in the pid file Raises: ValueError in the following cases - pidfile option is missing from the configuration - pid is either -1 or 1 - stale pidfile, either with no data or invalid data - failure to read pidfile ] variable[pidfile] assign[=] call[name[config].get, parameter[constant[daemon], constant[pidfile]]] if compare[name[pidfile] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0d62530> <ast.Try object at 0x7da1b0d61e70>
keyword[def] identifier[get_processid] ( identifier[config] ): literal[string] identifier[pidfile] = identifier[config] . identifier[get] ( literal[string] , literal[string] , identifier[fallback] = keyword[None] ) keyword[if] identifier[pidfile] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[try] : keyword[with] identifier[open] ( identifier[pidfile] , literal[string] ) keyword[as] identifier[_file] : identifier[pid] = identifier[_file] . identifier[read] (). identifier[rstrip] () keyword[try] : identifier[pid] = identifier[int] ( identifier[pid] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[pid] )) keyword[else] : keyword[if] identifier[pid] keyword[in] [- literal[int] , literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[pid] )) keyword[else] : keyword[return] identifier[pid] keyword[except] identifier[OSError] keyword[as] identifier[exc] : keyword[if] identifier[exc] . identifier[errno] == literal[int] : identifier[print] ( literal[string] literal[string] . identifier[format] ( identifier[pidfile] )) identifier[sys] . identifier[exit] ( literal[int] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[exc] ))
def get_processid(config): """Return process id of anycast-healthchecker. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. Returns: The process id found in the pid file Raises: ValueError in the following cases - pidfile option is missing from the configuration - pid is either -1 or 1 - stale pidfile, either with no data or invalid data - failure to read pidfile """ pidfile = config.get('daemon', 'pidfile', fallback=None) if pidfile is None: raise ValueError("Configuration doesn't have pidfile option!") # depends on [control=['if'], data=[]] try: with open(pidfile, 'r') as _file: pid = _file.read().rstrip() try: pid = int(pid) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('stale pid file with invalid data:{}'.format(pid)) # depends on [control=['except'], data=[]] else: if pid in [-1, 1]: raise ValueError('invalid PID ({})'.format(pid)) # depends on [control=['if'], data=['pid']] else: return pid # depends on [control=['with'], data=['_file']] # depends on [control=['try'], data=[]] except OSError as exc: if exc.errno == 2: print("CRITICAL: anycast-healthchecker could be down as pid file {} doesn't exist".format(pidfile)) sys.exit(2) # depends on [control=['if'], data=[]] else: raise ValueError('error while reading pid file:{}'.format(exc)) # depends on [control=['except'], data=['exc']]
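A small, self-contained demo of the configparser fallback lookup that get_processid relies on; the file contents here are made up.

# fallback= returns a default instead of raising when the option is absent.
import configparser

config = configparser.ConfigParser()
config.read_string('[daemon]\npidfile = /var/run/anycast-healthchecker.pid\n')
print(config.get('daemon', 'pidfile', fallback=None))
# -> /var/run/anycast-healthchecker.pid
print(config.get('daemon', 'missing', fallback=None))
# -> None (instead of raising NoOptionError)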
def _ScanFileSystemForWindowsDirectory(self, path_resolver): """Scans a file system for a known Windows directory. Args: path_resolver (WindowsPathResolver): Windows path resolver. Returns: bool: True if a known Windows directory was found. """ result = False for windows_path in self._WINDOWS_DIRECTORIES: windows_path_spec = path_resolver.ResolvePath(windows_path) result = windows_path_spec is not None if result: self._windows_directory = windows_path break return result
def function[_ScanFileSystemForWindowsDirectory, parameter[self, path_resolver]]: constant[Scans a file system for a known Windows directory. Args: path_resolver (WindowsPathResolver): Windows path resolver. Returns: bool: True if a known Windows directory was found. ] variable[result] assign[=] constant[False] for taget[name[windows_path]] in starred[name[self]._WINDOWS_DIRECTORIES] begin[:] variable[windows_path_spec] assign[=] call[name[path_resolver].ResolvePath, parameter[name[windows_path]]] variable[result] assign[=] compare[name[windows_path_spec] is_not constant[None]] if name[result] begin[:] name[self]._windows_directory assign[=] name[windows_path] break return[name[result]]
keyword[def] identifier[_ScanFileSystemForWindowsDirectory] ( identifier[self] , identifier[path_resolver] ): literal[string] identifier[result] = keyword[False] keyword[for] identifier[windows_path] keyword[in] identifier[self] . identifier[_WINDOWS_DIRECTORIES] : identifier[windows_path_spec] = identifier[path_resolver] . identifier[ResolvePath] ( identifier[windows_path] ) identifier[result] = identifier[windows_path_spec] keyword[is] keyword[not] keyword[None] keyword[if] identifier[result] : identifier[self] . identifier[_windows_directory] = identifier[windows_path] keyword[break] keyword[return] identifier[result]
def _ScanFileSystemForWindowsDirectory(self, path_resolver): """Scans a file system for a known Windows directory. Args: path_resolver (WindowsPathResolver): Windows path resolver. Returns: bool: True if a known Windows directory was found. """ result = False for windows_path in self._WINDOWS_DIRECTORIES: windows_path_spec = path_resolver.ResolvePath(windows_path) result = windows_path_spec is not None if result: self._windows_directory = windows_path break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['windows_path']] return result
def inodeusage(args=None): ''' Return inode usage information for volumes mounted on this minion CLI Example: .. code-block:: bash salt '*' disk.inodeusage ''' flags = _clean_flags(args, 'disk.inodeusage') if __grains__['kernel'] == 'AIX': cmd = 'df -i' else: cmd = 'df -iP' if flags: cmd += ' -{0}'.format(flags) ret = {} out = __salt__['cmd.run'](cmd, python_shell=False).splitlines() for line in out: if line.startswith('Filesystem'): continue comps = line.split() # Don't choke on empty lines if not comps: continue try: if __grains__['kernel'] == 'OpenBSD': ret[comps[8]] = { 'inodes': int(comps[5]) + int(comps[6]), 'used': comps[5], 'free': comps[6], 'use': comps[7], 'filesystem': comps[0], } elif __grains__['kernel'] == 'AIX': ret[comps[6]] = { 'inodes': comps[4], 'used': comps[5], 'free': comps[2], 'use': comps[5], 'filesystem': comps[0], } else: ret[comps[5]] = { 'inodes': comps[1], 'used': comps[2], 'free': comps[3], 'use': comps[4], 'filesystem': comps[0], } except (IndexError, ValueError): log.error('Problem parsing inode usage information') ret = {} return ret
def function[inodeusage, parameter[args]]: constant[ Return inode usage information for volumes mounted on this minion CLI Example: .. code-block:: bash salt '*' disk.inodeusage ] variable[flags] assign[=] call[name[_clean_flags], parameter[name[args], constant[disk.inodeusage]]] if compare[call[name[__grains__]][constant[kernel]] equal[==] constant[AIX]] begin[:] variable[cmd] assign[=] constant[df -i] if name[flags] begin[:] <ast.AugAssign object at 0x7da1b1f2b220> variable[ret] assign[=] dictionary[[], []] variable[out] assign[=] call[call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]].splitlines, parameter[]] for taget[name[line]] in starred[name[out]] begin[:] if call[name[line].startswith, parameter[constant[Filesystem]]] begin[:] continue variable[comps] assign[=] call[name[line].split, parameter[]] if <ast.UnaryOp object at 0x7da1b1f2a620> begin[:] continue <ast.Try object at 0x7da1b1f2a2f0> return[name[ret]]
keyword[def] identifier[inodeusage] ( identifier[args] = keyword[None] ): literal[string] identifier[flags] = identifier[_clean_flags] ( identifier[args] , literal[string] ) keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] : identifier[cmd] = literal[string] keyword[else] : identifier[cmd] = literal[string] keyword[if] identifier[flags] : identifier[cmd] += literal[string] . identifier[format] ( identifier[flags] ) identifier[ret] ={} identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ). identifier[splitlines] () keyword[for] identifier[line] keyword[in] identifier[out] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[continue] identifier[comps] = identifier[line] . identifier[split] () keyword[if] keyword[not] identifier[comps] : keyword[continue] keyword[try] : keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] : identifier[ret] [ identifier[comps] [ literal[int] ]]={ literal[string] : identifier[int] ( identifier[comps] [ literal[int] ])+ identifier[int] ( identifier[comps] [ literal[int] ]), literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], } keyword[elif] identifier[__grains__] [ literal[string] ]== literal[string] : identifier[ret] [ identifier[comps] [ literal[int] ]]={ literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], } keyword[else] : identifier[ret] [ identifier[comps] [ literal[int] ]]={ literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], literal[string] : identifier[comps] [ literal[int] ], } keyword[except] ( identifier[IndexError] , identifier[ValueError] ): identifier[log] . identifier[error] ( literal[string] ) identifier[ret] ={} keyword[return] identifier[ret]
def inodeusage(args=None): """ Return inode usage information for volumes mounted on this minion CLI Example: .. code-block:: bash salt '*' disk.inodeusage """ flags = _clean_flags(args, 'disk.inodeusage') if __grains__['kernel'] == 'AIX': cmd = 'df -i' # depends on [control=['if'], data=[]] else: cmd = 'df -iP' if flags: cmd += ' -{0}'.format(flags) # depends on [control=['if'], data=[]] ret = {} out = __salt__['cmd.run'](cmd, python_shell=False).splitlines() for line in out: if line.startswith('Filesystem'): continue # depends on [control=['if'], data=[]] comps = line.split() # Don't choke on empty lines if not comps: continue # depends on [control=['if'], data=[]] try: if __grains__['kernel'] == 'OpenBSD': ret[comps[8]] = {'inodes': int(comps[5]) + int(comps[6]), 'used': comps[5], 'free': comps[6], 'use': comps[7], 'filesystem': comps[0]} # depends on [control=['if'], data=[]] elif __grains__['kernel'] == 'AIX': ret[comps[6]] = {'inodes': comps[4], 'used': comps[5], 'free': comps[2], 'use': comps[5], 'filesystem': comps[0]} # depends on [control=['if'], data=[]] else: ret[comps[5]] = {'inodes': comps[1], 'used': comps[2], 'free': comps[3], 'use': comps[4], 'filesystem': comps[0]} # depends on [control=['try'], data=[]] except (IndexError, ValueError): log.error('Problem parsing inode usage information') ret = {} # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['line']] return ret
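A stand-alone sketch of the Linux-branch parsing in inodeusage, fed a canned `df -iP` line instead of shelling out through `cmd.run`.

# Same column indexing as the final else-branch above; sample data only.
sample = ('Filesystem Inodes IUsed IFree IUse% Mounted on\n'
          '/dev/sda1 655360 21000 634360 4% /\n')
ret = {}
for line in sample.splitlines():
    if line.startswith('Filesystem'):
        continue
    comps = line.split()
    if not comps:
        continue
    ret[comps[5]] = {'inodes': comps[1], 'used': comps[2],
                     'free': comps[3], 'use': comps[4],
                     'filesystem': comps[0]}
print(ret['/']['use'])  # 4%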
def get_padding_bias(x): """Calculate bias tensor from padding values in tensor. Bias tensor that is added to the pre-softmax multi-headed attention logits, which has shape [batch_size, num_heads, length, length]. The tensor is zero at non-padding locations, and -1e9 (negative infinity) at padding locations. Args: x: int tensor with shape [batch_size, length] Returns: Attention bias tensor of shape [batch_size, 1, 1, length]. """ with tf.name_scope("attention_bias"): padding = get_padding(x) attention_bias = padding * _NEG_INF attention_bias = tf.expand_dims( tf.expand_dims(attention_bias, axis=1), axis=1) return attention_bias
def function[get_padding_bias, parameter[x]]: constant[Calculate bias tensor from padding values in tensor. Bias tensor that is added to the pre-softmax multi-headed attention logits, which has shape [batch_size, num_heads, length, length]. The tensor is zero at non-padding locations, and -1e9 (negative infinity) at padding locations. Args: x: int tensor with shape [batch_size, length] Returns: Attention bias tensor of shape [batch_size, 1, 1, length]. ] with call[name[tf].name_scope, parameter[constant[attention_bias]]] begin[:] variable[padding] assign[=] call[name[get_padding], parameter[name[x]]] variable[attention_bias] assign[=] binary_operation[name[padding] * name[_NEG_INF]] variable[attention_bias] assign[=] call[name[tf].expand_dims, parameter[call[name[tf].expand_dims, parameter[name[attention_bias]]]]] return[name[attention_bias]]
keyword[def] identifier[get_padding_bias] ( identifier[x] ): literal[string] keyword[with] identifier[tf] . identifier[name_scope] ( literal[string] ): identifier[padding] = identifier[get_padding] ( identifier[x] ) identifier[attention_bias] = identifier[padding] * identifier[_NEG_INF] identifier[attention_bias] = identifier[tf] . identifier[expand_dims] ( identifier[tf] . identifier[expand_dims] ( identifier[attention_bias] , identifier[axis] = literal[int] ), identifier[axis] = literal[int] ) keyword[return] identifier[attention_bias]
def get_padding_bias(x): """Calculate bias tensor from padding values in tensor. Bias tensor that is added to the pre-softmax multi-headed attention logits, which has shape [batch_size, num_heads, length, length]. The tensor is zero at non-padding locations, and -1e9 (negative infinity) at padding locations. Args: x: int tensor with shape [batch_size, length] Returns: Attention bias tensor of shape [batch_size, 1, 1, length]. """ with tf.name_scope('attention_bias'): padding = get_padding(x) attention_bias = padding * _NEG_INF attention_bias = tf.expand_dims(tf.expand_dims(attention_bias, axis=1), axis=1) # depends on [control=['with'], data=[]] return attention_bias
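A NumPy analogue of get_padding_bias, useful for checking the broadcast shape; it assumes the usual convention that get_padding marks id-0 positions as padding.

# Shape check only; assumes pad token id 0, mirroring get_padding.
import numpy as np

_NEG_INF = -1e9
x = np.array([[7, 3, 0, 0],
              [4, 0, 0, 0]])              # [batch_size, length]
padding = (x == 0).astype(np.float32)     # 1.0 at padding positions
bias = (padding * _NEG_INF)[:, np.newaxis, np.newaxis, :]
print(bias.shape)                         # (2, 1, 1, 4)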
def _check_auth(self, username: str, password: str) -> bool: """Check if a username/password combination is valid.""" try: return self.credentials[username] == password except KeyError: return False
def function[_check_auth, parameter[self, username, password]]: constant[Check if a username/password combination is valid.] <ast.Try object at 0x7da18eb54f70>
keyword[def] identifier[_check_auth] ( identifier[self] , identifier[username] : identifier[str] , identifier[password] : identifier[str] )-> identifier[bool] : literal[string] keyword[try] : keyword[return] identifier[self] . identifier[credentials] [ identifier[username] ]== identifier[password] keyword[except] identifier[KeyError] : keyword[return] keyword[False]
def _check_auth(self, username: str, password: str) -> bool: """Check if a username/password combination is valid.""" try: return self.credentials[username] == password # depends on [control=['try'], data=[]] except KeyError: return False # depends on [control=['except'], data=[]]
def local_machine_uuid(): """Return local machine unique identifier. >>> uuid = local_machine_uuid() """ result = subprocess.check_output( 'hal-get-property --udi ' '/org/freedesktop/Hal/devices/computer ' '--key system.hardware.uuid'.split() ).strip() return uuid.UUID(hex=result)
def function[local_machine_uuid, parameter[]]: constant[Return local machine unique identifier. >>> uuid = local_machine_uuid() ] variable[result] assign[=] call[call[name[subprocess].check_output, parameter[call[constant[hal-get-property --udi /org/freedesktop/Hal/devices/computer --key system.hardware.uuid].split, parameter[]]]].strip, parameter[]] return[call[name[uuid].UUID, parameter[]]]
keyword[def] identifier[local_machine_uuid] (): literal[string] identifier[result] = identifier[subprocess] . identifier[check_output] ( literal[string] literal[string] literal[string] . identifier[split] () ). identifier[strip] () keyword[return] identifier[uuid] . identifier[UUID] ( identifier[hex] = identifier[result] )
def local_machine_uuid(): """Return local machine unique identifier. >>> uuid = local_machine_uuid() """ result = subprocess.check_output('hal-get-property --udi /org/freedesktop/Hal/devices/computer --key system.hardware.uuid'.split()).strip() return uuid.UUID(hex=result)
def build_tree(self, data, tagname, attrs=None, depth=0): r"""Build xml tree. :param data: data for build xml. :param tagname: element tag name. :param attrs: element attributes. Default:``None``. :type attrs: dict or None :param depth: element depth of the hierarchy. Default:``0``. :type depth: int """ if data is None: data = '' indent = ('\n%s' % (self.__options['indent'] * depth)) if self.__options['indent'] else '' if isinstance(data, utils.DictTypes): if self.__options['hasattr'] and self.check_structure(data.keys()): attrs, values = self.pickdata(data) self.build_tree(values, tagname, attrs, depth) else: self.__tree.append('%s%s' % (indent, self.tag_start(tagname, attrs))) iter = data.iteritems() if self.__options['ksort']: iter = sorted(iter, key=lambda x:x[0], reverse=self.__options['reverse']) for k, v in iter: attrs = {} if self.__options['hasattr'] and isinstance(v, utils.DictTypes) and self.check_structure(v.keys()): attrs, v = self.pickdata(v) self.build_tree(v, k, attrs, depth+1) self.__tree.append('%s%s' % (indent, self.tag_end(tagname))) elif utils.is_iterable(data): for v in data: self.build_tree(v, tagname, attrs, depth) else: self.__tree.append(indent) data = self.safedata(data, self.__options['cdata']) self.__tree.append(self.build_tag(tagname, data, attrs))
def function[build_tree, parameter[self, data, tagname, attrs, depth]]: constant[Build xml tree. :param data: data for build xml. :param tagname: element tag name. :param attrs: element attributes. Default:``None``. :type attrs: dict or None :param depth: element depth of the hierarchy. Default:``0``. :type depth: int ] if compare[name[data] is constant[None]] begin[:] variable[data] assign[=] constant[] variable[indent] assign[=] <ast.IfExp object at 0x7da1b2528b50> if call[name[isinstance], parameter[name[data], name[utils].DictTypes]] begin[:] if <ast.BoolOp object at 0x7da1b2528f40> begin[:] <ast.Tuple object at 0x7da1b2529180> assign[=] call[name[self].pickdata, parameter[name[data]]] call[name[self].build_tree, parameter[name[values], name[tagname], name[attrs], name[depth]]]
keyword[def] identifier[build_tree] ( identifier[self] , identifier[data] , identifier[tagname] , identifier[attrs] = keyword[None] , identifier[depth] = literal[int] ): literal[string] keyword[if] identifier[data] keyword[is] keyword[None] : identifier[data] = literal[string] identifier[indent] =( literal[string] %( identifier[self] . identifier[__options] [ literal[string] ]* identifier[depth] )) keyword[if] identifier[self] . identifier[__options] [ literal[string] ] keyword[else] literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[utils] . identifier[DictTypes] ): keyword[if] identifier[self] . identifier[__options] [ literal[string] ] keyword[and] identifier[self] . identifier[check_structure] ( identifier[data] . identifier[keys] ()): identifier[attrs] , identifier[values] = identifier[self] . identifier[pickdata] ( identifier[data] ) identifier[self] . identifier[build_tree] ( identifier[values] , identifier[tagname] , identifier[attrs] , identifier[depth] ) keyword[else] : identifier[self] . identifier[__tree] . identifier[append] ( literal[string] %( identifier[indent] , identifier[self] . identifier[tag_start] ( identifier[tagname] , identifier[attrs] ))) identifier[iter] = identifier[data] . identifier[iteritems] () keyword[if] identifier[self] . identifier[__options] [ literal[string] ]: identifier[iter] = identifier[sorted] ( identifier[iter] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ], identifier[reverse] = identifier[self] . identifier[__options] [ literal[string] ]) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iter] : identifier[attrs] ={} keyword[if] identifier[self] . identifier[__options] [ literal[string] ] keyword[and] identifier[isinstance] ( identifier[v] , identifier[utils] . identifier[DictTypes] ) keyword[and] identifier[self] . identifier[check_structure] ( identifier[v] . identifier[keys] ()): identifier[attrs] , identifier[v] = identifier[self] . identifier[pickdata] ( identifier[v] ) identifier[self] . identifier[build_tree] ( identifier[v] , identifier[k] , identifier[attrs] , identifier[depth] + literal[int] ) identifier[self] . identifier[__tree] . identifier[append] ( literal[string] %( identifier[indent] , identifier[self] . identifier[tag_end] ( identifier[tagname] ))) keyword[elif] identifier[utils] . identifier[is_iterable] ( identifier[data] ): keyword[for] identifier[v] keyword[in] identifier[data] : identifier[self] . identifier[build_tree] ( identifier[v] , identifier[tagname] , identifier[attrs] , identifier[depth] ) keyword[else] : identifier[self] . identifier[__tree] . identifier[append] ( identifier[indent] ) identifier[data] = identifier[self] . identifier[safedata] ( identifier[data] , identifier[self] . identifier[__options] [ literal[string] ]) identifier[self] . identifier[__tree] . identifier[append] ( identifier[self] . identifier[build_tag] ( identifier[tagname] , identifier[data] , identifier[attrs] ))
def build_tree(self, data, tagname, attrs=None, depth=0): """Build xml tree. :param data: data for build xml. :param tagname: element tag name. :param attrs: element attributes. Default:``None``. :type attrs: dict or None :param depth: element depth of the hierarchy. Default:``0``. :type depth: int """ if data is None: data = '' # depends on [control=['if'], data=['data']] indent = '\n%s' % (self.__options['indent'] * depth) if self.__options['indent'] else '' if isinstance(data, utils.DictTypes): if self.__options['hasattr'] and self.check_structure(data.keys()): (attrs, values) = self.pickdata(data) self.build_tree(values, tagname, attrs, depth) # depends on [control=['if'], data=[]] else: self.__tree.append('%s%s' % (indent, self.tag_start(tagname, attrs))) iter = data.iteritems() if self.__options['ksort']: iter = sorted(iter, key=lambda x: x[0], reverse=self.__options['reverse']) # depends on [control=['if'], data=[]] for (k, v) in iter: attrs = {} if self.__options['hasattr'] and isinstance(v, utils.DictTypes) and self.check_structure(v.keys()): (attrs, v) = self.pickdata(v) # depends on [control=['if'], data=[]] self.build_tree(v, k, attrs, depth + 1) # depends on [control=['for'], data=[]] self.__tree.append('%s%s' % (indent, self.tag_end(tagname))) # depends on [control=['if'], data=[]] elif utils.is_iterable(data): for v in data: self.build_tree(v, tagname, attrs, depth) # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]] else: self.__tree.append(indent) data = self.safedata(data, self.__options['cdata']) self.__tree.append(self.build_tag(tagname, data, attrs))
def cur_model(model=None): """Get and/or set the current model. If ``model`` is given, set the current model to ``model`` and return it. ``model`` can be the name of a model object, or a model object itself. If ``model`` is not given, the current model is returned. """ if model is None: if _system.currentmodel is not None: return _system.currentmodel.interface else: return None else: if isinstance(model, _Model): _system.currentmodel = model._impl else: _system.currentmodel = _system.models[model] return _system.currentmodel.interface
def function[cur_model, parameter[model]]: constant[Get and/or set the current model. If ``model`` is given, set the current model to ``model`` and return it. ``model`` can be the name of a model object, or a model object itself. If ``model`` is not given, the current model is returned. ] if compare[name[model] is constant[None]] begin[:] if compare[name[_system].currentmodel is_not constant[None]] begin[:] return[name[_system].currentmodel.interface]
keyword[def] identifier[cur_model] ( identifier[model] = keyword[None] ): literal[string] keyword[if] identifier[model] keyword[is] keyword[None] : keyword[if] identifier[_system] . identifier[currentmodel] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[_system] . identifier[currentmodel] . identifier[interface] keyword[else] : keyword[return] keyword[None] keyword[else] : keyword[if] identifier[isinstance] ( identifier[model] , identifier[_Model] ): identifier[_system] . identifier[currentmodel] = identifier[model] . identifier[_impl] keyword[else] : identifier[_system] . identifier[currentmodel] = identifier[_system] . identifier[models] [ identifier[model] ] keyword[return] identifier[_system] . identifier[currentmodel] . identifier[interface]
def cur_model(model=None): """Get and/or set the current model. If ``model`` is given, set the current model to ``model`` and return it. ``model`` can be the name of a model object, or a model object itself. If ``model`` is not given, the current model is returned. """ if model is None: if _system.currentmodel is not None: return _system.currentmodel.interface # depends on [control=['if'], data=[]] else: return None # depends on [control=['if'], data=[]] else: if isinstance(model, _Model): _system.currentmodel = model._impl # depends on [control=['if'], data=[]] else: _system.currentmodel = _system.models[model] return _system.currentmodel.interface
def _get_hanging_wall_distance_term(self, dists, ztor): """ Returns the hanging wall distance scaling term (equation 7, page 146) """ fhngr = np.ones_like(dists.rjb, dtype=float) idx = dists.rjb > 0. if ztor < 1.: temp_rjb = np.sqrt(dists.rjb[idx] ** 2. + 1.) r_max = np.max(np.column_stack([dists.rrup[idx], temp_rjb]), axis=1) fhngr[idx] = (r_max - dists.rjb[idx]) / r_max else: fhngr[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx] return fhngr
def function[_get_hanging_wall_distance_term, parameter[self, dists, ztor]]: constant[ Returns the hanging wall distance scaling term (equation 7, page 146) ] variable[fhngr] assign[=] call[name[np].ones_like, parameter[name[dists].rjb]] variable[idx] assign[=] compare[name[dists].rjb greater[>] constant[0.0]] if compare[name[ztor] less[<] constant[1.0]] begin[:] variable[temp_rjb] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[call[name[dists].rjb][name[idx]] ** constant[2.0]] + constant[1.0]]]] variable[r_max] assign[=] call[name[np].max, parameter[call[name[np].column_stack, parameter[list[[<ast.Subscript object at 0x7da18ede4b20>, <ast.Name object at 0x7da18ede63e0>]]]]]] call[name[fhngr]][name[idx]] assign[=] binary_operation[binary_operation[name[r_max] - call[name[dists].rjb][name[idx]]] / name[r_max]] return[name[fhngr]]
keyword[def] identifier[_get_hanging_wall_distance_term] ( identifier[self] , identifier[dists] , identifier[ztor] ): literal[string] identifier[fhngr] = identifier[np] . identifier[ones_like] ( identifier[dists] . identifier[rjb] , identifier[dtype] = identifier[float] ) identifier[idx] = identifier[dists] . identifier[rjb] > literal[int] keyword[if] identifier[ztor] < literal[int] : identifier[temp_rjb] = identifier[np] . identifier[sqrt] ( identifier[dists] . identifier[rjb] [ identifier[idx] ]** literal[int] + literal[int] ) identifier[r_max] = identifier[np] . identifier[max] ( identifier[np] . identifier[column_stack] ([ identifier[dists] . identifier[rrup] [ identifier[idx] ], identifier[temp_rjb] ]), identifier[axis] = literal[int] ) identifier[fhngr] [ identifier[idx] ]=( identifier[r_max] - identifier[dists] . identifier[rjb] [ identifier[idx] ])/ identifier[r_max] keyword[else] : identifier[fhngr] [ identifier[idx] ]=( identifier[dists] . identifier[rrup] [ identifier[idx] ]- identifier[dists] . identifier[rjb] [ identifier[idx] ])/ identifier[dists] . identifier[rrup] [ identifier[idx] ] keyword[return] identifier[fhngr]
def _get_hanging_wall_distance_term(self, dists, ztor): """ Returns the hanging wall distance scaling term (equation 7, page 146) """ fhngr = np.ones_like(dists.rjb, dtype=float) idx = dists.rjb > 0.0 if ztor < 1.0: temp_rjb = np.sqrt(dists.rjb[idx] ** 2.0 + 1.0) r_max = np.max(np.column_stack([dists.rrup[idx], temp_rjb]), axis=1) fhngr[idx] = (r_max - dists.rjb[idx]) / r_max # depends on [control=['if'], data=[]] else: fhngr[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx] return fhngr
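A worked toy example of the ztor >= 1 branch of equation 7; the distance arrays are made up and not taken from any real rupture.

# Sites with rjb == 0 keep the default term of 1.0.
import numpy as np

rjb = np.array([0.0, 5.0, 10.0])
rrup = np.array([2.0, 6.0, 12.0])
fhngr = np.ones_like(rjb)
idx = rjb > 0.0
fhngr[idx] = (rrup[idx] - rjb[idx]) / rrup[idx]
print(fhngr)  # [1.         0.16666667 0.16666667]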
def options(self, parser, env): """Sets additional command line options.""" Plugin.options(self, parser, env) parser.add_option( '--html-file', action='store', dest='html_file', metavar="FILE", default=env.get('NOSE_HTML_FILE', 'nosetests.html'), help="Path to html file to store the report in. " "Default is nosetests.html in the working directory " "[NOSE_HTML_FILE]")
def function[options, parameter[self, parser, env]]: constant[Sets additional command line options.] call[name[Plugin].options, parameter[name[self], name[parser], name[env]]] call[name[parser].add_option, parameter[constant[--html-file]]]
keyword[def] identifier[options] ( identifier[self] , identifier[parser] , identifier[env] ): literal[string] identifier[Plugin] . identifier[options] ( identifier[self] , identifier[parser] , identifier[env] ) identifier[parser] . identifier[add_option] ( literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[metavar] = literal[string] , identifier[default] = identifier[env] . identifier[get] ( literal[string] , literal[string] ), identifier[help] = literal[string] literal[string] literal[string] )
def options(self, parser, env): """Sets additional command line options.""" Plugin.options(self, parser, env) parser.add_option('--html-file', action='store', dest='html_file', metavar='FILE', default=env.get('NOSE_HTML_FILE', 'nosetests.html'), help='Path to html file to store the report in. Default is nosetests.html in the working directory [NOSE_HTML_FILE]')
def main_view(request, ident, stateless=False, cache_id=None, **kwargs): 'Main view for a dash app' _, app = DashApp.locate_item(ident, stateless, cache_id=cache_id) view_func = app.locate_endpoint_function() resp = view_func() return HttpResponse(resp)
def function[main_view, parameter[request, ident, stateless, cache_id]]: constant[Main view for a dash app] <ast.Tuple object at 0x7da207f99b10> assign[=] call[name[DashApp].locate_item, parameter[name[ident], name[stateless]]] variable[view_func] assign[=] call[name[app].locate_endpoint_function, parameter[]] variable[resp] assign[=] call[name[view_func], parameter[]] return[call[name[HttpResponse], parameter[name[resp]]]]
keyword[def] identifier[main_view] ( identifier[request] , identifier[ident] , identifier[stateless] = keyword[False] , identifier[cache_id] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[_] , identifier[app] = identifier[DashApp] . identifier[locate_item] ( identifier[ident] , identifier[stateless] , identifier[cache_id] = identifier[cache_id] ) identifier[view_func] = identifier[app] . identifier[locate_endpoint_function] () identifier[resp] = identifier[view_func] () keyword[return] identifier[HttpResponse] ( identifier[resp] )
def main_view(request, ident, stateless=False, cache_id=None, **kwargs): """Main view for a dash app""" (_, app) = DashApp.locate_item(ident, stateless, cache_id=cache_id) view_func = app.locate_endpoint_function() resp = view_func() return HttpResponse(resp)
def forward(self, x, boxes): """ Arguments: x (list[Tensor]): feature maps for each level boxes (list[BoxList]): boxes to be used to perform the pooling operation. Returns: result (Tensor) """ num_levels = len(self.poolers) rois = self.convert_to_roi_format(boxes) if num_levels == 1: return self.poolers[0](x[0], rois) levels = self.map_levels(boxes) num_rois = len(rois) num_channels = x[0].shape[1] output_size = self.output_size[0] dtype, device = x[0].dtype, x[0].device result = torch.zeros( (num_rois, num_channels, output_size, output_size), dtype=dtype, device=device, ) for level, (per_level_feature, pooler) in enumerate(zip(x, self.poolers)): idx_in_level = torch.nonzero(levels == level).squeeze(1) rois_per_level = rois[idx_in_level] result[idx_in_level] = pooler(per_level_feature, rois_per_level) return result
def function[forward, parameter[self, x, boxes]]: constant[ Arguments: x (list[Tensor]): feature maps for each level boxes (list[BoxList]): boxes to be used to perform the pooling operation. Returns: result (Tensor) ] variable[num_levels] assign[=] call[name[len], parameter[name[self].poolers]] variable[rois] assign[=] call[name[self].convert_to_roi_format, parameter[name[boxes]]] if compare[name[num_levels] equal[==] constant[1]] begin[:] return[call[call[name[self].poolers][constant[0]], parameter[call[name[x]][constant[0]], name[rois]]]] variable[levels] assign[=] call[name[self].map_levels, parameter[name[boxes]]] variable[num_rois] assign[=] call[name[len], parameter[name[rois]]] variable[num_channels] assign[=] call[call[name[x]][constant[0]].shape][constant[1]] variable[output_size] assign[=] call[name[self].output_size][constant[0]] <ast.Tuple object at 0x7da18f810340> assign[=] tuple[[<ast.Attribute object at 0x7da18f812140>, <ast.Attribute object at 0x7da1b21bfdc0>]] variable[result] assign[=] call[name[torch].zeros, parameter[tuple[[<ast.Name object at 0x7da1b21bc280>, <ast.Name object at 0x7da1b21bd8d0>, <ast.Name object at 0x7da1b21bc2b0>, <ast.Name object at 0x7da1b21bc100>]]]] for taget[tuple[[<ast.Name object at 0x7da1b21bd8a0>, <ast.Tuple object at 0x7da1b21bcc10>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[x], name[self].poolers]]]]] begin[:] variable[idx_in_level] assign[=] call[call[name[torch].nonzero, parameter[compare[name[levels] equal[==] name[level]]]].squeeze, parameter[constant[1]]] variable[rois_per_level] assign[=] call[name[rois]][name[idx_in_level]] call[name[result]][name[idx_in_level]] assign[=] call[name[pooler], parameter[name[per_level_feature], name[rois_per_level]]] return[name[result]]
keyword[def] identifier[forward] ( identifier[self] , identifier[x] , identifier[boxes] ): literal[string] identifier[num_levels] = identifier[len] ( identifier[self] . identifier[poolers] ) identifier[rois] = identifier[self] . identifier[convert_to_roi_format] ( identifier[boxes] ) keyword[if] identifier[num_levels] == literal[int] : keyword[return] identifier[self] . identifier[poolers] [ literal[int] ]( identifier[x] [ literal[int] ], identifier[rois] ) identifier[levels] = identifier[self] . identifier[map_levels] ( identifier[boxes] ) identifier[num_rois] = identifier[len] ( identifier[rois] ) identifier[num_channels] = identifier[x] [ literal[int] ]. identifier[shape] [ literal[int] ] identifier[output_size] = identifier[self] . identifier[output_size] [ literal[int] ] identifier[dtype] , identifier[device] = identifier[x] [ literal[int] ]. identifier[dtype] , identifier[x] [ literal[int] ]. identifier[device] identifier[result] = identifier[torch] . identifier[zeros] ( ( identifier[num_rois] , identifier[num_channels] , identifier[output_size] , identifier[output_size] ), identifier[dtype] = identifier[dtype] , identifier[device] = identifier[device] , ) keyword[for] identifier[level] ,( identifier[per_level_feature] , identifier[pooler] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[x] , identifier[self] . identifier[poolers] )): identifier[idx_in_level] = identifier[torch] . identifier[nonzero] ( identifier[levels] == identifier[level] ). identifier[squeeze] ( literal[int] ) identifier[rois_per_level] = identifier[rois] [ identifier[idx_in_level] ] identifier[result] [ identifier[idx_in_level] ]= identifier[pooler] ( identifier[per_level_feature] , identifier[rois_per_level] ) keyword[return] identifier[result]
def forward(self, x, boxes): """ Arguments: x (list[Tensor]): feature maps for each level boxes (list[BoxList]): boxes to be used to perform the pooling operation. Returns: result (Tensor) """ num_levels = len(self.poolers) rois = self.convert_to_roi_format(boxes) if num_levels == 1: return self.poolers[0](x[0], rois) # depends on [control=['if'], data=[]] levels = self.map_levels(boxes) num_rois = len(rois) num_channels = x[0].shape[1] output_size = self.output_size[0] (dtype, device) = (x[0].dtype, x[0].device) result = torch.zeros((num_rois, num_channels, output_size, output_size), dtype=dtype, device=device) for (level, (per_level_feature, pooler)) in enumerate(zip(x, self.poolers)): idx_in_level = torch.nonzero(levels == level).squeeze(1) rois_per_level = rois[idx_in_level] result[idx_in_level] = pooler(per_level_feature, rois_per_level) # depends on [control=['for'], data=[]] return result
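An isolated sketch of the per-level gather/scatter used in forward(), with toy tensors and a trivial stand-in for the pooler call.

# torch.nonzero(...).squeeze(1) yields the row indices assigned to a level;
# results are scattered back into one tensor in ROI order.
import torch

levels = torch.tensor([0, 1, 0, 1])
values = torch.tensor([10., 20., 30., 40.])
result = torch.zeros(4)
for level in range(2):
    idx_in_level = torch.nonzero(levels == level).squeeze(1)
    result[idx_in_level] = values[idx_in_level] + level  # pooler() stand-in
print(result)  # tensor([10., 21., 30., 41.])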
def summary_df_from_array(results_array, names, axis=0, **kwargs): """Make a pandas DataFrame of the mean and std devs of an array of results, including the uncertainties on the values. This function converts the array to a DataFrame and calls summary_df on it. Parameters ---------- results_array: 2d numpy array names: list of str Names for the output df's columns. axis: int, optional Axis on which to calculate summary statistics. Returns ------- df: MultiIndex DataFrame See summary_df docstring for more details. """ assert axis == 0 or axis == 1 df = pd.DataFrame(results_array) if axis == 1: df = df.T df.columns = names return summary_df(df, **kwargs)
def function[summary_df_from_array, parameter[results_array, names, axis]]: constant[Make a panda data frame of the mean and std devs of an array of results, including the uncertainties on the values. This function converts the array to a DataFrame and calls summary_df on it. Parameters ---------- results_array: 2d numpy array names: list of str Names for the output df's columns. axis: int, optional Axis on which to calculate summary statistics. Returns ------- df: MultiIndex DataFrame See summary_df docstring for more details. ] assert[<ast.BoolOp object at 0x7da1b0fa6da0>] variable[df] assign[=] call[name[pd].DataFrame, parameter[name[results_array]]] if compare[name[axis] equal[==] constant[1]] begin[:] variable[df] assign[=] name[df].T name[df].columns assign[=] name[names] return[call[name[summary_df], parameter[name[df]]]]
keyword[def] identifier[summary_df_from_array] ( identifier[results_array] , identifier[names] , identifier[axis] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[assert] identifier[axis] == literal[int] keyword[or] identifier[axis] == literal[int] identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[results_array] ) keyword[if] identifier[axis] == literal[int] : identifier[df] = identifier[df] . identifier[T] identifier[df] . identifier[columns] = identifier[names] keyword[return] identifier[summary_df] ( identifier[df] ,** identifier[kwargs] )
def summary_df_from_array(results_array, names, axis=0, **kwargs): """Make a pandas DataFrame of the mean and std devs of an array of results, including the uncertainties on the values. This function converts the array to a DataFrame and calls summary_df on it. Parameters ---------- results_array: 2d numpy array names: list of str Names for the output df's columns. axis: int, optional Axis on which to calculate summary statistics. Returns ------- df: MultiIndex DataFrame See summary_df docstring for more details. """ assert axis == 0 or axis == 1 df = pd.DataFrame(results_array) if axis == 1: df = df.T # depends on [control=['if'], data=[]] df.columns = names return summary_df(df, **kwargs)
def _neighbors(coordinate, radius): """ Returns coordinates around given coordinate, within given radius. Includes given coordinate. @param coordinate (numpy.array) N-dimensional integer coordinate @param radius (int) Radius around `coordinate` @return (numpy.array) List of coordinates """ ranges = (xrange(n-radius, n+radius+1) for n in coordinate.tolist()) return numpy.array(list(itertools.product(*ranges)))
def function[_neighbors, parameter[coordinate, radius]]: constant[ Returns coordinates around given coordinate, within given radius. Includes given coordinate. @param coordinate (numpy.array) N-dimensional integer coordinate @param radius (int) Radius around `coordinate` @return (numpy.array) List of coordinates ] variable[ranges] assign[=] <ast.GeneratorExp object at 0x7da18f09dfc0> return[call[name[numpy].array, parameter[call[name[list], parameter[call[name[itertools].product, parameter[<ast.Starred object at 0x7da20c6a8eb0>]]]]]]]
keyword[def] identifier[_neighbors] ( identifier[coordinate] , identifier[radius] ): literal[string] identifier[ranges] =( identifier[xrange] ( identifier[n] - identifier[radius] , identifier[n] + identifier[radius] + literal[int] ) keyword[for] identifier[n] keyword[in] identifier[coordinate] . identifier[tolist] ()) keyword[return] identifier[numpy] . identifier[array] ( identifier[list] ( identifier[itertools] . identifier[product] (* identifier[ranges] )))
def _neighbors(coordinate, radius): """ Returns coordinates around given coordinate, within given radius. Includes given coordinate. @param coordinate (numpy.array) N-dimensional integer coordinate @param radius (int) Radius around `coordinate` @return (numpy.array) List of coordinates """ ranges = (xrange(n - radius, n + radius + 1) for n in coordinate.tolist()) return numpy.array(list(itertools.product(*ranges)))
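A quick 2-D check of the neighborhood construction (xrange marks the original as Python 2; range behaves the same here).

# Cartesian product of per-axis ranges gives the full hypercube of points.
import itertools
import numpy

coordinate = numpy.array([5, 5])
radius = 1
ranges = (range(n - radius, n + radius + 1) for n in coordinate.tolist())
neighborhood = numpy.array(list(itertools.product(*ranges)))
print(len(neighborhood))  # 9 coordinates, including [5, 5] itself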
def parse_archive_uri(uri): """Given an archive URI, parse to a split ident-hash.""" parsed = urlparse(uri) path = parsed.path.rstrip('/').split('/') ident_hash = path[-1] ident_hash = unquote(ident_hash) return ident_hash
def function[parse_archive_uri, parameter[uri]]: constant[Given an archive URI, parse to a split ident-hash.] variable[parsed] assign[=] call[name[urlparse], parameter[name[uri]]] variable[path] assign[=] call[call[name[parsed].path.rstrip, parameter[constant[/]]].split, parameter[constant[/]]] variable[ident_hash] assign[=] call[name[path]][<ast.UnaryOp object at 0x7da1b00d8280>] variable[ident_hash] assign[=] call[name[unquote], parameter[name[ident_hash]]] return[name[ident_hash]]
keyword[def] identifier[parse_archive_uri] ( identifier[uri] ): literal[string] identifier[parsed] = identifier[urlparse] ( identifier[uri] ) identifier[path] = identifier[parsed] . identifier[path] . identifier[rstrip] ( literal[string] ). identifier[split] ( literal[string] ) identifier[ident_hash] = identifier[path] [- literal[int] ] identifier[ident_hash] = identifier[unquote] ( identifier[ident_hash] ) keyword[return] identifier[ident_hash]
def parse_archive_uri(uri): """Given an archive URI, parse to a split ident-hash.""" parsed = urlparse(uri) path = parsed.path.rstrip('/').split('/') ident_hash = path[-1] ident_hash = unquote(ident_hash) return ident_hash
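A round-trip example for parse_archive_uri; the URL is made up.

# Trailing slash is stripped, the last path segment is percent-decoded.
from urllib.parse import urlparse, unquote

uri = 'https://archive.example.org/contents/abc123%405.6/'
path = urlparse(uri).path.rstrip('/').split('/')
print(unquote(path[-1]))  # abc123@5.6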
def as_dict(self): """ Serializes the object necessary data in a dictionary. :returns: Serialized data in a dictionary. :rtype: dict """ element_dict = dict() if hasattr(self, 'namespace'): element_dict['namespace'] = self.namespace if hasattr(self, 'name'): element_dict['name'] = self.name if hasattr(self, 'text'): element_dict['text'] = self.text attr_dict = dict() for attr in self.attrs: if hasattr(self, attr): attr_dict[attr] = getattr(self, attr) element_dict['attrs'] = attr_dict return element_dict
def function[as_dict, parameter[self]]: constant[ Serializes the object necessary data in a dictionary. :returns: Serialized data in a dictionary. :rtype: dict ] variable[element_dict] assign[=] call[name[dict], parameter[]] if call[name[hasattr], parameter[name[self], constant[namespace]]] begin[:] call[name[element_dict]][constant[namespace]] assign[=] name[self].namespace if call[name[hasattr], parameter[name[self], constant[name]]] begin[:] call[name[element_dict]][constant[name]] assign[=] name[self].name if call[name[hasattr], parameter[name[self], constant[text]]] begin[:] call[name[element_dict]][constant[text]] assign[=] name[self].text variable[attr_dict] assign[=] call[name[dict], parameter[]] for taget[name[attr]] in starred[name[self].attrs] begin[:] if call[name[hasattr], parameter[name[self], name[attr]]] begin[:] call[name[attr_dict]][name[attr]] assign[=] call[name[getattr], parameter[name[self], name[attr]]] call[name[element_dict]][constant[attrs]] assign[=] name[attr_dict] return[name[element_dict]]
keyword[def] identifier[as_dict] ( identifier[self] ): literal[string] identifier[element_dict] = identifier[dict] () keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[element_dict] [ literal[string] ]= identifier[self] . identifier[namespace] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[element_dict] [ literal[string] ]= identifier[self] . identifier[name] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[element_dict] [ literal[string] ]= identifier[self] . identifier[text] identifier[attr_dict] = identifier[dict] () keyword[for] identifier[attr] keyword[in] identifier[self] . identifier[attrs] : keyword[if] identifier[hasattr] ( identifier[self] , identifier[attr] ): identifier[attr_dict] [ identifier[attr] ]= identifier[getattr] ( identifier[self] , identifier[attr] ) identifier[element_dict] [ literal[string] ]= identifier[attr_dict] keyword[return] identifier[element_dict]
def as_dict(self): """ Serializes the object necessary data in a dictionary. :returns: Serialized data in a dictionary. :rtype: dict """ element_dict = dict() if hasattr(self, 'namespace'): element_dict['namespace'] = self.namespace # depends on [control=['if'], data=[]] if hasattr(self, 'name'): element_dict['name'] = self.name # depends on [control=['if'], data=[]] if hasattr(self, 'text'): element_dict['text'] = self.text # depends on [control=['if'], data=[]] attr_dict = dict() for attr in self.attrs: if hasattr(self, attr): attr_dict[attr] = getattr(self, attr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']] element_dict['attrs'] = attr_dict return element_dict
def dump_all(data_list, stream=None, **kwargs): """ Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead. """ return yaml.dump_all( data_list, stream=stream, Dumper=YAMLDictDumper, **kwargs )
def function[dump_all, parameter[data_list, stream]]: constant[ Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead. ] return[call[name[yaml].dump_all, parameter[name[data_list]]]]
keyword[def] identifier[dump_all] ( identifier[data_list] , identifier[stream] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[yaml] . identifier[dump_all] ( identifier[data_list] , identifier[stream] = identifier[stream] , identifier[Dumper] = identifier[YAMLDictDumper] , ** identifier[kwargs] )
def dump_all(data_list, stream=None, **kwargs): """ Serialize YAMLDict into a YAML stream. If stream is None, return the produced string instead. """ return yaml.dump_all(data_list, stream=stream, Dumper=YAMLDictDumper, **kwargs)
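A usage sketch with PyYAML's stock dumper; the custom YAMLDictDumper that the wrapper pins comes from the surrounding module.

# dump_all serializes a sequence of documents, separated by '---'.
import yaml

docs = yaml.dump_all([{'a': 1}, {'b': 2}])
print(docs)  # two YAML documents, '---' between them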
def cancel_order(order): """ Cancel an order. :param order: the order object to be cancelled :type order: :class:`~Order` object """ env = Environment.get_instance() if env.can_cancel_order(order): env.broker.cancel_order(order) return order
def function[cancel_order, parameter[order]]: constant[ Cancel an order. :param order: the order object to be cancelled :type order: :class:`~Order` object ] variable[env] assign[=] call[name[Environment].get_instance, parameter[]] if call[name[env].can_cancel_order, parameter[name[order]]] begin[:] call[name[env].broker.cancel_order, parameter[name[order]]] return[name[order]]
keyword[def] identifier[cancel_order] ( identifier[order] ): literal[string] identifier[env] = identifier[Environment] . identifier[get_instance] () keyword[if] identifier[env] . identifier[can_cancel_order] ( identifier[order] ): identifier[env] . identifier[broker] . identifier[cancel_order] ( identifier[order] ) keyword[return] identifier[order]
def cancel_order(order): """ Cancel an order. :param order: the order object to be cancelled :type order: :class:`~Order` object """ env = Environment.get_instance() if env.can_cancel_order(order): env.broker.cancel_order(order) # depends on [control=['if'], data=[]] return order
def __update_siblings_active_labels_states(self, active_label): """ Updates given **Active_QLabel** Widget siblings states. :param active_label: Active label. :type active_label: Active_QLabel """ LOGGER.debug("> Clicked 'Active_QLabel': '{0}'.".format(active_label)) for item in self.__active_labels: if item is active_label: continue umbra.ui.common.signals_blocker(item, item.set_checked, False)
def function[__update_siblings_active_labels_states, parameter[self, active_label]]: constant[ Updates given **Active_QLabel** Widget siblings states. :param active_label: Active label. :type active_label: Active_QLabel ] call[name[LOGGER].debug, parameter[call[constant[> Clicked 'Active_QLabel': '{0}'.].format, parameter[name[active_label]]]]] for taget[name[item]] in starred[name[self].__active_labels] begin[:] if compare[name[item] is name[active_label]] begin[:] continue call[name[umbra].ui.common.signals_blocker, parameter[name[item], name[item].set_checked, constant[False]]]
keyword[def] identifier[__update_siblings_active_labels_states] ( identifier[self] , identifier[active_label] ): literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[active_label] )) keyword[for] identifier[item] keyword[in] identifier[self] . identifier[__active_labels] : keyword[if] identifier[item] keyword[is] identifier[active_label] : keyword[continue] identifier[umbra] . identifier[ui] . identifier[common] . identifier[signals_blocker] ( identifier[item] , identifier[item] . identifier[set_checked] , keyword[False] )
def __update_siblings_active_labels_states(self, active_label): """ Updates given **Active_QLabel** Widget siblings states. :param active_label: Active label. :type active_label: Active_QLabel """ LOGGER.debug("> Clicked 'Active_QLabel': '{0}'.".format(active_label)) for item in self.__active_labels: if item is active_label: continue # depends on [control=['if'], data=[]] umbra.ui.common.signals_blocker(item, item.set_checked, False) # depends on [control=['for'], data=['item']]
async def patch_register(self, register: Dict, request: 'Request'): """ Store all options in the "choices" sub-register. We store both the text and the potential intent, in order to match both regular quick reply clicks but also the user typing stuff on his keyboard that matches more or less the content of quick replies. """ register['choices'] = { o.slug: { 'intent': o.intent.key if o.intent else None, 'text': await render(o.text, request), } for o in self.options if isinstance(o, QuickRepliesList.TextOption) } return register
<ast.AsyncFunctionDef object at 0x7da20e9605b0>
keyword[async] keyword[def] identifier[patch_register] ( identifier[self] , identifier[register] : identifier[Dict] , identifier[request] : literal[string] ): literal[string] identifier[register] [ literal[string] ]={ identifier[o] . identifier[slug] :{ literal[string] : identifier[o] . identifier[intent] . identifier[key] keyword[if] identifier[o] . identifier[intent] keyword[else] keyword[None] , literal[string] : keyword[await] identifier[render] ( identifier[o] . identifier[text] , identifier[request] ), } keyword[for] identifier[o] keyword[in] identifier[self] . identifier[options] keyword[if] identifier[isinstance] ( identifier[o] , identifier[QuickRepliesList] . identifier[TextOption] ) } keyword[return] identifier[register]
async def patch_register(self, register: Dict, request: 'Request'): """ Store all options in the "choices" sub-register. We store both the text and the potential intent, in order to match both regular quick reply clicks but also the user typing stuff on his keyboard that matches more or less the content of quick replies. """ register['choices'] = {o.slug: {'intent': o.intent.key if o.intent else None, 'text': await render(o.text, request)} for o in self.options if isinstance(o, QuickRepliesList.TextOption)} return register
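For reference, this is the shape of the sub-register the coroutine builds; the slugs, intents and texts below are illustrative values only.

register = {
    'choices': {
        'yes': {'intent': 'CONFIRM', 'text': 'Yes please'},
        'no': {'intent': None, 'text': 'No thanks'},
    },
}
# A later turn can resolve a quick-reply click by slug, or fuzzy-match
# typed input against register['choices'][slug]['text'].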
def get_xref_graph(ont): """ Creates a basic graph object corresponding to a remote ontology """ g = networkx.MultiGraph() for (c,x) in fetchall_xrefs(ont): g.add_edge(c,x,source=c) return g
def function[get_xref_graph, parameter[ont]]: constant[ Creates a basic graph object corresponding to a remote ontology ] variable[g] assign[=] call[name[networkx].MultiGraph, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20e9562c0>, <ast.Name object at 0x7da1b083e470>]]] in starred[call[name[fetchall_xrefs], parameter[name[ont]]]] begin[:] call[name[g].add_edge, parameter[name[c], name[x]]] return[name[g]]
keyword[def] identifier[get_xref_graph] ( identifier[ont] ): literal[string] identifier[g] = identifier[networkx] . identifier[MultiGraph] () keyword[for] ( identifier[c] , identifier[x] ) keyword[in] identifier[fetchall_xrefs] ( identifier[ont] ): identifier[g] . identifier[add_edge] ( identifier[c] , identifier[x] , identifier[source] = identifier[c] ) keyword[return] identifier[g]
def get_xref_graph(ont): """ Creates a basic graph object corresponding to a remote ontology """ g = networkx.MultiGraph() for (c, x) in fetchall_xrefs(ont): g.add_edge(c, x, source=c) # depends on [control=['for'], data=[]] return g
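A minimal sketch of the resulting graph, with fetchall_xrefs stubbed out (the real function fetches (term, xref) pairs from a remote ontology).

import networkx

def fetchall_xrefs(ont):
    # Stub: pretend the remote ontology returned these pairs.
    return [('GO:1', 'X:1'), ('GO:1', 'X:2'), ('GO:2', 'X:1')]

g = networkx.MultiGraph()
for c, x in fetchall_xrefs('go'):
    g.add_edge(c, x, source=c)

print(g.number_of_edges())         # 3
print(sorted(g.neighbors('X:1')))  # ['GO:1', 'GO:2']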
def is_all_field_none(self): """ :rtype: bool """ if self._type_ is not None: return False if self._value is not None: return False if self._name is not None: return False return True
def function[is_all_field_none, parameter[self]]: constant[ :rtype: bool ] if compare[name[self]._type_ is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._value is_not constant[None]] begin[:] return[constant[False]] if compare[name[self]._name is_not constant[None]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_all_field_none] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_type_] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_value] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_name] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_all_field_none(self): """ :rtype: bool """ if self._type_ is not None: return False # depends on [control=['if'], data=[]] if self._value is not None: return False # depends on [control=['if'], data=[]] if self._name is not None: return False # depends on [control=['if'], data=[]] return True
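The repeated if-chains can be collapsed; one possible behaviourally equivalent one-liner over the same three private attributes:

def is_all_field_none(self):
    """:rtype: bool"""
    return all(v is None for v in (self._type_, self._value, self._name))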
def _get_manager_class(self, register_callables=False): """ Returns a new 'Manager' subclass with registered methods. If 'register_callable' is True, defines the 'callable' arguments. """ class _EvaluatorSyncManager(managers.BaseManager): """ A custom BaseManager. Please see the documentation of `multiprocessing` for more information. """ pass inqueue = queue.Queue() outqueue = queue.Queue() namespace = Namespace() if register_callables: _EvaluatorSyncManager.register( "get_inqueue", callable=lambda: inqueue, ) _EvaluatorSyncManager.register( "get_outqueue", callable=lambda: outqueue, ) _EvaluatorSyncManager.register( "get_state", callable=self._get_secondary_state, ) _EvaluatorSyncManager.register( "set_state", callable=lambda v: self._secondary_state.set(v), ) _EvaluatorSyncManager.register( "get_namespace", callable=lambda: namespace, ) else: _EvaluatorSyncManager.register( "get_inqueue", ) _EvaluatorSyncManager.register( "get_outqueue", ) _EvaluatorSyncManager.register( "get_state", ) _EvaluatorSyncManager.register( "set_state", ) _EvaluatorSyncManager.register( "get_namespace", ) return _EvaluatorSyncManager
def function[_get_manager_class, parameter[self, register_callables]]: constant[ Returns a new 'Manager' subclass with registered methods. If 'register_callable' is True, defines the 'callable' arguments. ] class class[_EvaluatorSyncManager, parameter[]] begin[:] constant[ A custom BaseManager. Please see the documentation of `multiprocessing` for more information. ] pass variable[inqueue] assign[=] call[name[queue].Queue, parameter[]] variable[outqueue] assign[=] call[name[queue].Queue, parameter[]] variable[namespace] assign[=] call[name[Namespace], parameter[]] if name[register_callables] begin[:] call[name[_EvaluatorSyncManager].register, parameter[constant[get_inqueue]]] call[name[_EvaluatorSyncManager].register, parameter[constant[get_outqueue]]] call[name[_EvaluatorSyncManager].register, parameter[constant[get_state]]] call[name[_EvaluatorSyncManager].register, parameter[constant[set_state]]] call[name[_EvaluatorSyncManager].register, parameter[constant[get_namespace]]] return[name[_EvaluatorSyncManager]]
keyword[def] identifier[_get_manager_class] ( identifier[self] , identifier[register_callables] = keyword[False] ): literal[string] keyword[class] identifier[_EvaluatorSyncManager] ( identifier[managers] . identifier[BaseManager] ): literal[string] keyword[pass] identifier[inqueue] = identifier[queue] . identifier[Queue] () identifier[outqueue] = identifier[queue] . identifier[Queue] () identifier[namespace] = identifier[Namespace] () keyword[if] identifier[register_callables] : identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , identifier[callable] = keyword[lambda] : identifier[inqueue] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , identifier[callable] = keyword[lambda] : identifier[outqueue] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , identifier[callable] = identifier[self] . identifier[_get_secondary_state] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , identifier[callable] = keyword[lambda] identifier[v] : identifier[self] . identifier[_secondary_state] . identifier[set] ( identifier[v] ), ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , identifier[callable] = keyword[lambda] : identifier[namespace] , ) keyword[else] : identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , ) identifier[_EvaluatorSyncManager] . identifier[register] ( literal[string] , ) keyword[return] identifier[_EvaluatorSyncManager]
def _get_manager_class(self, register_callables=False): """ Returns a new 'Manager' subclass with registered methods. If 'register_callable' is True, defines the 'callable' arguments. """ class _EvaluatorSyncManager(managers.BaseManager): """ A custom BaseManager. Please see the documentation of `multiprocessing` for more information. """ pass inqueue = queue.Queue() outqueue = queue.Queue() namespace = Namespace() if register_callables: _EvaluatorSyncManager.register('get_inqueue', callable=lambda : inqueue) _EvaluatorSyncManager.register('get_outqueue', callable=lambda : outqueue) _EvaluatorSyncManager.register('get_state', callable=self._get_secondary_state) _EvaluatorSyncManager.register('set_state', callable=lambda v: self._secondary_state.set(v)) _EvaluatorSyncManager.register('get_namespace', callable=lambda : namespace) # depends on [control=['if'], data=[]] else: _EvaluatorSyncManager.register('get_inqueue') _EvaluatorSyncManager.register('get_outqueue') _EvaluatorSyncManager.register('get_state') _EvaluatorSyncManager.register('set_state') _EvaluatorSyncManager.register('get_namespace') return _EvaluatorSyncManager
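A sketch of how the two roles would use such a manager class; the address, authkey and the evaluator instance are illustrative, not taken from this module.

# Primary process: register real callables and start serving.
ServerManager = evaluator._get_manager_class(register_callables=True)
server = ServerManager(address=('127.0.0.1', 50000), authkey=b'secret')
server.start()

# Secondary process: same registered names, no callables, then connect.
ClientManager = evaluator._get_manager_class(register_callables=False)
client = ClientManager(address=('127.0.0.1', 50000), authkey=b'secret')
client.connect()
client.get_inqueue().put('work-item')  # proxied queue.Queue.put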
def corr(self): '''The correlation matrix''' cov = self.cov() N = cov.shape[0] corr = ndarray((N,N)) for r in range(N): for c in range(r): corr[r,c] = corr[c,r] = cov[r,c]/sqrt(cov[r,r]*cov[c,c]) corr[r,r] = 1. return corr
def function[corr, parameter[self]]: constant[The correlation matrix] variable[cov] assign[=] call[name[self].cov, parameter[]] variable[N] assign[=] call[name[cov].shape][constant[0]] variable[corr] assign[=] call[name[ndarray], parameter[tuple[[<ast.Name object at 0x7da1b0e17340>, <ast.Name object at 0x7da1b0e17250>]]]] for taget[name[r]] in starred[call[name[range], parameter[name[N]]]] begin[:] for taget[name[c]] in starred[call[name[range], parameter[name[r]]]] begin[:] call[name[corr]][tuple[[<ast.Name object at 0x7da1b0e17d90>, <ast.Name object at 0x7da1b0e15930>]]] assign[=] binary_operation[call[name[cov]][tuple[[<ast.Name object at 0x7da1b0f2e440>, <ast.Name object at 0x7da1b0f2e2c0>]]] / call[name[sqrt], parameter[binary_operation[call[name[cov]][tuple[[<ast.Name object at 0x7da1b0f2ed40>, <ast.Name object at 0x7da1b0f2f130>]]] * call[name[cov]][tuple[[<ast.Name object at 0x7da1b0f2efb0>, <ast.Name object at 0x7da1b0f2efe0>]]]]]]] call[name[corr]][tuple[[<ast.Name object at 0x7da1b0f2cdc0>, <ast.Name object at 0x7da1b0f2cb50>]]] assign[=] constant[1.0] return[name[corr]]
keyword[def] identifier[corr] ( identifier[self] ): literal[string] identifier[cov] = identifier[self] . identifier[cov] () identifier[N] = identifier[cov] . identifier[shape] [ literal[int] ] identifier[corr] = identifier[ndarray] (( identifier[N] , identifier[N] )) keyword[for] identifier[r] keyword[in] identifier[range] ( identifier[N] ): keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[r] ): identifier[corr] [ identifier[r] , identifier[c] ]= identifier[corr] [ identifier[c] , identifier[r] ]= identifier[cov] [ identifier[r] , identifier[c] ]/ identifier[sqrt] ( identifier[cov] [ identifier[r] , identifier[r] ]* identifier[cov] [ identifier[c] , identifier[c] ]) identifier[corr] [ identifier[r] , identifier[r] ]= literal[int] keyword[return] identifier[corr]
def corr(self): """The correlation matrix""" cov = self.cov() N = cov.shape[0] corr = ndarray((N, N)) for r in range(N): for c in range(r): corr[r, c] = corr[c, r] = cov[r, c] / sqrt(cov[r, r] * cov[c, c]) # depends on [control=['for'], data=['c']] corr[r, r] = 1.0 # depends on [control=['for'], data=['r']] return corr
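The loop computes corr[r, c] = cov[r, c] / sqrt(cov[r, r] * cov[c, c]). A quick vectorised cross-check with NumPy, assuming self.cov() agrees with np.cov:

import numpy as np

samples = np.array([[1.0, 2.0, 3.0, 4.0],
                    [2.0, 4.1, 5.9, 8.2]])  # two variables, four samples
cov = np.cov(samples)
d = np.sqrt(np.diag(cov))
corr = cov / np.outer(d, d)  # same normalisation as the loop above
np.testing.assert_allclose(np.diag(corr), 1.0)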
def run_once(func): ''' Decorator for making sure a method can only be executed once ''' def wrapper(*args, **kwargs): if not wrapper.has_run: wrapper.has_run = True return func(*args, **kwargs) wrapper.has_run = False return wrapper
def function[run_once, parameter[func]]: constant[ Decorator for making sure a method can only be executed once ] def function[wrapper, parameter[]]: if <ast.UnaryOp object at 0x7da20c991360> begin[:] name[wrapper].has_run assign[=] constant[True] return[call[name[func], parameter[<ast.Starred object at 0x7da20c992d40>]]] name[wrapper].has_run assign[=] constant[False] return[name[wrapper]]
keyword[def] identifier[run_once] ( identifier[func] ): literal[string] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): keyword[if] keyword[not] identifier[wrapper] . identifier[has_run] : identifier[wrapper] . identifier[has_run] = keyword[True] keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] ) identifier[wrapper] . identifier[has_run] = keyword[False] keyword[return] identifier[wrapper]
def run_once(func): """ Decorator for making sure a method can only be executed once """ def wrapper(*args, **kwargs): if not wrapper.has_run: wrapper.has_run = True return func(*args, **kwargs) # depends on [control=['if'], data=[]] wrapper.has_run = False return wrapper
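A quick demonstration; note the caveat that calls after the first return None, because the wrapper falls through without a return value.

@run_once
def initialize():
    print('initializing')
    return 42

print(initialize())  # prints 'initializing', then 42
print(initialize())  # prints None: the wrapped call is skipped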
def transform_annotation(self, ann, duration): '''Transform an annotation to dynamic label encoding. Parameters ---------- ann : jams.Annotation The annotation to convert duration : number > 0 The duration of the track Returns ------- data : dict data['tags'] : np.ndarray, shape=(n, n_labels) A time-varying binary encoding of the labels ''' intervals, values = ann.to_interval_values() # Suppress all intervals not in the encoder tags = [] for v in values: if v in self._classes: tags.extend(self.encoder.transform([[v]])) else: tags.extend(self.encoder.transform([[]])) tags = np.asarray(tags) target = self.encode_intervals(duration, intervals, tags) return {'tags': target}
def function[transform_annotation, parameter[self, ann, duration]]: constant[Transform an annotation to dynamic label encoding. Parameters ---------- ann : jams.Annotation The annotation to convert duration : number > 0 The duration of the track Returns ------- data : dict data['tags'] : np.ndarray, shape=(n, n_labels) A time-varying binary encoding of the labels ] <ast.Tuple object at 0x7da20c76f9a0> assign[=] call[name[ann].to_interval_values, parameter[]] variable[tags] assign[=] list[[]] for taget[name[v]] in starred[name[values]] begin[:] if compare[name[v] in name[self]._classes] begin[:] call[name[tags].extend, parameter[call[name[self].encoder.transform, parameter[list[[<ast.List object at 0x7da1b10e7430>]]]]]] variable[tags] assign[=] call[name[np].asarray, parameter[name[tags]]] variable[target] assign[=] call[name[self].encode_intervals, parameter[name[duration], name[intervals], name[tags]]] return[dictionary[[<ast.Constant object at 0x7da1b10e7b50>], [<ast.Name object at 0x7da1b10e7370>]]]
keyword[def] identifier[transform_annotation] ( identifier[self] , identifier[ann] , identifier[duration] ): literal[string] identifier[intervals] , identifier[values] = identifier[ann] . identifier[to_interval_values] () identifier[tags] =[] keyword[for] identifier[v] keyword[in] identifier[values] : keyword[if] identifier[v] keyword[in] identifier[self] . identifier[_classes] : identifier[tags] . identifier[extend] ( identifier[self] . identifier[encoder] . identifier[transform] ([[ identifier[v] ]])) keyword[else] : identifier[tags] . identifier[extend] ( identifier[self] . identifier[encoder] . identifier[transform] ([[]])) identifier[tags] = identifier[np] . identifier[asarray] ( identifier[tags] ) identifier[target] = identifier[self] . identifier[encode_intervals] ( identifier[duration] , identifier[intervals] , identifier[tags] ) keyword[return] { literal[string] : identifier[target] }
def transform_annotation(self, ann, duration): """Transform an annotation to dynamic label encoding. Parameters ---------- ann : jams.Annotation The annotation to convert duration : number > 0 The duration of the track Returns ------- data : dict data['tags'] : np.ndarray, shape=(n, n_labels) A time-varying binary encoding of the labels """ (intervals, values) = ann.to_interval_values() # Suppress all intervals not in the encoder tags = [] for v in values: if v in self._classes: tags.extend(self.encoder.transform([[v]])) # depends on [control=['if'], data=['v']] else: tags.extend(self.encoder.transform([[]])) # depends on [control=['for'], data=['v']] tags = np.asarray(tags) target = self.encode_intervals(duration, intervals, tags) return {'tags': target}
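The essential behaviour is that values outside the trained vocabulary become all-zero rows. A minimal sketch with scikit-learn's MultiLabelBinarizer standing in for self.encoder:

import numpy as np
from sklearn.preprocessing import MultiLabelBinarizer

encoder = MultiLabelBinarizer()
encoder.fit([['jazz', 'pop', 'rock']])
classes = set(encoder.classes_)

tags = []
for v in ['rock', 'blues']:  # 'blues' is out of vocabulary
    if v in classes:
        tags.extend(encoder.transform([[v]]))
    else:
        tags.extend(encoder.transform([[]]))  # all-zero fallback row

tags = np.asarray(tags)
print(tags)  # [[0 0 1], [0 0 0]]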
def _get_accepted(self, graph): """ Find the accepted states Args: graph (DFA): The DFA states Return: list: Returns the list of the accepted states """ accepted = [] for state in graph.states: if state.final != TropicalWeight(float('inf')): accepted.append(state) return accepted
def function[_get_accepted, parameter[self, graph]]: constant[ Find the accepted states Args: graph (DFA): The DFA states Return: list: Returns the list of the accepted states ] variable[accepted] assign[=] list[[]] for taget[name[state]] in starred[name[graph].states] begin[:] if compare[name[state].final not_equal[!=] call[name[TropicalWeight], parameter[call[name[float], parameter[constant[inf]]]]]] begin[:] call[name[accepted].append, parameter[name[state]]] return[name[accepted]]
keyword[def] identifier[_get_accepted] ( identifier[self] , identifier[graph] ): literal[string] identifier[accepted] =[] keyword[for] identifier[state] keyword[in] identifier[graph] . identifier[states] : keyword[if] identifier[state] . identifier[final] != identifier[TropicalWeight] ( identifier[float] ( literal[string] )): identifier[accepted] . identifier[append] ( identifier[state] ) keyword[return] identifier[accepted]
def _get_accepted(self, graph): """ Find the accepted states Args: graph (DFA): The DFA states Return: list: Returns the list of the accepted states """ accepted = [] for state in graph.states: if state.final != TropicalWeight(float('inf')): accepted.append(state) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['state']] return accepted
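In a tropical-semiring automaton a final weight of infinity marks a non-accepting state, which is what the inequality tests. A stand-alone sketch with stub State and TropicalWeight classes:

class TropicalWeight:
    """Stub: the real class compares weights by value."""

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return self.value == other.value

    def __ne__(self, other):
        return not self.__eq__(other)

class State:
    def __init__(self, final):
        self.final = final

inf = TropicalWeight(float('inf'))
states = [State(TropicalWeight(0.0)), State(inf)]
accepted = [s for s in states if s.final != inf]
assert len(accepted) == 1  # only the finite-weight state accepts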
def get_mail_addresses(message, header_name): """ Retrieve all email addresses from one message header. """ headers = [h for h in message.get_all(header_name, [])] addresses = email.utils.getaddresses(headers) for index, (address_name, address_email) in enumerate(addresses): addresses[index] = {'name': decode_mail_header(address_name), 'email': address_email} logger.debug("{} Mail address in message: <{}> {}".format( header_name.upper(), address_name, address_email)) return addresses
def function[get_mail_addresses, parameter[message, header_name]]: constant[ Retrieve all email addresses from one message header. ] variable[headers] assign[=] <ast.ListComp object at 0x7da2041db250> variable[addresses] assign[=] call[name[email].utils.getaddresses, parameter[name[headers]]] for taget[tuple[[<ast.Name object at 0x7da2041da200>, <ast.Tuple object at 0x7da2041db9d0>]]] in starred[call[name[enumerate], parameter[name[addresses]]]] begin[:] call[name[addresses]][name[index]] assign[=] dictionary[[<ast.Constant object at 0x7da2041db5b0>, <ast.Constant object at 0x7da2041d8a90>], [<ast.Call object at 0x7da2041dac50>, <ast.Name object at 0x7da2041d8250>]] call[name[logger].debug, parameter[call[constant[{} Mail address in message: <{}> {}].format, parameter[call[name[header_name].upper, parameter[]], name[address_name], name[address_email]]]]] return[name[addresses]]
keyword[def] identifier[get_mail_addresses] ( identifier[message] , identifier[header_name] ): literal[string] identifier[headers] =[ identifier[h] keyword[for] identifier[h] keyword[in] identifier[message] . identifier[get_all] ( identifier[header_name] ,[])] identifier[addresses] = identifier[email] . identifier[utils] . identifier[getaddresses] ( identifier[headers] ) keyword[for] identifier[index] ,( identifier[address_name] , identifier[address_email] ) keyword[in] identifier[enumerate] ( identifier[addresses] ): identifier[addresses] [ identifier[index] ]={ literal[string] : identifier[decode_mail_header] ( identifier[address_name] ), literal[string] : identifier[address_email] } identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[header_name] . identifier[upper] (), identifier[address_name] , identifier[address_email] )) keyword[return] identifier[addresses]
def get_mail_addresses(message, header_name): """ Retrieve all email addresses from one message header. """ headers = [h for h in message.get_all(header_name, [])] addresses = email.utils.getaddresses(headers) for (index, (address_name, address_email)) in enumerate(addresses): addresses[index] = {'name': decode_mail_header(address_name), 'email': address_email} logger.debug('{} Mail address in message: <{}> {}'.format(header_name.upper(), address_name, address_email)) # depends on [control=['for'], data=[]] return addresses
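A usage sketch; decode_mail_header and the logger are this module's own helpers, so only the message construction below is new.

import email

raw = ('From: Alice Example <alice@example.com>\n'
       'To: bob@example.com\n'
       '\n'
       'Hello Bob')
msg = email.message_from_string(raw)
print(get_mail_addresses(msg, 'from'))
# -> [{'name': 'Alice Example', 'email': 'alice@example.com'}]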