column            type     min length   max length
code              string   75           104k
code_sememe       string   47           309k
token_type        string   215          214k
code_dependency   string   75           155k
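Each row below gives the same Python function in four parallel views: the raw source (code), an AST-style sememe serialization (code_sememe), a token-type tagged stream (token_type), and the source annotated with control-dependency comments (code_dependency). As a minimal sketch of how one might iterate over such rows — assuming, hypothetically, that they are stored as JSON Lines with these four string fields (the dump itself does not state the storage format) — consider:

import json

# Hypothetical file name; the actual location/serialization is an assumption.
with open('code_representations.jsonl') as f:
    for raw in f:
        row = json.loads(raw)
        # The four parallel representations of one function:
        source = row['code']                 # original Python source
        sememe = row['code_sememe']          # AST-style serialization
        tokens = row['token_type']           # keyword/identifier/literal tags
        dependency = row['code_dependency']  # source + control-dependency comments
        print(source.splitlines()[0])        # e.g. the def line of the sample
        break  # inspect just the first row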
def parse_fixed_width(types, lines):
    """Parse a fixed width line."""
    values = []
    line = []
    for width, parser in types:
        if not line:
            line = lines.pop(0).replace('\n', '')
        values.append(parser(line[:width]))
        line = line[width:]
    return values
def function[parse_fixed_width, parameter[types, lines]]: constant[Parse a fixed width line.] variable[values] assign[=] list[[]] variable[line] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b2347ca0>, <ast.Name object at 0x7da1b2347c70>]]] in starred[name[types]] begin[:] if <ast.UnaryOp object at 0x7da1b2347be0> begin[:] variable[line] assign[=] call[call[name[lines].pop, parameter[constant[0]]].replace, parameter[constant[ ], constant[]]] call[name[values].append, parameter[call[name[parser], parameter[call[name[line]][<ast.Slice object at 0x7da1b242a590>]]]]] variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da1b24b3f40>] return[name[values]]
keyword[def] identifier[parse_fixed_width] ( identifier[types] , identifier[lines] ): literal[string] identifier[values] =[] identifier[line] =[] keyword[for] identifier[width] , identifier[parser] keyword[in] identifier[types] : keyword[if] keyword[not] identifier[line] : identifier[line] = identifier[lines] . identifier[pop] ( literal[int] ). identifier[replace] ( literal[string] , literal[string] ) identifier[values] . identifier[append] ( identifier[parser] ( identifier[line] [: identifier[width] ])) identifier[line] = identifier[line] [ identifier[width] :] keyword[return] identifier[values]
def parse_fixed_width(types, lines):
    """Parse a fixed width line."""
    values = []
    line = []
    for (width, parser) in types:
        if not line:
            line = lines.pop(0).replace('\n', '') # depends on [control=['if'], data=[]]
        values.append(parser(line[:width]))
        line = line[width:] # depends on [control=['for'], data=[]]
    return values
def tile_to_path(self, tile):
    '''return full path to a tile'''
    return os.path.join(self.cache_path, self.service, tile.path())
def function[tile_to_path, parameter[self, tile]]: constant[return full path to a tile] return[call[name[os].path.join, parameter[name[self].cache_path, name[self].service, call[name[tile].path, parameter[]]]]]
keyword[def] identifier[tile_to_path] ( identifier[self] , identifier[tile] ): literal[string] keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[cache_path] , identifier[self] . identifier[service] , identifier[tile] . identifier[path] ())
def tile_to_path(self, tile):
    """return full path to a tile"""
    return os.path.join(self.cache_path, self.service, tile.path())
def inertia_tensor(self):
    """the inertia tensor of the molecule"""
    result = np.zeros((3,3), float)
    for i in range(self.size):
        r = self.coordinates[i] - self.com
        # the diagonal term
        result.ravel()[::4] += self.masses[i]*(r**2).sum()
        # the outer product term
        result -= self.masses[i]*np.outer(r,r)
    return result
def function[inertia_tensor, parameter[self]]: constant[the inertia tensor of the molecule] variable[result] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da20c6a8280>, <ast.Constant object at 0x7da20c6a9ff0>]], name[float]]] for taget[name[i]] in starred[call[name[range], parameter[name[self].size]]] begin[:] variable[r] assign[=] binary_operation[call[name[self].coordinates][name[i]] - name[self].com] <ast.AugAssign object at 0x7da20c6a9ab0> <ast.AugAssign object at 0x7da20c6a9e40> return[name[result]]
keyword[def] identifier[inertia_tensor] ( identifier[self] ): literal[string] identifier[result] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] ), identifier[float] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[size] ): identifier[r] = identifier[self] . identifier[coordinates] [ identifier[i] ]- identifier[self] . identifier[com] identifier[result] . identifier[ravel] ()[:: literal[int] ]+= identifier[self] . identifier[masses] [ identifier[i] ]*( identifier[r] ** literal[int] ). identifier[sum] () identifier[result] -= identifier[self] . identifier[masses] [ identifier[i] ]* identifier[np] . identifier[outer] ( identifier[r] , identifier[r] ) keyword[return] identifier[result]
def inertia_tensor(self):
    """the inertia tensor of the molecule"""
    result = np.zeros((3, 3), float)
    for i in range(self.size):
        r = self.coordinates[i] - self.com
        # the diagonal term
        result.ravel()[::4] += self.masses[i] * (r ** 2).sum()
        # the outer product term
        result -= self.masses[i] * np.outer(r, r) # depends on [control=['for'], data=['i']]
    return result
def logout(self):
    """
    Logout and remove vid
    """
    response = None
    try:
        response = requests.delete(
            urls.login(),
            headers={
                'Cookie': 'vid={}'.format(self._vid)})
    except requests.exceptions.RequestException as ex:
        raise RequestError(ex)
    _validate_response(response)
def function[logout, parameter[self]]: constant[ Logout and remove vid ] variable[response] assign[=] constant[None] <ast.Try object at 0x7da1b1020f10> call[name[_validate_response], parameter[name[response]]]
keyword[def] identifier[logout] ( identifier[self] ): literal[string] identifier[response] = keyword[None] keyword[try] : identifier[response] = identifier[requests] . identifier[delete] ( identifier[urls] . identifier[login] (), identifier[headers] ={ literal[string] : literal[string] . identifier[format] ( identifier[self] . identifier[_vid] )}) keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[ex] : keyword[raise] identifier[RequestError] ( identifier[ex] ) identifier[_validate_response] ( identifier[response] )
def logout(self):
    """
    Logout and remove vid
    """
    response = None
    try:
        response = requests.delete(urls.login(), headers={'Cookie': 'vid={}'.format(self._vid)}) # depends on [control=['try'], data=[]]
    except requests.exceptions.RequestException as ex:
        raise RequestError(ex) # depends on [control=['except'], data=['ex']]
    _validate_response(response)
def analysis_summary_report(feature, parent):
    """Retrieve an HTML table report of the currently selected analysis.
    """
    _ = feature, parent  # NOQA
    project_context_scope = QgsExpressionContextUtils.projectScope(
        QgsProject.instance())
    key = provenance_layer_analysis_impacted['provenance_key']
    if not project_context_scope.hasVariable(key):
        return None
    analysis_dir = dirname(project_context_scope.variable(key))
    return get_impact_report_as_string(analysis_dir)
def function[analysis_summary_report, parameter[feature, parent]]: constant[Retrieve an HTML table report of the currently selected analysis. ] variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0c513c0>, <ast.Name object at 0x7da1b0c50130>]] variable[project_context_scope] assign[=] call[name[QgsExpressionContextUtils].projectScope, parameter[call[name[QgsProject].instance, parameter[]]]] variable[key] assign[=] call[name[provenance_layer_analysis_impacted]][constant[provenance_key]] if <ast.UnaryOp object at 0x7da1b0c51d80> begin[:] return[constant[None]] variable[analysis_dir] assign[=] call[name[dirname], parameter[call[name[project_context_scope].variable, parameter[name[key]]]]] return[call[name[get_impact_report_as_string], parameter[name[analysis_dir]]]]
keyword[def] identifier[analysis_summary_report] ( identifier[feature] , identifier[parent] ): literal[string] identifier[_] = identifier[feature] , identifier[parent] identifier[project_context_scope] = identifier[QgsExpressionContextUtils] . identifier[projectScope] ( identifier[QgsProject] . identifier[instance] ()) identifier[key] = identifier[provenance_layer_analysis_impacted] [ literal[string] ] keyword[if] keyword[not] identifier[project_context_scope] . identifier[hasVariable] ( identifier[key] ): keyword[return] keyword[None] identifier[analysis_dir] = identifier[dirname] ( identifier[project_context_scope] . identifier[variable] ( identifier[key] )) keyword[return] identifier[get_impact_report_as_string] ( identifier[analysis_dir] )
def analysis_summary_report(feature, parent):
    """Retrieve an HTML table report of the currently selected analysis.
    """
    _ = (feature, parent)  # NOQA
    project_context_scope = QgsExpressionContextUtils.projectScope(QgsProject.instance())
    key = provenance_layer_analysis_impacted['provenance_key']
    if not project_context_scope.hasVariable(key):
        return None # depends on [control=['if'], data=[]]
    analysis_dir = dirname(project_context_scope.variable(key))
    return get_impact_report_as_string(analysis_dir)
def get_graph_by_id(self, network_id: int) -> BELGraph:
    """Get a network from the database by its identifier and convert it to a BEL graph."""
    network = self.get_network_by_id(network_id)
    log.debug('converting network [id=%d] %s to bel graph', network_id, network)
    return network.as_bel()
def function[get_graph_by_id, parameter[self, network_id]]: constant[Get a network from the database by its identifier and convert it to a BEL graph.] variable[network] assign[=] call[name[self].get_network_by_id, parameter[name[network_id]]] call[name[log].debug, parameter[constant[converting network [id=%d] %s to bel graph], name[network_id], name[network]]] return[call[name[network].as_bel, parameter[]]]
keyword[def] identifier[get_graph_by_id] ( identifier[self] , identifier[network_id] : identifier[int] )-> identifier[BELGraph] : literal[string] identifier[network] = identifier[self] . identifier[get_network_by_id] ( identifier[network_id] ) identifier[log] . identifier[debug] ( literal[string] , identifier[network_id] , identifier[network] ) keyword[return] identifier[network] . identifier[as_bel] ()
def get_graph_by_id(self, network_id: int) -> BELGraph:
    """Get a network from the database by its identifier and convert it to a BEL graph."""
    network = self.get_network_by_id(network_id)
    log.debug('converting network [id=%d] %s to bel graph', network_id, network)
    return network.as_bel()
def publish(self, load):
    '''
    Publish "load" to minions
    '''
    payload = {'enc': 'aes'}
    crypticle = salt.crypt.Crypticle(self.opts,
                                     salt.master.SMaster.secrets['aes']['secret'].value)
    payload['load'] = crypticle.dumps(load)
    if self.opts['sign_pub_messages']:
        master_pem_path = os.path.join(self.opts['pki_dir'], 'master.pem')
        log.debug("Signing data packet")
        payload['sig'] = salt.crypt.sign_message(master_pem_path, payload['load'])
    # Use the Salt IPC server
    if self.opts.get('ipc_mode', '') == 'tcp':
        pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514))
    else:
        pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')
    # TODO: switch to the actual asynchronous interface
    #pub_sock = salt.transport.ipc.IPCMessageClient(self.opts, io_loop=self.io_loop)
    pub_sock = salt.utils.asynchronous.SyncWrapper(
        salt.transport.ipc.IPCMessageClient,
        (pull_uri,)
    )
    pub_sock.connect()
    int_payload = {'payload': self.serial.dumps(payload)}

    # add some targeting stuff for lists only (for now)
    if load['tgt_type'] == 'list':
        if isinstance(load['tgt'], six.string_types):
            # Fetch a list of minions that match
            _res = self.ckminions.check_minions(load['tgt'],
                                                tgt_type=load['tgt_type'])
            match_ids = _res['minions']
            log.debug("Publish Side Match: %s", match_ids)
            # Send list of minions through so zmq can target them
            int_payload['topic_lst'] = match_ids
        else:
            int_payload['topic_lst'] = load['tgt']
    # Send it over IPC!
    pub_sock.send(int_payload)
def function[publish, parameter[self, load]]: constant[ Publish "load" to minions ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f48fa0>], [<ast.Constant object at 0x7da1b1f49690>]] variable[crypticle] assign[=] call[name[salt].crypt.Crypticle, parameter[name[self].opts, call[call[name[salt].master.SMaster.secrets][constant[aes]]][constant[secret]].value]] call[name[payload]][constant[load]] assign[=] call[name[crypticle].dumps, parameter[name[load]]] if call[name[self].opts][constant[sign_pub_messages]] begin[:] variable[master_pem_path] assign[=] call[name[os].path.join, parameter[call[name[self].opts][constant[pki_dir]], constant[master.pem]]] call[name[log].debug, parameter[constant[Signing data packet]]] call[name[payload]][constant[sig]] assign[=] call[name[salt].crypt.sign_message, parameter[name[master_pem_path], call[name[payload]][constant[load]]]] if compare[call[name[self].opts.get, parameter[constant[ipc_mode], constant[]]] equal[==] constant[tcp]] begin[:] variable[pull_uri] assign[=] call[name[int], parameter[call[name[self].opts.get, parameter[constant[tcp_master_publish_pull], constant[4514]]]]] variable[pub_sock] assign[=] call[name[salt].utils.asynchronous.SyncWrapper, parameter[name[salt].transport.ipc.IPCMessageClient, tuple[[<ast.Name object at 0x7da1b1f496f0>]]]] call[name[pub_sock].connect, parameter[]] variable[int_payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f48e50>], [<ast.Call object at 0x7da1b1f49210>]] if compare[call[name[load]][constant[tgt_type]] equal[==] constant[list]] begin[:] if call[name[isinstance], parameter[call[name[load]][constant[tgt]], name[six].string_types]] begin[:] variable[_res] assign[=] call[name[self].ckminions.check_minions, parameter[call[name[load]][constant[tgt]]]] variable[match_ids] assign[=] call[name[_res]][constant[minions]] call[name[log].debug, parameter[constant[Publish Side Match: %s], name[match_ids]]] call[name[int_payload]][constant[topic_lst]] assign[=] name[match_ids] call[name[pub_sock].send, parameter[name[int_payload]]]
keyword[def] identifier[publish] ( identifier[self] , identifier[load] ): literal[string] identifier[payload] ={ literal[string] : literal[string] } identifier[crypticle] = identifier[salt] . identifier[crypt] . identifier[Crypticle] ( identifier[self] . identifier[opts] , identifier[salt] . identifier[master] . identifier[SMaster] . identifier[secrets] [ literal[string] ][ literal[string] ]. identifier[value] ) identifier[payload] [ literal[string] ]= identifier[crypticle] . identifier[dumps] ( identifier[load] ) keyword[if] identifier[self] . identifier[opts] [ literal[string] ]: identifier[master_pem_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[opts] [ literal[string] ], literal[string] ) identifier[log] . identifier[debug] ( literal[string] ) identifier[payload] [ literal[string] ]= identifier[salt] . identifier[crypt] . identifier[sign_message] ( identifier[master_pem_path] , identifier[payload] [ literal[string] ]) keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] , literal[string] )== literal[string] : identifier[pull_uri] = identifier[int] ( identifier[self] . identifier[opts] . identifier[get] ( literal[string] , literal[int] )) keyword[else] : identifier[pull_uri] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[opts] [ literal[string] ], literal[string] ) identifier[pub_sock] = identifier[salt] . identifier[utils] . identifier[asynchronous] . identifier[SyncWrapper] ( identifier[salt] . identifier[transport] . identifier[ipc] . identifier[IPCMessageClient] , ( identifier[pull_uri] ,) ) identifier[pub_sock] . identifier[connect] () identifier[int_payload] ={ literal[string] : identifier[self] . identifier[serial] . identifier[dumps] ( identifier[payload] )} keyword[if] identifier[load] [ literal[string] ]== literal[string] : keyword[if] identifier[isinstance] ( identifier[load] [ literal[string] ], identifier[six] . identifier[string_types] ): identifier[_res] = identifier[self] . identifier[ckminions] . identifier[check_minions] ( identifier[load] [ literal[string] ], identifier[tgt_type] = identifier[load] [ literal[string] ]) identifier[match_ids] = identifier[_res] [ literal[string] ] identifier[log] . identifier[debug] ( literal[string] , identifier[match_ids] ) identifier[int_payload] [ literal[string] ]= identifier[match_ids] keyword[else] : identifier[int_payload] [ literal[string] ]= identifier[load] [ literal[string] ] identifier[pub_sock] . identifier[send] ( identifier[int_payload] )
def publish(self, load):
    """
    Publish "load" to minions
    """
    payload = {'enc': 'aes'}
    crypticle = salt.crypt.Crypticle(self.opts, salt.master.SMaster.secrets['aes']['secret'].value)
    payload['load'] = crypticle.dumps(load)
    if self.opts['sign_pub_messages']:
        master_pem_path = os.path.join(self.opts['pki_dir'], 'master.pem')
        log.debug('Signing data packet')
        payload['sig'] = salt.crypt.sign_message(master_pem_path, payload['load']) # depends on [control=['if'], data=[]]
    # Use the Salt IPC server
    if self.opts.get('ipc_mode', '') == 'tcp':
        pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514)) # depends on [control=['if'], data=[]]
    else:
        pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc')
    # TODO: switch to the actual asynchronous interface
    #pub_sock = salt.transport.ipc.IPCMessageClient(self.opts, io_loop=self.io_loop)
    pub_sock = salt.utils.asynchronous.SyncWrapper(salt.transport.ipc.IPCMessageClient, (pull_uri,))
    pub_sock.connect()
    int_payload = {'payload': self.serial.dumps(payload)}

    # add some targeting stuff for lists only (for now)
    if load['tgt_type'] == 'list':
        if isinstance(load['tgt'], six.string_types):
            # Fetch a list of minions that match
            _res = self.ckminions.check_minions(load['tgt'], tgt_type=load['tgt_type'])
            match_ids = _res['minions']
            log.debug('Publish Side Match: %s', match_ids)
            # Send list of minions through so zmq can target them
            int_payload['topic_lst'] = match_ids # depends on [control=['if'], data=[]]
        else:
            int_payload['topic_lst'] = load['tgt'] # depends on [control=['if'], data=[]]
    # Send it over IPC!
    pub_sock.send(int_payload)
def read_abinit_hdr(self):
    """
    Read the variables associated to the Abinit header.

    Return :class:`AbinitHeader`
    """
    d = {}
    for hvar in _HDR_VARIABLES.values():
        ncname = hvar.etsf_name if hvar.etsf_name is not None else hvar.name
        if ncname in self.rootgrp.variables:
            d[hvar.name] = self.read_value(ncname)
        elif ncname in self.rootgrp.dimensions:
            d[hvar.name] = self.read_dimvalue(ncname)
        else:
            raise ValueError("Cannot find `%s` in `%s`" % (ncname, self.path))
        # Convert scalars to (well) scalars.
        if hasattr(d[hvar.name], "shape") and not d[hvar.name].shape:
            d[hvar.name] = np.asscalar(d[hvar.name])
        if hvar.name in ("title", "md5_pseudos", "codvsn"):
            # Convert array of numpy bytes to list of strings
            if hvar.name == "codvsn":
                d[hvar.name] = "".join(bs.decode("utf-8").strip() for bs in d[hvar.name])
            else:
                d[hvar.name] = ["".join(bs.decode("utf-8") for bs in astr).strip()
                                for astr in d[hvar.name]]
    return AbinitHeader(d)
def function[read_abinit_hdr, parameter[self]]: constant[ Read the variables associated to the Abinit header. Return :class:`AbinitHeader` ] variable[d] assign[=] dictionary[[], []] for taget[name[hvar]] in starred[call[name[_HDR_VARIABLES].values, parameter[]]] begin[:] variable[ncname] assign[=] <ast.IfExp object at 0x7da20c76dcf0> if compare[name[ncname] in name[self].rootgrp.variables] begin[:] call[name[d]][name[hvar].name] assign[=] call[name[self].read_value, parameter[name[ncname]]] if <ast.BoolOp object at 0x7da20c76ffa0> begin[:] call[name[d]][name[hvar].name] assign[=] call[name[np].asscalar, parameter[call[name[d]][name[hvar].name]]] if compare[name[hvar].name in tuple[[<ast.Constant object at 0x7da20c76e980>, <ast.Constant object at 0x7da20c76caf0>, <ast.Constant object at 0x7da20c76f250>]]] begin[:] if compare[name[hvar].name equal[==] constant[codvsn]] begin[:] call[name[d]][name[hvar].name] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20c76e260>]] return[call[name[AbinitHeader], parameter[name[d]]]]
keyword[def] identifier[read_abinit_hdr] ( identifier[self] ): literal[string] identifier[d] ={} keyword[for] identifier[hvar] keyword[in] identifier[_HDR_VARIABLES] . identifier[values] (): identifier[ncname] = identifier[hvar] . identifier[etsf_name] keyword[if] identifier[hvar] . identifier[etsf_name] keyword[is] keyword[not] keyword[None] keyword[else] identifier[hvar] . identifier[name] keyword[if] identifier[ncname] keyword[in] identifier[self] . identifier[rootgrp] . identifier[variables] : identifier[d] [ identifier[hvar] . identifier[name] ]= identifier[self] . identifier[read_value] ( identifier[ncname] ) keyword[elif] identifier[ncname] keyword[in] identifier[self] . identifier[rootgrp] . identifier[dimensions] : identifier[d] [ identifier[hvar] . identifier[name] ]= identifier[self] . identifier[read_dimvalue] ( identifier[ncname] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[ncname] , identifier[self] . identifier[path] )) keyword[if] identifier[hasattr] ( identifier[d] [ identifier[hvar] . identifier[name] ], literal[string] ) keyword[and] keyword[not] identifier[d] [ identifier[hvar] . identifier[name] ]. identifier[shape] : identifier[d] [ identifier[hvar] . identifier[name] ]= identifier[np] . identifier[asscalar] ( identifier[d] [ identifier[hvar] . identifier[name] ]) keyword[if] identifier[hvar] . identifier[name] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[if] identifier[hvar] . identifier[name] == literal[string] : identifier[d] [ identifier[hvar] . identifier[name] ]= literal[string] . identifier[join] ( identifier[bs] . identifier[decode] ( literal[string] ). identifier[strip] () keyword[for] identifier[bs] keyword[in] identifier[d] [ identifier[hvar] . identifier[name] ]) keyword[else] : identifier[d] [ identifier[hvar] . identifier[name] ]=[ literal[string] . identifier[join] ( identifier[bs] . identifier[decode] ( literal[string] ) keyword[for] identifier[bs] keyword[in] identifier[astr] ). identifier[strip] () keyword[for] identifier[astr] keyword[in] identifier[d] [ identifier[hvar] . identifier[name] ]] keyword[return] identifier[AbinitHeader] ( identifier[d] )
def read_abinit_hdr(self):
    """
    Read the variables associated to the Abinit header.

    Return :class:`AbinitHeader`
    """
    d = {}
    for hvar in _HDR_VARIABLES.values():
        ncname = hvar.etsf_name if hvar.etsf_name is not None else hvar.name
        if ncname in self.rootgrp.variables:
            d[hvar.name] = self.read_value(ncname) # depends on [control=['if'], data=['ncname']]
        elif ncname in self.rootgrp.dimensions:
            d[hvar.name] = self.read_dimvalue(ncname) # depends on [control=['if'], data=['ncname']]
        else:
            raise ValueError('Cannot find `%s` in `%s`' % (ncname, self.path))
        # Convert scalars to (well) scalars.
        if hasattr(d[hvar.name], 'shape') and (not d[hvar.name].shape):
            d[hvar.name] = np.asscalar(d[hvar.name]) # depends on [control=['if'], data=[]]
        if hvar.name in ('title', 'md5_pseudos', 'codvsn'):
            # Convert array of numpy bytes to list of strings
            if hvar.name == 'codvsn':
                d[hvar.name] = ''.join((bs.decode('utf-8').strip() for bs in d[hvar.name])) # depends on [control=['if'], data=[]]
            else:
                d[hvar.name] = [''.join((bs.decode('utf-8') for bs in astr)).strip() for astr in d[hvar.name]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['hvar']]
    return AbinitHeader(d)
def bsrch(self, domain):
    """
    This function uses the Bloomberg API to retrieve 'bsrch'
    (Bloomberg SRCH Data) queries. Returns list of tickers.

    Parameters
    ----------
    domain: string
        A character string with the name of the domain to execute.
        It can be a user defined SRCH screen, commodity screen or
        one of the variety of Bloomberg examples. All domains are in
        the format <domain>:<search_name>. Example "COMDTY:NGFLOW"

    Returns
    -------
    data: pandas.DataFrame
        List of bloomberg tickers from the BSRCH
    """
    logger = _get_logger(self.debug)
    request = self.exrService.createRequest('ExcelGetGridRequest')
    request.set('Domain', domain)
    logger.info('Sending Request:\n{}'.format(request))
    self._session.sendRequest(request, identity=self._identity)
    data = []
    for msg in self._receive_events(to_dict=False):
        for v in msg.getElement("DataRecords").values():
            for f in v.getElement("DataFields").values():
                data.append(f.getElementAsString("StringValue"))
    return pd.DataFrame(data)
def function[bsrch, parameter[self, domain]]: constant[ This function uses the Bloomberg API to retrieve 'bsrch' (Bloomberg SRCH Data) queries. Returns list of tickers. Parameters ---------- domain: string A character string with the name of the domain to execute. It can be a user defined SRCH screen, commodity screen or one of the variety of Bloomberg examples. All domains are in the format <domain>:<search_name>. Example "COMDTY:NGFLOW" Returns ------- data: pandas.DataFrame List of bloomberg tickers from the BSRCH ] variable[logger] assign[=] call[name[_get_logger], parameter[name[self].debug]] variable[request] assign[=] call[name[self].exrService.createRequest, parameter[constant[ExcelGetGridRequest]]] call[name[request].set, parameter[constant[Domain], name[domain]]] call[name[logger].info, parameter[call[constant[Sending Request: {}].format, parameter[name[request]]]]] call[name[self]._session.sendRequest, parameter[name[request]]] variable[data] assign[=] list[[]] for taget[name[msg]] in starred[call[name[self]._receive_events, parameter[]]] begin[:] for taget[name[v]] in starred[call[call[name[msg].getElement, parameter[constant[DataRecords]]].values, parameter[]]] begin[:] for taget[name[f]] in starred[call[call[name[v].getElement, parameter[constant[DataFields]]].values, parameter[]]] begin[:] call[name[data].append, parameter[call[name[f].getElementAsString, parameter[constant[StringValue]]]]] return[call[name[pd].DataFrame, parameter[name[data]]]]
keyword[def] identifier[bsrch] ( identifier[self] , identifier[domain] ): literal[string] identifier[logger] = identifier[_get_logger] ( identifier[self] . identifier[debug] ) identifier[request] = identifier[self] . identifier[exrService] . identifier[createRequest] ( literal[string] ) identifier[request] . identifier[set] ( literal[string] , identifier[domain] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[request] )) identifier[self] . identifier[_session] . identifier[sendRequest] ( identifier[request] , identifier[identity] = identifier[self] . identifier[_identity] ) identifier[data] =[] keyword[for] identifier[msg] keyword[in] identifier[self] . identifier[_receive_events] ( identifier[to_dict] = keyword[False] ): keyword[for] identifier[v] keyword[in] identifier[msg] . identifier[getElement] ( literal[string] ). identifier[values] (): keyword[for] identifier[f] keyword[in] identifier[v] . identifier[getElement] ( literal[string] ). identifier[values] (): identifier[data] . identifier[append] ( identifier[f] . identifier[getElementAsString] ( literal[string] )) keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[data] )
def bsrch(self, domain):
    """
    This function uses the Bloomberg API to retrieve 'bsrch'
    (Bloomberg SRCH Data) queries. Returns list of tickers.

    Parameters
    ----------
    domain: string
        A character string with the name of the domain to execute.
        It can be a user defined SRCH screen, commodity screen or
        one of the variety of Bloomberg examples. All domains are in
        the format <domain>:<search_name>. Example "COMDTY:NGFLOW"

    Returns
    -------
    data: pandas.DataFrame
        List of bloomberg tickers from the BSRCH
    """
    logger = _get_logger(self.debug)
    request = self.exrService.createRequest('ExcelGetGridRequest')
    request.set('Domain', domain)
    logger.info('Sending Request:\n{}'.format(request))
    self._session.sendRequest(request, identity=self._identity)
    data = []
    for msg in self._receive_events(to_dict=False):
        for v in msg.getElement('DataRecords').values():
            for f in v.getElement('DataFields').values():
                data.append(f.getElementAsString('StringValue')) # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=['msg']]
    return pd.DataFrame(data)
def chdir(path):
    """Change the working directory to `path` for the duration of this
    context manager.

    :param str path: The path to change to
    """
    cur_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(cur_cwd)
def function[chdir, parameter[path]]: constant[Change the working directory to `path` for the duration of this context manager. :param str path: The path to change to ] variable[cur_cwd] assign[=] call[name[os].getcwd, parameter[]] call[name[os].chdir, parameter[name[path]]] <ast.Try object at 0x7da1b14d1f60>
keyword[def] identifier[chdir] ( identifier[path] ): literal[string] identifier[cur_cwd] = identifier[os] . identifier[getcwd] () identifier[os] . identifier[chdir] ( identifier[path] ) keyword[try] : keyword[yield] keyword[finally] : identifier[os] . identifier[chdir] ( identifier[cur_cwd] )
def chdir(path):
    """Change the working directory to `path` for the duration of this
    context manager.

    :param str path: The path to change to
    """
    cur_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield # depends on [control=['try'], data=[]]
    finally:
        os.chdir(cur_cwd)
def remove_collisions(self, min_dist=0.5):
    """
    Remove vnodes that are too close to existing atoms in the structure

    Args:
        min_dist(float): The minimum distance that a vertex needs to be
            from existing atoms.
    """
    vfcoords = [v.frac_coords for v in self.vnodes]
    sfcoords = self.structure.frac_coords
    dist_matrix = self.structure.lattice.get_all_distances(vfcoords, sfcoords)
    all_dist = np.min(dist_matrix, axis=1)
    new_vnodes = []
    for i, v in enumerate(self.vnodes):
        if all_dist[i] > min_dist:
            new_vnodes.append(v)
    self.vnodes = new_vnodes
def function[remove_collisions, parameter[self, min_dist]]: constant[ Remove vnodes that are too close to existing atoms in the structure Args: min_dist(float): The minimum distance that a vertex needs to be from existing atoms. ] variable[vfcoords] assign[=] <ast.ListComp object at 0x7da1b26af490> variable[sfcoords] assign[=] name[self].structure.frac_coords variable[dist_matrix] assign[=] call[name[self].structure.lattice.get_all_distances, parameter[name[vfcoords], name[sfcoords]]] variable[all_dist] assign[=] call[name[np].min, parameter[name[dist_matrix]]] variable[new_vnodes] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f810c40>, <ast.Name object at 0x7da18f811f30>]]] in starred[call[name[enumerate], parameter[name[self].vnodes]]] begin[:] if compare[call[name[all_dist]][name[i]] greater[>] name[min_dist]] begin[:] call[name[new_vnodes].append, parameter[name[v]]] name[self].vnodes assign[=] name[new_vnodes]
keyword[def] identifier[remove_collisions] ( identifier[self] , identifier[min_dist] = literal[int] ): literal[string] identifier[vfcoords] =[ identifier[v] . identifier[frac_coords] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[vnodes] ] identifier[sfcoords] = identifier[self] . identifier[structure] . identifier[frac_coords] identifier[dist_matrix] = identifier[self] . identifier[structure] . identifier[lattice] . identifier[get_all_distances] ( identifier[vfcoords] , identifier[sfcoords] ) identifier[all_dist] = identifier[np] . identifier[min] ( identifier[dist_matrix] , identifier[axis] = literal[int] ) identifier[new_vnodes] =[] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[self] . identifier[vnodes] ): keyword[if] identifier[all_dist] [ identifier[i] ]> identifier[min_dist] : identifier[new_vnodes] . identifier[append] ( identifier[v] ) identifier[self] . identifier[vnodes] = identifier[new_vnodes]
def remove_collisions(self, min_dist=0.5):
    """
    Remove vnodes that are too close to existing atoms in the structure

    Args:
        min_dist(float): The minimum distance that a vertex needs to be
            from existing atoms.
    """
    vfcoords = [v.frac_coords for v in self.vnodes]
    sfcoords = self.structure.frac_coords
    dist_matrix = self.structure.lattice.get_all_distances(vfcoords, sfcoords)
    all_dist = np.min(dist_matrix, axis=1)
    new_vnodes = []
    for (i, v) in enumerate(self.vnodes):
        if all_dist[i] > min_dist:
            new_vnodes.append(v) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    self.vnodes = new_vnodes
def load_positions(positions_path):
    """Load the positions of an image.

    Positions correspond to a set of pixels in the lensed source galaxy that are anticipated to come from the same \
    multiply-imaged region of the source-plane. Mass models which do not trace the pixels within a threshold value of \
    one another are resampled during the non-linear search.

    Positions are stored in a .dat file, where each line of the file gives a list of list of (y,x) positions which \
    correspond to the same region of the source-plane. Thus, multiple source-plane regions can be input over multiple \
    lines of the same positions file.

    Parameters
    ----------
    positions_path : str
        The path to the positions .dat file containing the positions (e.g. '/path/to/positions.dat')
    """
    with open(positions_path) as f:
        position_string = f.readlines()

    positions = []
    for line in position_string:
        position_list = ast.literal_eval(line)
        positions.append(position_list)
    return positions
def function[load_positions, parameter[positions_path]]: constant[Load the positions of an image. Positions correspond to a set of pixels in the lensed source galaxy that are anticipated to come from the same multiply-imaged region of the source-plane. Mass models which do not trace the pixels within a threshold value of one another are resampled during the non-linear search. Positions are stored in a .dat file, where each line of the file gives a list of list of (y,x) positions which correspond to the same region of the source-plane. Thus, multiple source-plane regions can be input over multiple lines of the same positions file. Parameters ---------- positions_path : str The path to the positions .dat file containing the positions (e.g. '/path/to/positions.dat') ] with call[name[open], parameter[name[positions_path]]] begin[:] variable[position_string] assign[=] call[name[f].readlines, parameter[]] variable[positions] assign[=] list[[]] for taget[name[line]] in starred[name[position_string]] begin[:] variable[position_list] assign[=] call[name[ast].literal_eval, parameter[name[line]]] call[name[positions].append, parameter[name[position_list]]] return[name[positions]]
keyword[def] identifier[load_positions] ( identifier[positions_path] ): literal[string] keyword[with] identifier[open] ( identifier[positions_path] ) keyword[as] identifier[f] : identifier[position_string] = identifier[f] . identifier[readlines] () identifier[positions] =[] keyword[for] identifier[line] keyword[in] identifier[position_string] : identifier[position_list] = identifier[ast] . identifier[literal_eval] ( identifier[line] ) identifier[positions] . identifier[append] ( identifier[position_list] ) keyword[return] identifier[positions]
def load_positions(positions_path):
    """Load the positions of an image.

    Positions correspond to a set of pixels in the lensed source galaxy that are anticipated to come from the same
    multiply-imaged region of the source-plane. Mass models which do not trace the pixels within a threshold value of
    one another are resampled during the non-linear search.

    Positions are stored in a .dat file, where each line of the file gives a list of list of (y,x) positions which
    correspond to the same region of the source-plane. Thus, multiple source-plane regions can be input over multiple
    lines of the same positions file.

    Parameters
    ----------
    positions_path : str
        The path to the positions .dat file containing the positions (e.g. '/path/to/positions.dat')
    """
    with open(positions_path) as f:
        position_string = f.readlines() # depends on [control=['with'], data=['f']]
    positions = []
    for line in position_string:
        position_list = ast.literal_eval(line)
        positions.append(position_list) # depends on [control=['for'], data=['line']]
    return positions
def make_new(self, rev):
    # type: (str) -> RevOptions
    """
    Make a copy of the current instance, but with a new rev.

    Args:
      rev: the name of the revision for the new object.
    """
    return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
def function[make_new, parameter[self, rev]]: constant[ Make a copy of the current instance, but with a new rev. Args: rev: the name of the revision for the new object. ] return[call[name[self].vcs.make_rev_options, parameter[name[rev]]]]
keyword[def] identifier[make_new] ( identifier[self] , identifier[rev] ): literal[string] keyword[return] identifier[self] . identifier[vcs] . identifier[make_rev_options] ( identifier[rev] , identifier[extra_args] = identifier[self] . identifier[extra_args] )
def make_new(self, rev):
    # type: (str) -> RevOptions
    '\n        Make a copy of the current instance, but with a new rev.\n\n        Args:\n          rev: the name of the revision for the new object.\n        '
    return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
def _request_activity_data(self, athlete, filename):
    """Actually do the request for activity filename

    This call is slow and therefore this method is memory cached.

    Keyword arguments:
    athlete -- Full name of athlete
    filename -- filename of request activity (e.g. \'2015_04_29_09_03_16.json\')
    """
    response = self._get_request(self._activity_endpoint(athlete, filename)).json()

    activity = pd.DataFrame(response['RIDE']['SAMPLES'])
    activity = activity.rename(columns=ACTIVITY_COLUMN_TRANSLATION)

    activity.index = pd.to_timedelta(activity.time, unit='s')
    activity.drop('time', axis=1, inplace=True)

    return activity[[i for i in ACTIVITY_COLUMN_ORDER if i in activity.columns]]
def function[_request_activity_data, parameter[self, athlete, filename]]: constant[Actually do the request for activity filename This call is slow and therefore this method is memory cached. Keyword arguments: athlete -- Full name of athlete filename -- filename of request activity (e.g. '2015_04_29_09_03_16.json') ] variable[response] assign[=] call[call[name[self]._get_request, parameter[call[name[self]._activity_endpoint, parameter[name[athlete], name[filename]]]]].json, parameter[]] variable[activity] assign[=] call[name[pd].DataFrame, parameter[call[call[name[response]][constant[RIDE]]][constant[SAMPLES]]]] variable[activity] assign[=] call[name[activity].rename, parameter[]] name[activity].index assign[=] call[name[pd].to_timedelta, parameter[name[activity].time]] call[name[activity].drop, parameter[constant[time]]] return[call[name[activity]][<ast.ListComp object at 0x7da20c6e7e20>]]
keyword[def] identifier[_request_activity_data] ( identifier[self] , identifier[athlete] , identifier[filename] ): literal[string] identifier[response] = identifier[self] . identifier[_get_request] ( identifier[self] . identifier[_activity_endpoint] ( identifier[athlete] , identifier[filename] )). identifier[json] () identifier[activity] = identifier[pd] . identifier[DataFrame] ( identifier[response] [ literal[string] ][ literal[string] ]) identifier[activity] = identifier[activity] . identifier[rename] ( identifier[columns] = identifier[ACTIVITY_COLUMN_TRANSLATION] ) identifier[activity] . identifier[index] = identifier[pd] . identifier[to_timedelta] ( identifier[activity] . identifier[time] , identifier[unit] = literal[string] ) identifier[activity] . identifier[drop] ( literal[string] , identifier[axis] = literal[int] , identifier[inplace] = keyword[True] ) keyword[return] identifier[activity] [[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[ACTIVITY_COLUMN_ORDER] keyword[if] identifier[i] keyword[in] identifier[activity] . identifier[columns] ]]
def _request_activity_data(self, athlete, filename):
    """Actually do the request for activity filename

    This call is slow and therefore this method is memory cached.

    Keyword arguments:
    athlete -- Full name of athlete
    filename -- filename of request activity (e.g. '2015_04_29_09_03_16.json')
    """
    response = self._get_request(self._activity_endpoint(athlete, filename)).json()
    activity = pd.DataFrame(response['RIDE']['SAMPLES'])
    activity = activity.rename(columns=ACTIVITY_COLUMN_TRANSLATION)
    activity.index = pd.to_timedelta(activity.time, unit='s')
    activity.drop('time', axis=1, inplace=True)
    return activity[[i for i in ACTIVITY_COLUMN_ORDER if i in activity.columns]]
def resp_set_label(self, resp, label=None):
    """Default callback for get_label/set_label
    """
    if label:
        self.label=label
    elif resp:
        self.label=resp.label.decode().replace("\x00", "")
def function[resp_set_label, parameter[self, resp, label]]: constant[Default callback for get_label/set_label ] if name[label] begin[:] name[self].label assign[=] name[label]
keyword[def] identifier[resp_set_label] ( identifier[self] , identifier[resp] , identifier[label] = keyword[None] ): literal[string] keyword[if] identifier[label] : identifier[self] . identifier[label] = identifier[label] keyword[elif] identifier[resp] : identifier[self] . identifier[label] = identifier[resp] . identifier[label] . identifier[decode] (). identifier[replace] ( literal[string] , literal[string] )
def resp_set_label(self, resp, label=None):
    """Default callback for get_label/set_label
    """
    if label:
        self.label = label # depends on [control=['if'], data=[]]
    elif resp:
        self.label = resp.label.decode().replace('\x00', '') # depends on [control=['if'], data=[]]
def delete(self, *args, **kwargs):
    """Delete an object"""
    self.before_delete(args, kwargs)

    self.delete_object(kwargs)

    result = {'meta': {'message': 'Object successfully deleted'}}

    final_result = self.after_delete(result)

    return final_result
def function[delete, parameter[self]]: constant[Delete an object] call[name[self].before_delete, parameter[name[args], name[kwargs]]] call[name[self].delete_object, parameter[name[kwargs]]] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b17fb160>], [<ast.Dict object at 0x7da1b17fa260>]] variable[final_result] assign[=] call[name[self].after_delete, parameter[name[result]]] return[name[final_result]]
keyword[def] identifier[delete] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[before_delete] ( identifier[args] , identifier[kwargs] ) identifier[self] . identifier[delete_object] ( identifier[kwargs] ) identifier[result] ={ literal[string] :{ literal[string] : literal[string] }} identifier[final_result] = identifier[self] . identifier[after_delete] ( identifier[result] ) keyword[return] identifier[final_result]
def delete(self, *args, **kwargs):
    """Delete an object"""
    self.before_delete(args, kwargs)
    self.delete_object(kwargs)
    result = {'meta': {'message': 'Object successfully deleted'}}
    final_result = self.after_delete(result)
    return final_result
def calc_epc_v1(self):
    """Apply the evaporation correction factors and adjust evaporation
    to the altitude of the individual zones.

    Calculate the areal mean of (uncorrected) potential evaporation
    for the subbasin, adjust it to the individual zones in accordance
    with their heights and perform some corrections, among which one
    depends on the actual precipitation.

    Required control parameters:
      |NmbZones|
      |ECorr|
      |ECAlt|
      |ZoneZ|
      |ZRelE|
      |EPF|

    Required flux sequences:
      |EP|
      |PC|

    Calculated flux sequences:
      |EPC|

    Basic equation:
      :math:`EPC = EP \\cdot ECorr
      \\cdot (1+ECAlt \\cdot (ZoneZ-ZRelE))
      \\cdot exp(-EPF \\cdot PC)`

    Examples:

        Four zones are at an elevation of 200 m.  A (uncorrected)
        potential evaporation value of 2 mm and a (corrected)
        precipitation value of 5 mm have been determined for each zone
        beforehand:

        >>> from hydpy.models.hland import *
        >>> parameterstep('1d')
        >>> simulationstep('12h')
        >>> nmbzones(4)
        >>> zrele(2.0)
        >>> zonez(3.0)
        >>> fluxes.ep = 2.0
        >>> fluxes.pc = 5.0

        The first three zones illustrate the individual evaporation
        corrections due to the general evaporation correction factor
        (|ECorr|, first zone), the altitude correction factor
        (|ECAlt|, second zone), the precipitation related correction
        factor (|EPF|, third zone).  The fourth zone illustrates the
        interaction between all corrections:

        >>> ecorr(1.3, 1.0, 1.0, 1.3)
        >>> ecalt(0.0, 0.1, 0.0, 0.1)
        >>> epf(0.0, 0.0, -numpy.log(0.7)/10.0, -numpy.log(0.7)/10.0)
        >>> model.calc_epc_v1()
        >>> fluxes.epc
        epc(2.6, 1.8, 1.4, 1.638)

        To prevent from calculating negative evaporation values when
        too large values for parameter |ECAlt| are set, a truncation
        is performed:

        >>> ecalt(2.0)
        >>> model.calc_epc_v1()
        >>> fluxes.epc
        epc(0.0, 0.0, 0.0, 0.0)
    """
    con = self.parameters.control.fastaccess
    flu = self.sequences.fluxes.fastaccess
    for k in range(con.nmbzones):
        flu.epc[k] = (flu.ep[k]*con.ecorr[k] *
                      (1. - con.ecalt[k]*(con.zonez[k]-con.zrele)))
        if flu.epc[k] <= 0.:
            flu.epc[k] = 0.
        else:
            flu.epc[k] *= modelutils.exp(-con.epf[k]*flu.pc[k])
def function[calc_epc_v1, parameter[self]]: constant[Apply the evaporation correction factors and adjust evaporation to the altitude of the individual zones. Calculate the areal mean of (uncorrected) potential evaporation for the subbasin, adjust it to the individual zones in accordance with their heights and perform some corrections, among which one depends on the actual precipitation. Required control parameters: |NmbZones| |ECorr| |ECAlt| |ZoneZ| |ZRelE| |EPF| Required flux sequences: |EP| |PC| Calculated flux sequences: |EPC| Basic equation: :math:`EPC = EP \cdot ECorr \cdot (1+ECAlt \cdot (ZoneZ-ZRelE)) \cdot exp(-EPF \cdot PC)` Examples: Four zones are at an elevation of 200 m. A (uncorrected) potential evaporation value of 2 mm and a (corrected) precipitation value of 5 mm have been determined for each zone beforehand: >>> from hydpy.models.hland import * >>> parameterstep('1d') >>> simulationstep('12h') >>> nmbzones(4) >>> zrele(2.0) >>> zonez(3.0) >>> fluxes.ep = 2.0 >>> fluxes.pc = 5.0 The first three zones illustrate the individual evaporation corrections due to the general evaporation correction factor (|ECorr|, first zone), the altitude correction factor (|ECAlt|, second zone), the precipitation related correction factor (|EPF|, third zone). The fourth zone illustrates the interaction between all corrections: >>> ecorr(1.3, 1.0, 1.0, 1.3) >>> ecalt(0.0, 0.1, 0.0, 0.1) >>> epf(0.0, 0.0, -numpy.log(0.7)/10.0, -numpy.log(0.7)/10.0) >>> model.calc_epc_v1() >>> fluxes.epc epc(2.6, 1.8, 1.4, 1.638) To prevent from calculating negative evaporation values when too large values for parameter |ECAlt| are set, a truncation is performed: >>> ecalt(2.0) >>> model.calc_epc_v1() >>> fluxes.epc epc(0.0, 0.0, 0.0, 0.0) ] variable[con] assign[=] name[self].parameters.control.fastaccess variable[flu] assign[=] name[self].sequences.fluxes.fastaccess for taget[name[k]] in starred[call[name[range], parameter[name[con].nmbzones]]] begin[:] call[name[flu].epc][name[k]] assign[=] binary_operation[binary_operation[call[name[flu].ep][name[k]] * call[name[con].ecorr][name[k]]] * binary_operation[constant[1.0] - binary_operation[call[name[con].ecalt][name[k]] * binary_operation[call[name[con].zonez][name[k]] - name[con].zrele]]]] if compare[call[name[flu].epc][name[k]] less_or_equal[<=] constant[0.0]] begin[:] call[name[flu].epc][name[k]] assign[=] constant[0.0]
keyword[def] identifier[calc_epc_v1] ( identifier[self] ): literal[string] identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess] identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[con] . identifier[nmbzones] ): identifier[flu] . identifier[epc] [ identifier[k] ]=( identifier[flu] . identifier[ep] [ identifier[k] ]* identifier[con] . identifier[ecorr] [ identifier[k] ]* ( literal[int] - identifier[con] . identifier[ecalt] [ identifier[k] ]*( identifier[con] . identifier[zonez] [ identifier[k] ]- identifier[con] . identifier[zrele] ))) keyword[if] identifier[flu] . identifier[epc] [ identifier[k] ]<= literal[int] : identifier[flu] . identifier[epc] [ identifier[k] ]= literal[int] keyword[else] : identifier[flu] . identifier[epc] [ identifier[k] ]*= identifier[modelutils] . identifier[exp] (- identifier[con] . identifier[epf] [ identifier[k] ]* identifier[flu] . identifier[pc] [ identifier[k] ])
def calc_epc_v1(self):
    """Apply the evaporation correction factors and adjust evaporation
    to the altitude of the individual zones.

    Calculate the areal mean of (uncorrected) potential evaporation
    for the subbasin, adjust it to the individual zones in accordance
    with their heights and perform some corrections, among which one
    depends on the actual precipitation.

    Required control parameters:
      |NmbZones|
      |ECorr|
      |ECAlt|
      |ZoneZ|
      |ZRelE|
      |EPF|

    Required flux sequences:
      |EP|
      |PC|

    Calculated flux sequences:
      |EPC|

    Basic equation:
      :math:`EPC = EP \\cdot ECorr
      \\cdot (1+ECAlt \\cdot (ZoneZ-ZRelE))
      \\cdot exp(-EPF \\cdot PC)`

    Examples:

        Four zones are at an elevation of 200 m.  A (uncorrected)
        potential evaporation value of 2 mm and a (corrected)
        precipitation value of 5 mm have been determined for each zone
        beforehand:

        >>> from hydpy.models.hland import *
        >>> parameterstep('1d')
        >>> simulationstep('12h')
        >>> nmbzones(4)
        >>> zrele(2.0)
        >>> zonez(3.0)
        >>> fluxes.ep = 2.0
        >>> fluxes.pc = 5.0

        The first three zones illustrate the individual evaporation
        corrections due to the general evaporation correction factor
        (|ECorr|, first zone), the altitude correction factor
        (|ECAlt|, second zone), the precipitation related correction
        factor (|EPF|, third zone).  The fourth zone illustrates the
        interaction between all corrections:

        >>> ecorr(1.3, 1.0, 1.0, 1.3)
        >>> ecalt(0.0, 0.1, 0.0, 0.1)
        >>> epf(0.0, 0.0, -numpy.log(0.7)/10.0, -numpy.log(0.7)/10.0)
        >>> model.calc_epc_v1()
        >>> fluxes.epc
        epc(2.6, 1.8, 1.4, 1.638)

        To prevent from calculating negative evaporation values when
        too large values for parameter |ECAlt| are set, a truncation
        is performed:

        >>> ecalt(2.0)
        >>> model.calc_epc_v1()
        >>> fluxes.epc
        epc(0.0, 0.0, 0.0, 0.0)
    """
    con = self.parameters.control.fastaccess
    flu = self.sequences.fluxes.fastaccess
    for k in range(con.nmbzones):
        flu.epc[k] = flu.ep[k] * con.ecorr[k] * (1.0 - con.ecalt[k] * (con.zonez[k] - con.zrele))
        if flu.epc[k] <= 0.0:
            flu.epc[k] = 0.0 # depends on [control=['if'], data=[]]
        else:
            flu.epc[k] *= modelutils.exp(-con.epf[k] * flu.pc[k]) # depends on [control=['for'], data=['k']]
def on_startup(self, callback: callable, polling=True, webhook=True):
    """
    Register a callback for the startup process

    :param callback:
    :param polling: use with polling
    :param webhook: use with webhook
    """
    self._check_frozen()
    if not webhook and not polling:
        warn('This action has no effect!', UserWarning)
        return

    if isinstance(callback, (list, tuple, set)):
        for cb in callback:
            self.on_startup(cb, polling, webhook)
        return

    if polling:
        self._on_startup_polling.append(callback)
    if webhook:
        self._on_startup_webhook.append(callback)
def function[on_startup, parameter[self, callback, polling, webhook]]: constant[ Register a callback for the startup process :param callback: :param polling: use with polling :param webhook: use with webhook ] call[name[self]._check_frozen, parameter[]] if <ast.BoolOp object at 0x7da1b1737220> begin[:] call[name[warn], parameter[constant[This action has no effect!], name[UserWarning]]] return[None] if call[name[isinstance], parameter[name[callback], tuple[[<ast.Name object at 0x7da1b1735120>, <ast.Name object at 0x7da1b17361d0>, <ast.Name object at 0x7da1b1735330>]]]] begin[:] for taget[name[cb]] in starred[name[callback]] begin[:] call[name[self].on_startup, parameter[name[cb], name[polling], name[webhook]]] return[None] if name[polling] begin[:] call[name[self]._on_startup_polling.append, parameter[name[callback]]] if name[webhook] begin[:] call[name[self]._on_startup_webhook.append, parameter[name[callback]]]
keyword[def] identifier[on_startup] ( identifier[self] , identifier[callback] : identifier[callable] , identifier[polling] = keyword[True] , identifier[webhook] = keyword[True] ): literal[string] identifier[self] . identifier[_check_frozen] () keyword[if] keyword[not] identifier[webhook] keyword[and] keyword[not] identifier[polling] : identifier[warn] ( literal[string] , identifier[UserWarning] ) keyword[return] keyword[if] identifier[isinstance] ( identifier[callback] ,( identifier[list] , identifier[tuple] , identifier[set] )): keyword[for] identifier[cb] keyword[in] identifier[callback] : identifier[self] . identifier[on_startup] ( identifier[cb] , identifier[polling] , identifier[webhook] ) keyword[return] keyword[if] identifier[polling] : identifier[self] . identifier[_on_startup_polling] . identifier[append] ( identifier[callback] ) keyword[if] identifier[webhook] : identifier[self] . identifier[_on_startup_webhook] . identifier[append] ( identifier[callback] )
def on_startup(self, callback: callable, polling=True, webhook=True):
    """
    Register a callback for the startup process

    :param callback:
    :param polling: use with polling
    :param webhook: use with webhook
    """
    self._check_frozen()
    if not webhook and (not polling):
        warn('This action has no effect!', UserWarning)
        return # depends on [control=['if'], data=[]]
    if isinstance(callback, (list, tuple, set)):
        for cb in callback:
            self.on_startup(cb, polling, webhook) # depends on [control=['for'], data=['cb']]
        return # depends on [control=['if'], data=[]]
    if polling:
        self._on_startup_polling.append(callback) # depends on [control=['if'], data=[]]
    if webhook:
        self._on_startup_webhook.append(callback) # depends on [control=['if'], data=[]]
def visit_wavedrom(self, node):
    """
    Visit the wavedrom node
    """
    format = determine_format(self.builder.supported_image_types)
    if format is None:
        raise SphinxError(__("Cannot determine a suitable output format"))

    # Create random filename
    bname = "wavedrom-{}".format(uuid4())
    outpath = path.join(self.builder.outdir, self.builder.imagedir)

    # Render the wavedrom image
    imgname = render_wavedrom(self, node, outpath, bname, format)

    # Now we unpack the image node again. The file was created at the build destination,
    # and we can now use the standard visitor for the image node. We add the image node
    # as a child and then raise a SkipDeparture, which will trigger the builder to visit
    # children.
    image_node = node['image_node']
    image_node['uri'] = os.path.join(self.builder.imgpath, imgname)
    node.append(image_node)
    raise nodes.SkipDeparture
def function[visit_wavedrom, parameter[self, node]]: constant[ Visit the wavedrom node ] variable[format] assign[=] call[name[determine_format], parameter[name[self].builder.supported_image_types]] if compare[name[format] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6e7bb0> variable[bname] assign[=] call[constant[wavedrom-{}].format, parameter[call[name[uuid4], parameter[]]]] variable[outpath] assign[=] call[name[path].join, parameter[name[self].builder.outdir, name[self].builder.imagedir]] variable[imgname] assign[=] call[name[render_wavedrom], parameter[name[self], name[node], name[outpath], name[bname], name[format]]] variable[image_node] assign[=] call[name[node]][constant[image_node]] call[name[image_node]][constant[uri]] assign[=] call[name[os].path.join, parameter[name[self].builder.imgpath, name[imgname]]] call[name[node].append, parameter[name[image_node]]] <ast.Raise object at 0x7da20c6e6da0>
keyword[def] identifier[visit_wavedrom] ( identifier[self] , identifier[node] ): literal[string] identifier[format] = identifier[determine_format] ( identifier[self] . identifier[builder] . identifier[supported_image_types] ) keyword[if] identifier[format] keyword[is] keyword[None] : keyword[raise] identifier[SphinxError] ( identifier[__] ( literal[string] )) identifier[bname] = literal[string] . identifier[format] ( identifier[uuid4] ()) identifier[outpath] = identifier[path] . identifier[join] ( identifier[self] . identifier[builder] . identifier[outdir] , identifier[self] . identifier[builder] . identifier[imagedir] ) identifier[imgname] = identifier[render_wavedrom] ( identifier[self] , identifier[node] , identifier[outpath] , identifier[bname] , identifier[format] ) identifier[image_node] = identifier[node] [ literal[string] ] identifier[image_node] [ literal[string] ]= identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[builder] . identifier[imgpath] , identifier[imgname] ) identifier[node] . identifier[append] ( identifier[image_node] ) keyword[raise] identifier[nodes] . identifier[SkipDeparture]
def visit_wavedrom(self, node):
    """
    Visit the wavedrom node
    """
    format = determine_format(self.builder.supported_image_types)
    if format is None:
        raise SphinxError(__('Cannot determine a suitable output format')) # depends on [control=['if'], data=[]]
    # Create random filename
    bname = 'wavedrom-{}'.format(uuid4())
    outpath = path.join(self.builder.outdir, self.builder.imagedir)
    # Render the wavedrom image
    imgname = render_wavedrom(self, node, outpath, bname, format)
    # Now we unpack the image node again. The file was created at the build destination,
    # and we can now use the standard visitor for the image node. We add the image node
    # as a child and then raise a SkipDeparture, which will trigger the builder to visit
    # children.
    image_node = node['image_node']
    image_node['uri'] = os.path.join(self.builder.imgpath, imgname)
    node.append(image_node)
    raise nodes.SkipDeparture
def elcm_session_delete(irmc_info, session_id, terminate=False): """send an eLCM request to remove a session from the session list :param irmc_info: node info :param session_id: session id :param terminate: a running session must be terminated before removing :raises: ELCMSessionNotFound if the session does not exist :raises: SCCIClientError if SCCI failed """ # Terminate the session first if needs to if terminate: # Get session status to check session = elcm_session_get_status(irmc_info, session_id) status = session['Session']['Status'] # Terminate session if it is activated or running if status == 'running' or status == 'activated': elcm_session_terminate(irmc_info, session_id) # Send DELETE request to the server resp = elcm_request(irmc_info, method='DELETE', path='/sessionInformation/%s/remove' % session_id) if resp.status_code == 200: return elif resp.status_code == 404: raise ELCMSessionNotFound('Session "%s" does not exist' % session_id) else: raise scci.SCCIClientError(('Failed to remove session ' '"%(session)s" with error code %(error)s' % {'session': session_id, 'error': resp.status_code}))
def function[elcm_session_delete, parameter[irmc_info, session_id, terminate]]: constant[send an eLCM request to remove a session from the session list :param irmc_info: node info :param session_id: session id :param terminate: a running session must be terminated before removing :raises: ELCMSessionNotFound if the session does not exist :raises: SCCIClientError if SCCI failed ] if name[terminate] begin[:] variable[session] assign[=] call[name[elcm_session_get_status], parameter[name[irmc_info], name[session_id]]] variable[status] assign[=] call[call[name[session]][constant[Session]]][constant[Status]] if <ast.BoolOp object at 0x7da1b19507f0> begin[:] call[name[elcm_session_terminate], parameter[name[irmc_info], name[session_id]]] variable[resp] assign[=] call[name[elcm_request], parameter[name[irmc_info]]] if compare[name[resp].status_code equal[==] constant[200]] begin[:] return[None]
keyword[def] identifier[elcm_session_delete] ( identifier[irmc_info] , identifier[session_id] , identifier[terminate] = keyword[False] ): literal[string] keyword[if] identifier[terminate] : identifier[session] = identifier[elcm_session_get_status] ( identifier[irmc_info] , identifier[session_id] ) identifier[status] = identifier[session] [ literal[string] ][ literal[string] ] keyword[if] identifier[status] == literal[string] keyword[or] identifier[status] == literal[string] : identifier[elcm_session_terminate] ( identifier[irmc_info] , identifier[session_id] ) identifier[resp] = identifier[elcm_request] ( identifier[irmc_info] , identifier[method] = literal[string] , identifier[path] = literal[string] % identifier[session_id] ) keyword[if] identifier[resp] . identifier[status_code] == literal[int] : keyword[return] keyword[elif] identifier[resp] . identifier[status_code] == literal[int] : keyword[raise] identifier[ELCMSessionNotFound] ( literal[string] % identifier[session_id] ) keyword[else] : keyword[raise] identifier[scci] . identifier[SCCIClientError] (( literal[string] literal[string] % { literal[string] : identifier[session_id] , literal[string] : identifier[resp] . identifier[status_code] }))
def elcm_session_delete(irmc_info, session_id, terminate=False): """send an eLCM request to remove a session from the session list :param irmc_info: node info :param session_id: session id :param terminate: a running session must be terminated before removing :raises: ELCMSessionNotFound if the session does not exist :raises: SCCIClientError if SCCI failed """ # Terminate the session first if needs to if terminate: # Get session status to check session = elcm_session_get_status(irmc_info, session_id) status = session['Session']['Status'] # Terminate session if it is activated or running if status == 'running' or status == 'activated': elcm_session_terminate(irmc_info, session_id) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Send DELETE request to the server resp = elcm_request(irmc_info, method='DELETE', path='/sessionInformation/%s/remove' % session_id) if resp.status_code == 200: return # depends on [control=['if'], data=[]] elif resp.status_code == 404: raise ELCMSessionNotFound('Session "%s" does not exist' % session_id) # depends on [control=['if'], data=[]] else: raise scci.SCCIClientError('Failed to remove session "%(session)s" with error code %(error)s' % {'session': session_id, 'error': resp.status_code})
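A hedged usage sketch for the function above; the irmc_info key names and values are assumptions for illustration (the real keys are whatever elcm_request() expects).

# Hypothetical node info dict; key names are illustrative assumptions.
irmc_info = {
    'irmc_address': '192.0.2.10',
    'irmc_username': 'admin',
    'irmc_password': 'secret',
}

# Remove session 5, terminating it first if it is still activated or running.
elcm_session_delete(irmc_info, session_id=5, terminate=True)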
def delete_plate(self, plate_id, delete_meta_data=False): """ Delete a plate from the database :param plate_id: The plate id :param delete_meta_data: Optionally delete all meta data associated with this plate as well :return: None """ if plate_id not in self.plates: logging.info("Plate {} not found for deletion".format(plate_id)) return plate = self.plates[plate_id] if delete_meta_data: for pv in plate.values: identifier = ".".join(map(lambda x: "_".join(x), pv)) self.meta_data_manager.delete(identifier=identifier) with switch_db(PlateDefinitionModel, "hyperstream"): try: p = PlateDefinitionModel.objects.get(plate_id=plate_id) p.delete() del self.plates[plate_id] except DoesNotExist as e: logging.warn(e) logging.info("Plate {} deleted".format(plate_id))
def function[delete_plate, parameter[self, plate_id, delete_meta_data]]: constant[ Delete a plate from the database :param plate_id: The plate id :param delete_meta_data: Optionally delete all meta data associated with this plate as well :return: None ] if compare[name[plate_id] <ast.NotIn object at 0x7da2590d7190> name[self].plates] begin[:] call[name[logging].info, parameter[call[constant[Plate {} not found for deletion].format, parameter[name[plate_id]]]]] return[None] variable[plate] assign[=] call[name[self].plates][name[plate_id]] if name[delete_meta_data] begin[:] for taget[name[pv]] in starred[name[plate].values] begin[:] variable[identifier] assign[=] call[constant[.].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b233dd50>, name[pv]]]]] call[name[self].meta_data_manager.delete, parameter[]] with call[name[switch_db], parameter[name[PlateDefinitionModel], constant[hyperstream]]] begin[:] <ast.Try object at 0x7da1b233c4f0> call[name[logging].info, parameter[call[constant[Plate {} deleted].format, parameter[name[plate_id]]]]]
keyword[def] identifier[delete_plate] ( identifier[self] , identifier[plate_id] , identifier[delete_meta_data] = keyword[False] ): literal[string] keyword[if] identifier[plate_id] keyword[not] keyword[in] identifier[self] . identifier[plates] : identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[plate_id] )) keyword[return] identifier[plate] = identifier[self] . identifier[plates] [ identifier[plate_id] ] keyword[if] identifier[delete_meta_data] : keyword[for] identifier[pv] keyword[in] identifier[plate] . identifier[values] : identifier[identifier] = literal[string] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[x] : literal[string] . identifier[join] ( identifier[x] ), identifier[pv] )) identifier[self] . identifier[meta_data_manager] . identifier[delete] ( identifier[identifier] = identifier[identifier] ) keyword[with] identifier[switch_db] ( identifier[PlateDefinitionModel] , literal[string] ): keyword[try] : identifier[p] = identifier[PlateDefinitionModel] . identifier[objects] . identifier[get] ( identifier[plate_id] = identifier[plate_id] ) identifier[p] . identifier[delete] () keyword[del] identifier[self] . identifier[plates] [ identifier[plate_id] ] keyword[except] identifier[DoesNotExist] keyword[as] identifier[e] : identifier[logging] . identifier[warn] ( identifier[e] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[plate_id] ))
def delete_plate(self, plate_id, delete_meta_data=False): """ Delete a plate from the database :param plate_id: The plate id :param delete_meta_data: Optionally delete all meta data associated with this plate as well :return: None """ if plate_id not in self.plates: logging.info('Plate {} not found for deletion'.format(plate_id)) return # depends on [control=['if'], data=['plate_id']] plate = self.plates[plate_id] if delete_meta_data: for pv in plate.values: identifier = '.'.join(map(lambda x: '_'.join(x), pv)) self.meta_data_manager.delete(identifier=identifier) # depends on [control=['for'], data=['pv']] # depends on [control=['if'], data=[]] with switch_db(PlateDefinitionModel, 'hyperstream'): try: p = PlateDefinitionModel.objects.get(plate_id=plate_id) p.delete() del self.plates[plate_id] # depends on [control=['try'], data=[]] except DoesNotExist as e: logging.warn(e) # depends on [control=['except'], data=['e']] # depends on [control=['with'], data=[]] logging.info('Plate {} deleted'.format(plate_id))
def _render_content(self, content, **settings): """ Perform widget rendering, but do not print anything. """ bar_len = int(settings[self.SETTING_BAR_WIDTH]) if not bar_len: bar_len = TERMINAL_WIDTH - 10 percent = content progress = "" progress += str(settings[self.SETTING_BAR_CHAR]) * int(bar_len * percent) s = {k: settings[k] for k in (self.SETTING_FLAG_PLAIN,)} s.update(settings[self.SETTING_BAR_FORMATING]) progress = self.fmt_text(progress, **s) progress += ' ' * int(bar_len - int(bar_len * percent)) return "{:6.2f}% [{:s}]".format(percent * 100, progress)
def function[_render_content, parameter[self, content]]: constant[ Perform widget rendering, but do not print anything. ] variable[bar_len] assign[=] call[name[int], parameter[call[name[settings]][name[self].SETTING_BAR_WIDTH]]] if <ast.UnaryOp object at 0x7da2049615a0> begin[:] variable[bar_len] assign[=] binary_operation[name[TERMINAL_WIDTH] - constant[10]] variable[percent] assign[=] name[content] variable[progress] assign[=] constant[] <ast.AugAssign object at 0x7da2049632e0> variable[s] assign[=] <ast.DictComp object at 0x7da1b2346950> call[name[s].update, parameter[call[name[settings]][name[self].SETTING_BAR_FORMATING]]] variable[progress] assign[=] call[name[self].fmt_text, parameter[name[progress]]] <ast.AugAssign object at 0x7da1b2345ea0> return[call[constant[{:6.2f}% [{:s}]].format, parameter[binary_operation[name[percent] * constant[100]], name[progress]]]]
keyword[def] identifier[_render_content] ( identifier[self] , identifier[content] ,** identifier[settings] ): literal[string] identifier[bar_len] = identifier[int] ( identifier[settings] [ identifier[self] . identifier[SETTING_BAR_WIDTH] ]) keyword[if] keyword[not] identifier[bar_len] : identifier[bar_len] = identifier[TERMINAL_WIDTH] - literal[int] identifier[percent] = identifier[content] identifier[progress] = literal[string] identifier[progress] += identifier[str] ( identifier[settings] [ identifier[self] . identifier[SETTING_BAR_CHAR] ])* identifier[int] ( identifier[bar_len] * identifier[percent] ) identifier[s] ={ identifier[k] : identifier[settings] [ identifier[k] ] keyword[for] identifier[k] keyword[in] ( identifier[self] . identifier[SETTING_FLAG_PLAIN] ,)} identifier[s] . identifier[update] ( identifier[settings] [ identifier[self] . identifier[SETTING_BAR_FORMATING] ]) identifier[progress] = identifier[self] . identifier[fmt_text] ( identifier[progress] ,** identifier[s] ) identifier[progress] += literal[string] * identifier[int] ( identifier[bar_len] - identifier[int] ( identifier[bar_len] * identifier[percent] )) keyword[return] literal[string] . identifier[format] ( identifier[percent] * literal[int] , identifier[progress] )
def _render_content(self, content, **settings): """ Perform widget rendering, but do not print anything. """ bar_len = int(settings[self.SETTING_BAR_WIDTH]) if not bar_len: bar_len = TERMINAL_WIDTH - 10 # depends on [control=['if'], data=[]] percent = content progress = '' progress += str(settings[self.SETTING_BAR_CHAR]) * int(bar_len * percent) s = {k: settings[k] for k in (self.SETTING_FLAG_PLAIN,)} s.update(settings[self.SETTING_BAR_FORMATING]) progress = self.fmt_text(progress, **s) progress += ' ' * int(bar_len - int(bar_len * percent)) return '{:6.2f}% [{:s}]'.format(percent * 100, progress)
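A standalone sketch of the bar arithmetic used above, with the widget settings replaced by plain literals (illustrative values, not the widget's defaults):

bar_len, percent, bar_char = 40, 0.42, '='

filled = bar_char * int(bar_len * percent)              # 16 chars for 42%
padding = ' ' * int(bar_len - int(bar_len * percent))   # pad to 40 columns
print("{:6.2f}% [{:s}]".format(percent * 100, filled + padding))
# prints a fixed-width line: " 42.00%" followed by the 40-column bar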
def p_ty_funty_complex(self, p): "ty : '(' maybe_arg_types ')' ARROW ty" argument_types=p[2] return_type=p[5] # Check here whether too many kwarg or vararg types are present # Each item in the list uses the dictionary encoding of tagged variants arg_types = [argty['arg_type'] for argty in argument_types if 'arg_type' in argty] vararg_types = [argty['vararg_type'] for argty in argument_types if 'vararg_type' in argty] kwarg_types = [argty['kwarg_type'] for argty in argument_types if 'kwarg_type' in argty] if len(vararg_types) > 1: raise Exception('Argument list with multiple vararg types: %s' % argument_types) if len(kwarg_types) > 1: raise Exception('Argument list with multiple kwarg types: %s' % argument_types) # All the arguments that are not special p[0] = Function(arg_types=arg_types, vararg_type=vararg_types[0] if len(vararg_types) > 0 else None, kwarg_type=kwarg_types[0] if len(kwarg_types) > 0 else None, kwonly_arg_types=None, return_type=return_type)
def function[p_ty_funty_complex, parameter[self, p]]: constant[ty : '(' maybe_arg_types ')' ARROW ty] variable[argument_types] assign[=] call[name[p]][constant[2]] variable[return_type] assign[=] call[name[p]][constant[5]] variable[arg_types] assign[=] <ast.ListComp object at 0x7da18f00d000> variable[vararg_types] assign[=] <ast.ListComp object at 0x7da18f00c340> variable[kwarg_types] assign[=] <ast.ListComp object at 0x7da18f00fe50> if compare[call[name[len], parameter[name[vararg_types]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b23472e0> if compare[call[name[len], parameter[name[kwarg_types]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b0a4ad40> call[name[p]][constant[0]] assign[=] call[name[Function], parameter[]]
keyword[def] identifier[p_ty_funty_complex] ( identifier[self] , identifier[p] ): literal[string] identifier[argument_types] = identifier[p] [ literal[int] ] identifier[return_type] = identifier[p] [ literal[int] ] identifier[arg_types] =[ identifier[argty] [ literal[string] ] keyword[for] identifier[argty] keyword[in] identifier[argument_types] keyword[if] literal[string] keyword[in] identifier[argty] ] identifier[vararg_types] =[ identifier[argty] [ literal[string] ] keyword[for] identifier[argty] keyword[in] identifier[argument_types] keyword[if] literal[string] keyword[in] identifier[argty] ] identifier[kwarg_types] =[ identifier[argty] [ literal[string] ] keyword[for] identifier[argty] keyword[in] identifier[argument_types] keyword[if] literal[string] keyword[in] identifier[argty] ] keyword[if] identifier[len] ( identifier[vararg_types] )> literal[int] : keyword[raise] identifier[Exception] ( literal[string] % identifier[argument_types] ) keyword[if] identifier[len] ( identifier[kwarg_types] )> literal[int] : keyword[raise] identifier[Exception] ( literal[string] % identifier[argument_types] ) identifier[p] [ literal[int] ]= identifier[Function] ( identifier[arg_types] = identifier[arg_types] , identifier[vararg_type] = identifier[vararg_types] [ literal[int] ] keyword[if] identifier[len] ( identifier[vararg_types] )> literal[int] keyword[else] keyword[None] , identifier[kwarg_type] = identifier[kwarg_types] [ literal[int] ] keyword[if] identifier[len] ( identifier[kwarg_types] )> literal[int] keyword[else] keyword[None] , identifier[kwonly_arg_types] = keyword[None] , identifier[return_type] = identifier[return_type] )
def p_ty_funty_complex(self, p): """ty : '(' maybe_arg_types ')' ARROW ty""" argument_types = p[2] return_type = p[5] # Check here whether too many kwarg or vararg types are present # Each item in the list uses the dictionary encoding of tagged variants arg_types = [argty['arg_type'] for argty in argument_types if 'arg_type' in argty] vararg_types = [argty['vararg_type'] for argty in argument_types if 'vararg_type' in argty] kwarg_types = [argty['kwarg_type'] for argty in argument_types if 'kwarg_type' in argty] if len(vararg_types) > 1: raise Exception('Argument list with multiple vararg types: %s' % argument_types) # depends on [control=['if'], data=[]] if len(kwarg_types) > 1: raise Exception('Argument list with multiple kwarg types: %s' % argument_types) # depends on [control=['if'], data=[]] # All the arguments that are not special p[0] = Function(arg_types=arg_types, vararg_type=vararg_types[0] if len(vararg_types) > 0 else None, kwarg_type=kwarg_types[0] if len(kwarg_types) > 0 else None, kwonly_arg_types=None, return_type=return_type)
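To make the dictionary encoding of tagged variants concrete, here is a small illustrative argument list run through the same comprehensions (the type names are placeholders):

argument_types = [{'arg_type': 'int'},
                  {'arg_type': 'str'},
                  {'vararg_type': 'float'},
                  {'kwarg_type': 'bool'}]

arg_types = [a['arg_type'] for a in argument_types if 'arg_type' in a]
vararg_types = [a['vararg_type'] for a in argument_types if 'vararg_type' in a]
kwarg_types = [a['kwarg_type'] for a in argument_types if 'kwarg_type' in a]
print(arg_types, vararg_types, kwarg_types)
# -> ['int', 'str'] ['float'] ['bool']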
def image_list(self, lookup='all'): ''' Return a mapping of all image data for available providers ''' data = {} lookups = self.lookup_providers(lookup) if not lookups: return data for alias, driver in lookups: fun = '{0}.avail_images'.format(driver) if fun not in self.clouds: # The capability to gather images is not supported by this # cloud module log.debug( 'The \'%s\' cloud driver defined under \'%s\' provider ' 'alias is unable to get the images information', driver, alias ) continue if alias not in data: data[alias] = {} try: with salt.utils.context.func_globals_inject( self.clouds[fun], __active_provider_name__=':'.join([alias, driver]) ): data[alias][driver] = self.clouds[fun]() except Exception as err: log.error( 'Failed to get the output of \'%s()\': %s', fun, err, exc_info_on_loglevel=logging.DEBUG ) return data
def function[image_list, parameter[self, lookup]]: constant[ Return a mapping of all image data for available providers ] variable[data] assign[=] dictionary[[], []] variable[lookups] assign[=] call[name[self].lookup_providers, parameter[name[lookup]]] if <ast.UnaryOp object at 0x7da18f8105b0> begin[:] return[name[data]] for taget[tuple[[<ast.Name object at 0x7da18f813340>, <ast.Name object at 0x7da18f812ce0>]]] in starred[name[lookups]] begin[:] variable[fun] assign[=] call[constant[{0}.avail_images].format, parameter[name[driver]]] if compare[name[fun] <ast.NotIn object at 0x7da2590d7190> name[self].clouds] begin[:] call[name[log].debug, parameter[constant[The '%s' cloud driver defined under '%s' provider alias is unable to get the images information], name[driver], name[alias]]] continue if compare[name[alias] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:] call[name[data]][name[alias]] assign[=] dictionary[[], []] <ast.Try object at 0x7da1b2089210> return[name[data]]
keyword[def] identifier[image_list] ( identifier[self] , identifier[lookup] = literal[string] ): literal[string] identifier[data] ={} identifier[lookups] = identifier[self] . identifier[lookup_providers] ( identifier[lookup] ) keyword[if] keyword[not] identifier[lookups] : keyword[return] identifier[data] keyword[for] identifier[alias] , identifier[driver] keyword[in] identifier[lookups] : identifier[fun] = literal[string] . identifier[format] ( identifier[driver] ) keyword[if] identifier[fun] keyword[not] keyword[in] identifier[self] . identifier[clouds] : identifier[log] . identifier[debug] ( literal[string] literal[string] , identifier[driver] , identifier[alias] ) keyword[continue] keyword[if] identifier[alias] keyword[not] keyword[in] identifier[data] : identifier[data] [ identifier[alias] ]={} keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[context] . identifier[func_globals_inject] ( identifier[self] . identifier[clouds] [ identifier[fun] ], identifier[__active_provider_name__] = literal[string] . identifier[join] ([ identifier[alias] , identifier[driver] ]) ): identifier[data] [ identifier[alias] ][ identifier[driver] ]= identifier[self] . identifier[clouds] [ identifier[fun] ]() keyword[except] identifier[Exception] keyword[as] identifier[err] : identifier[log] . identifier[error] ( literal[string] , identifier[fun] , identifier[err] , identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG] ) keyword[return] identifier[data]
def image_list(self, lookup='all'): """ Return a mapping of all image data for available providers """ data = {} lookups = self.lookup_providers(lookup) if not lookups: return data # depends on [control=['if'], data=[]] for (alias, driver) in lookups: fun = '{0}.avail_images'.format(driver) if fun not in self.clouds: # The capability to gather images is not supported by this # cloud module log.debug("The '%s' cloud driver defined under '%s' provider alias is unable to get the images information", driver, alias) continue # depends on [control=['if'], data=[]] if alias not in data: data[alias] = {} # depends on [control=['if'], data=['alias', 'data']] try: with salt.utils.context.func_globals_inject(self.clouds[fun], __active_provider_name__=':'.join([alias, driver])): data[alias][driver] = self.clouds[fun]() # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except Exception as err: log.error("Failed to get the output of '%s()': %s", fun, err, exc_info_on_loglevel=logging.DEBUG) # depends on [control=['except'], data=['err']] # depends on [control=['for'], data=[]] return data
def verify(self, keys=None):
        """ Verify that the assertion is syntactically correct and the
        signature is correct if present.

        :param keys: If the default key file should not be used, then use
        one of these instead.
        """
        try:
            res = self._verify()
        except AssertionError as err:
            logger.error("Verification error on the response: %s", err)
            raise
        else:
            if res is None:
                return None

        if not isinstance(self.response, samlp.Response):
            return self

        if self.parse_assertion(keys):
            return self
        else:
            logger.error("Could not parse the assertion")
            return None
def function[verify, parameter[self, keys]]: constant[ Verify that the assertion is syntactically correct and the signature is correct if present. :param keys: If the default key file should not be used, then use one of these instead. ] <ast.Try object at 0x7da18f811c30> if <ast.UnaryOp object at 0x7da18f8114e0> begin[:] return[name[self]] if call[name[self].parse_assertion, parameter[name[keys]]] begin[:] return[name[self]]
keyword[def] identifier[verify] ( identifier[self] , identifier[keys] = keyword[None] ): literal[string] keyword[try] : identifier[res] = identifier[self] . identifier[_verify] () keyword[except] identifier[AssertionError] keyword[as] identifier[err] : identifier[logger] . identifier[error] ( literal[string] , identifier[err] ) keyword[raise] keyword[else] : keyword[if] identifier[res] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[response] , identifier[samlp] . identifier[Response] ): keyword[return] identifier[self] keyword[if] identifier[self] . identifier[parse_assertion] ( identifier[keys] ): keyword[return] identifier[self] keyword[else] : identifier[logger] . identifier[error] ( literal[string] ) keyword[return] keyword[None]
def verify(self, keys=None): """ Verify that the assertion is syntactically correct and the signature is correct if present. :param keys: If the default key file should not be used, then use one of these instead. """ try: res = self._verify() # depends on [control=['try'], data=[]] except AssertionError as err: logger.error('Verification error on the response: %s', err) raise # depends on [control=['except'], data=['err']] else: if res is None: return None # depends on [control=['if'], data=[]] if not isinstance(self.response, samlp.Response): return self # depends on [control=['if'], data=[]] if self.parse_assertion(keys): return self # depends on [control=['if'], data=[]] else: logger.error('Could not parse the assertion') return None
def structure_analysis_summary_report(feature, parent): """Retrieve an HTML structure analysis table report from a multi exposure analysis. """ _ = feature, parent # NOQA analysis_dir = get_analysis_dir(exposure_structure['key']) if analysis_dir: return get_impact_report_as_string(analysis_dir) return None
def function[structure_analysis_summary_report, parameter[feature, parent]]: constant[Retrieve an HTML structure analysis table report from a multi exposure analysis. ] variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0c53d00>, <ast.Name object at 0x7da1b0c50280>]] variable[analysis_dir] assign[=] call[name[get_analysis_dir], parameter[call[name[exposure_structure]][constant[key]]]] if name[analysis_dir] begin[:] return[call[name[get_impact_report_as_string], parameter[name[analysis_dir]]]] return[constant[None]]
keyword[def] identifier[structure_analysis_summary_report] ( identifier[feature] , identifier[parent] ): literal[string] identifier[_] = identifier[feature] , identifier[parent] identifier[analysis_dir] = identifier[get_analysis_dir] ( identifier[exposure_structure] [ literal[string] ]) keyword[if] identifier[analysis_dir] : keyword[return] identifier[get_impact_report_as_string] ( identifier[analysis_dir] ) keyword[return] keyword[None]
def structure_analysis_summary_report(feature, parent): """Retrieve an HTML structure analysis table report from a multi exposure analysis. """ _ = (feature, parent) # NOQA analysis_dir = get_analysis_dir(exposure_structure['key']) if analysis_dir: return get_impact_report_as_string(analysis_dir) # depends on [control=['if'], data=[]] return None
def consecutive(iterable, n): """ consecutive('ABCDEF', 3) --> ABC BCD CDE DEF consecutive(itertools.cycle(iter), n) to get looped sequence """ iterators = itertools.tee(iterable, n) for i, it in enumerate(iterators): for _ in range(i): next(it, None) return zip(*iterators)
def function[consecutive, parameter[iterable, n]]: constant[ consecutive('ABCDEF', 3) --> ABC BCD CDE DEF consecutive(itertools.cycle(iter), n) to get looped sequence ] variable[iterators] assign[=] call[name[itertools].tee, parameter[name[iterable], name[n]]] for taget[tuple[[<ast.Name object at 0x7da1b2390cd0>, <ast.Name object at 0x7da1b23904c0>]]] in starred[call[name[enumerate], parameter[name[iterators]]]] begin[:] for taget[name[_]] in starred[call[name[range], parameter[name[i]]]] begin[:] call[name[next], parameter[name[it], constant[None]]] return[call[name[zip], parameter[<ast.Starred object at 0x7da1b24ae860>]]]
keyword[def] identifier[consecutive] ( identifier[iterable] , identifier[n] ): literal[string] identifier[iterators] = identifier[itertools] . identifier[tee] ( identifier[iterable] , identifier[n] ) keyword[for] identifier[i] , identifier[it] keyword[in] identifier[enumerate] ( identifier[iterators] ): keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[i] ): identifier[next] ( identifier[it] , keyword[None] ) keyword[return] identifier[zip] (* identifier[iterators] )
def consecutive(iterable, n): """ consecutive('ABCDEF', 3) --> ABC BCD CDE DEF consecutive(itertools.cycle(iter), n) to get looped sequence """ iterators = itertools.tee(iterable, n) for (i, it) in enumerate(iterators): for _ in range(i): next(it, None) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return zip(*iterators)
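Both docstring usages, executed as a quick check:

import itertools

print(["".join(w) for w in consecutive("ABCDEF", 3)])
# -> ['ABC', 'BCD', 'CDE', 'DEF']

looped = itertools.islice(consecutive(itertools.cycle("AB"), 2), 4)
print(list(looped))
# -> [('A', 'B'), ('B', 'A'), ('A', 'B'), ('B', 'A')]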
def load_from_file_like(cls, flo, format=None):
        """
        Load the object from a given file-like object with the given format.
        """
        format = cls.format if format is None else format
        load = getattr(cls, "load_%s" % format, None)
        if load is None:
            raise ValueError("Unknown format '%s'." % format)
        return load(flo)
def function[load_from_file_like, parameter[cls, flo, format]]: constant[ Load the object from a given file-like object with the given format. ] variable[format] assign[=] <ast.IfExp object at 0x7da18f812260> variable[load] assign[=] call[name[getattr], parameter[name[cls], binary_operation[constant[load_%s] <ast.Mod object at 0x7da2590d6920> name[format]], constant[None]]] if compare[name[load] is constant[None]] begin[:] <ast.Raise object at 0x7da18f811f30> return[call[name[load], parameter[name[flo]]]]
keyword[def] identifier[load_from_file_like] ( identifier[cls] , identifier[flo] , identifier[format] = keyword[None] ): literal[string] identifier[format] = identifier[cls] . identifier[format] keyword[if] identifier[format] keyword[is] keyword[None] keyword[else] identifier[format] identifier[load] = identifier[getattr] ( identifier[cls] , literal[string] % identifier[format] , keyword[None] ) keyword[if] identifier[load] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[format] ) keyword[return] identifier[load] ( identifier[flo] )
def load_from_file_like(cls, flo, format=None): """ Load the object from a given file-like object with the given format. """ format = cls.format if format is None else format load = getattr(cls, 'load_%s' % format, None) if load is None: raise ValueError("Unknown format '%s'." % format) # depends on [control=['if'], data=[]] return load(flo)
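A minimal round trip exercising the loader with the cls.format lookup fixed as above; the Model class, its format attribute, and load_pickle are hypothetical scaffolding for illustration.

import io
import pickle

class Model:
    format = 'pickle'  # hypothetical default-format attribute

    @classmethod
    def load_pickle(cls, flo):
        return pickle.load(flo)

    # attach the function above as a classmethod
    load_from_file_like = classmethod(load_from_file_like)

buf = io.BytesIO(pickle.dumps({'weights': [1, 2, 3]}))
print(Model.load_from_file_like(buf))
# -> {'weights': [1, 2, 3]}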
def convert_command_output(*command): """ Command line interface for ``coloredlogs --to-html``. Takes a command (and its arguments) and runs the program under ``script`` (emulating an interactive terminal), intercepts the output of the command and converts ANSI escape sequences in the output to HTML. """ captured_output = capture(command) converted_output = convert(captured_output) if connected_to_terminal(): fd, temporary_file = tempfile.mkstemp(suffix='.html') with open(temporary_file, 'w') as handle: handle.write(converted_output) webbrowser.open(temporary_file) elif captured_output and not captured_output.isspace(): output(converted_output)
def function[convert_command_output, parameter[]]: constant[ Command line interface for ``coloredlogs --to-html``. Takes a command (and its arguments) and runs the program under ``script`` (emulating an interactive terminal), intercepts the output of the command and converts ANSI escape sequences in the output to HTML. ] variable[captured_output] assign[=] call[name[capture], parameter[name[command]]] variable[converted_output] assign[=] call[name[convert], parameter[name[captured_output]]] if call[name[connected_to_terminal], parameter[]] begin[:] <ast.Tuple object at 0x7da1b0749990> assign[=] call[name[tempfile].mkstemp, parameter[]] with call[name[open], parameter[name[temporary_file], constant[w]]] begin[:] call[name[handle].write, parameter[name[converted_output]]] call[name[webbrowser].open, parameter[name[temporary_file]]]
keyword[def] identifier[convert_command_output] (* identifier[command] ): literal[string] identifier[captured_output] = identifier[capture] ( identifier[command] ) identifier[converted_output] = identifier[convert] ( identifier[captured_output] ) keyword[if] identifier[connected_to_terminal] (): identifier[fd] , identifier[temporary_file] = identifier[tempfile] . identifier[mkstemp] ( identifier[suffix] = literal[string] ) keyword[with] identifier[open] ( identifier[temporary_file] , literal[string] ) keyword[as] identifier[handle] : identifier[handle] . identifier[write] ( identifier[converted_output] ) identifier[webbrowser] . identifier[open] ( identifier[temporary_file] ) keyword[elif] identifier[captured_output] keyword[and] keyword[not] identifier[captured_output] . identifier[isspace] (): identifier[output] ( identifier[converted_output] )
def convert_command_output(*command): """ Command line interface for ``coloredlogs --to-html``. Takes a command (and its arguments) and runs the program under ``script`` (emulating an interactive terminal), intercepts the output of the command and converts ANSI escape sequences in the output to HTML. """ captured_output = capture(command) converted_output = convert(captured_output) if connected_to_terminal(): (fd, temporary_file) = tempfile.mkstemp(suffix='.html') with open(temporary_file, 'w') as handle: handle.write(converted_output) # depends on [control=['with'], data=['handle']] webbrowser.open(temporary_file) # depends on [control=['if'], data=[]] elif captured_output and (not captured_output.isspace()): output(converted_output) # depends on [control=['if'], data=[]]
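A hedged invocation sketch; it assumes the surrounding converter module (capture, convert, output) is importable and that the command emits ANSI colors.

# In a terminal this opens the converted HTML in a browser,
# otherwise the HTML is printed to stdout.
convert_command_output('ls', '--color=always')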
def collect_columns(self):
        """
        Collect column information from a given model.

        A column info dict contains the py3o information:

            exclude

                Should the column be excluded from the current context?

            name

                the name of the key in the resulting py3o context of the
                column

            __col__

                The original column object

            __prop__

                In case of a relationship, the SqlaContext wrapping the given
                object

        """
        res = []
        for prop in self.get_sorted_columns():
            info_dict = self.get_info_field(prop)
            export_infos = info_dict.get('export', {}).copy()
            main_infos = export_infos.get(self.config_key, {}).copy()
            if export_infos.get('exclude'):
                if main_infos.get('exclude', True):
                    continue

            infos = export_infos
            infos.update(main_infos)

            # If the 'name' key is not defined, default it to the column
            # name
            infos.setdefault('name', prop.key)
            infos['__col__'] = prop
            if isinstance(prop, RelationshipProperty):
                join = str(prop.primaryjoin)
                if join in self.rels:
                    continue
                else:
                    self.rels.append(str(join))

                infos['__prop__'] = SqlaContext(
                    prop.mapper,
                    rels=self.rels[:]
                )

            res.append(infos)

        return res
def function[collect_columns, parameter[self]]: constant[ Collect column information from a given model. A column info dict contains the py3o information: exclude Should the column be excluded from the current context? name the name of the key in the resulting py3o context of the column __col__ The original column object __prop__ In case of a relationship, the SqlaContext wrapping the given object ] variable[res] assign[=] list[[]] for taget[name[prop]] in starred[call[name[self].get_sorted_columns, parameter[]]] begin[:] variable[info_dict] assign[=] call[name[self].get_info_field, parameter[name[prop]]] variable[export_infos] assign[=] call[call[name[info_dict].get, parameter[constant[export], dictionary[[], []]]].copy, parameter[]] variable[main_infos] assign[=] call[call[name[export_infos].get, parameter[name[self].config_key, dictionary[[], []]]].copy, parameter[]] if call[name[export_infos].get, parameter[constant[exclude]]] begin[:] if call[name[main_infos].get, parameter[constant[exclude], constant[True]]] begin[:] continue variable[infos] assign[=] name[export_infos] call[name[infos].update, parameter[name[main_infos]]] call[name[infos].setdefault, parameter[constant[name], name[prop].key]] call[name[infos]][constant[__col__]] assign[=] name[prop] if call[name[isinstance], parameter[name[prop], name[RelationshipProperty]]] begin[:] variable[join] assign[=] call[name[str], parameter[name[prop].primaryjoin]] if compare[name[join] in name[self].rels] begin[:] continue call[name[res].append, parameter[name[infos]]] return[name[res]]
keyword[def] identifier[collect_columns] ( identifier[self] ): literal[string] identifier[res] =[] keyword[for] identifier[prop] keyword[in] identifier[self] . identifier[get_sorted_columns] (): identifier[info_dict] = identifier[self] . identifier[get_info_field] ( identifier[prop] ) identifier[export_infos] = identifier[info_dict] . identifier[get] ( literal[string] ,{}). identifier[copy] () identifier[main_infos] = identifier[export_infos] . identifier[get] ( identifier[self] . identifier[config_key] ,{}). identifier[copy] () keyword[if] identifier[export_infos] . identifier[get] ( literal[string] ): keyword[if] identifier[main_infos] . identifier[get] ( literal[string] , keyword[True] ): keyword[continue] identifier[infos] = identifier[export_infos] identifier[infos] . identifier[update] ( identifier[main_infos] ) identifier[infos] . identifier[setdefault] ( literal[string] , identifier[prop] . identifier[key] ) identifier[infos] [ literal[string] ]= identifier[prop] keyword[if] identifier[isinstance] ( identifier[prop] , identifier[RelationshipProperty] ): identifier[join] = identifier[str] ( identifier[prop] . identifier[primaryjoin] ) keyword[if] identifier[join] keyword[in] identifier[self] . identifier[rels] : keyword[continue] keyword[else] : identifier[self] . identifier[rels] . identifier[append] ( identifier[str] ( identifier[join] )) identifier[infos] [ literal[string] ]= identifier[SqlaContext] ( identifier[prop] . identifier[mapper] , identifier[rels] = identifier[self] . identifier[rels] [:] ) identifier[res] . identifier[append] ( identifier[infos] ) keyword[return] identifier[res]
def collect_columns(self): """ Collect column information from a given model. A column info dict contains the py3o information: exclude Should the column be excluded from the current context? name the name of the key in the resulting py3o context of the column __col__ The original column object __prop__ In case of a relationship, the SqlaContext wrapping the given object """ res = [] for prop in self.get_sorted_columns(): info_dict = self.get_info_field(prop) export_infos = info_dict.get('export', {}).copy() main_infos = export_infos.get(self.config_key, {}).copy() if export_infos.get('exclude'): if main_infos.get('exclude', True): continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] infos = export_infos infos.update(main_infos) # If the 'name' key is not defined, default it to the column # name infos.setdefault('name', prop.key) infos['__col__'] = prop if isinstance(prop, RelationshipProperty): join = str(prop.primaryjoin) if join in self.rels: continue # depends on [control=['if'], data=[]] else: self.rels.append(str(join)) infos['__prop__'] = SqlaContext(prop.mapper, rels=self.rels[:]) # depends on [control=['if'], data=[]] res.append(infos) # depends on [control=['for'], data=['prop']] return res
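An illustrative SQLAlchemy column using the info()/export convention the method reads, assuming the context was built with config_key == 'py3o' (both the key and the column are assumptions):

from sqlalchemy import Column, String

name = Column(String, info={
    'export': {
        # picked up via export_infos.get(self.config_key, {})
        'py3o': {'name': 'customer_name'},
    }
})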
def load_db(self): """Load the taxonomy into a sqlite3 database. This will set ``self.db`` to a sqlite3 database which contains all of the taxonomic information in the reference package. """ db = taxdb.Taxdb() db.create_tables() reader = csv.DictReader(self.open_resource('taxonomy', 'rU')) db.insert_from_taxtable(lambda: reader._fieldnames, reader) curs = db.cursor() reader = csv.DictReader(self.open_resource('seq_info', 'rU')) curs.executemany("INSERT INTO sequences VALUES (?, ?)", ((row['seqname'], row['tax_id']) for row in reader)) db.commit() self.db = db
def function[load_db, parameter[self]]: constant[Load the taxonomy into a sqlite3 database. This will set ``self.db`` to a sqlite3 database which contains all of the taxonomic information in the reference package. ] variable[db] assign[=] call[name[taxdb].Taxdb, parameter[]] call[name[db].create_tables, parameter[]] variable[reader] assign[=] call[name[csv].DictReader, parameter[call[name[self].open_resource, parameter[constant[taxonomy], constant[rU]]]]] call[name[db].insert_from_taxtable, parameter[<ast.Lambda object at 0x7da1b1a8c040>, name[reader]]] variable[curs] assign[=] call[name[db].cursor, parameter[]] variable[reader] assign[=] call[name[csv].DictReader, parameter[call[name[self].open_resource, parameter[constant[seq_info], constant[rU]]]]] call[name[curs].executemany, parameter[constant[INSERT INTO sequences VALUES (?, ?)], <ast.GeneratorExp object at 0x7da1b1a8ce20>]] call[name[db].commit, parameter[]] name[self].db assign[=] name[db]
keyword[def] identifier[load_db] ( identifier[self] ): literal[string] identifier[db] = identifier[taxdb] . identifier[Taxdb] () identifier[db] . identifier[create_tables] () identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[self] . identifier[open_resource] ( literal[string] , literal[string] )) identifier[db] . identifier[insert_from_taxtable] ( keyword[lambda] : identifier[reader] . identifier[_fieldnames] , identifier[reader] ) identifier[curs] = identifier[db] . identifier[cursor] () identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[self] . identifier[open_resource] ( literal[string] , literal[string] )) identifier[curs] . identifier[executemany] ( literal[string] , (( identifier[row] [ literal[string] ], identifier[row] [ literal[string] ]) keyword[for] identifier[row] keyword[in] identifier[reader] )) identifier[db] . identifier[commit] () identifier[self] . identifier[db] = identifier[db]
def load_db(self): """Load the taxonomy into a sqlite3 database. This will set ``self.db`` to a sqlite3 database which contains all of the taxonomic information in the reference package. """ db = taxdb.Taxdb() db.create_tables() reader = csv.DictReader(self.open_resource('taxonomy', 'rU')) db.insert_from_taxtable(lambda : reader._fieldnames, reader) curs = db.cursor() reader = csv.DictReader(self.open_resource('seq_info', 'rU')) curs.executemany('INSERT INTO sequences VALUES (?, ?)', ((row['seqname'], row['tax_id']) for row in reader)) db.commit() self.db = db
def is_address_reserved(self, address): """ Determines if an address belongs to a reserved page. @note: Returns always C{False} for kernel mode addresses. @type address: int @param address: Memory address to query. @rtype: bool @return: C{True} if the address belongs to a reserved page. @raise WindowsError: An exception is raised on error. """ try: mbi = self.mquery(address) except WindowsError: e = sys.exc_info()[1] if e.winerror == win32.ERROR_INVALID_PARAMETER: return False raise return mbi.is_reserved()
def function[is_address_reserved, parameter[self, address]]: constant[ Determines if an address belongs to a reserved page. @note: Returns always C{False} for kernel mode addresses. @type address: int @param address: Memory address to query. @rtype: bool @return: C{True} if the address belongs to a reserved page. @raise WindowsError: An exception is raised on error. ] <ast.Try object at 0x7da204623a00> return[call[name[mbi].is_reserved, parameter[]]]
keyword[def] identifier[is_address_reserved] ( identifier[self] , identifier[address] ): literal[string] keyword[try] : identifier[mbi] = identifier[self] . identifier[mquery] ( identifier[address] ) keyword[except] identifier[WindowsError] : identifier[e] = identifier[sys] . identifier[exc_info] ()[ literal[int] ] keyword[if] identifier[e] . identifier[winerror] == identifier[win32] . identifier[ERROR_INVALID_PARAMETER] : keyword[return] keyword[False] keyword[raise] keyword[return] identifier[mbi] . identifier[is_reserved] ()
def is_address_reserved(self, address): """ Determines if an address belongs to a reserved page. @note: Returns always C{False} for kernel mode addresses. @type address: int @param address: Memory address to query. @rtype: bool @return: C{True} if the address belongs to a reserved page. @raise WindowsError: An exception is raised on error. """ try: mbi = self.mquery(address) # depends on [control=['try'], data=[]] except WindowsError: e = sys.exc_info()[1] if e.winerror == win32.ERROR_INVALID_PARAMETER: return False # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=[]] return mbi.is_reserved()
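Under the hood this boils down to VirtualQuery's State field; a minimal Windows-only ctypes sketch for the current process (the queried address is arbitrary, and the struct layout follows the Win32 MEMORY_BASIC_INFORMATION):

import ctypes

class MEMORY_BASIC_INFORMATION(ctypes.Structure):
    _fields_ = [('BaseAddress', ctypes.c_void_p),
                ('AllocationBase', ctypes.c_void_p),
                ('AllocationProtect', ctypes.c_ulong),  # DWORD
                ('RegionSize', ctypes.c_size_t),
                ('State', ctypes.c_ulong),
                ('Protect', ctypes.c_ulong),
                ('Type', ctypes.c_ulong)]

MEM_RESERVE = 0x2000
mbi = MEMORY_BASIC_INFORMATION()
ctypes.windll.kernel32.VirtualQuery(ctypes.c_void_p(0x10000),
                                    ctypes.byref(mbi), ctypes.sizeof(mbi))
print(mbi.State == MEM_RESERVE)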
def per_base(x, windows, is_accessible=None, fill=np.nan): """Calculate the per-base value of a windowed statistic. Parameters ---------- x : array_like, shape (n_windows,) The statistic to average per-base. windows : array_like, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions using 1-based coordinates. is_accessible : array_like, bool, shape (len(contig),), optional Boolean array indicating accessibility status for all positions in the chromosome/contig. fill : object, optional Use this value where there are no accessible bases in a window. Returns ------- y : ndarray, float, shape (n_windows,) The input array divided by the number of (accessible) bases in each window. n_bases : ndarray, int, shape (n_windows,) The number of (accessible) bases in each window """ # calculate window sizes if is_accessible is None: # N.B., window stops are included n_bases = np.diff(windows, axis=1).reshape(-1) + 1 else: n_bases = np.array([np.count_nonzero(is_accessible[i-1:j]) for i, j in windows]) # deal with multidimensional x if x.ndim == 1: pass elif x.ndim == 2: n_bases = n_bases[:, None] else: raise NotImplementedError('only arrays of 1 or 2 dimensions supported') # calculate density per-base with ignore_invalid(): y = np.where(n_bases > 0, x / n_bases, fill) # restore to 1-dimensional if n_bases.ndim > 1: n_bases = n_bases.reshape(-1) return y, n_bases
def function[per_base, parameter[x, windows, is_accessible, fill]]: constant[Calculate the per-base value of a windowed statistic. Parameters ---------- x : array_like, shape (n_windows,) The statistic to average per-base. windows : array_like, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions using 1-based coordinates. is_accessible : array_like, bool, shape (len(contig),), optional Boolean array indicating accessibility status for all positions in the chromosome/contig. fill : object, optional Use this value where there are no accessible bases in a window. Returns ------- y : ndarray, float, shape (n_windows,) The input array divided by the number of (accessible) bases in each window. n_bases : ndarray, int, shape (n_windows,) The number of (accessible) bases in each window ] if compare[name[is_accessible] is constant[None]] begin[:] variable[n_bases] assign[=] binary_operation[call[call[name[np].diff, parameter[name[windows]]].reshape, parameter[<ast.UnaryOp object at 0x7da20c6c5d80>]] + constant[1]] if compare[name[x].ndim equal[==] constant[1]] begin[:] pass with call[name[ignore_invalid], parameter[]] begin[:] variable[y] assign[=] call[name[np].where, parameter[compare[name[n_bases] greater[>] constant[0]], binary_operation[name[x] / name[n_bases]], name[fill]]] if compare[name[n_bases].ndim greater[>] constant[1]] begin[:] variable[n_bases] assign[=] call[name[n_bases].reshape, parameter[<ast.UnaryOp object at 0x7da20e955330>]] return[tuple[[<ast.Name object at 0x7da18eb55780>, <ast.Name object at 0x7da18eb54280>]]]
keyword[def] identifier[per_base] ( identifier[x] , identifier[windows] , identifier[is_accessible] = keyword[None] , identifier[fill] = identifier[np] . identifier[nan] ): literal[string] keyword[if] identifier[is_accessible] keyword[is] keyword[None] : identifier[n_bases] = identifier[np] . identifier[diff] ( identifier[windows] , identifier[axis] = literal[int] ). identifier[reshape] (- literal[int] )+ literal[int] keyword[else] : identifier[n_bases] = identifier[np] . identifier[array] ([ identifier[np] . identifier[count_nonzero] ( identifier[is_accessible] [ identifier[i] - literal[int] : identifier[j] ]) keyword[for] identifier[i] , identifier[j] keyword[in] identifier[windows] ]) keyword[if] identifier[x] . identifier[ndim] == literal[int] : keyword[pass] keyword[elif] identifier[x] . identifier[ndim] == literal[int] : identifier[n_bases] = identifier[n_bases] [:, keyword[None] ] keyword[else] : keyword[raise] identifier[NotImplementedError] ( literal[string] ) keyword[with] identifier[ignore_invalid] (): identifier[y] = identifier[np] . identifier[where] ( identifier[n_bases] > literal[int] , identifier[x] / identifier[n_bases] , identifier[fill] ) keyword[if] identifier[n_bases] . identifier[ndim] > literal[int] : identifier[n_bases] = identifier[n_bases] . identifier[reshape] (- literal[int] ) keyword[return] identifier[y] , identifier[n_bases]
def per_base(x, windows, is_accessible=None, fill=np.nan): """Calculate the per-base value of a windowed statistic. Parameters ---------- x : array_like, shape (n_windows,) The statistic to average per-base. windows : array_like, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions using 1-based coordinates. is_accessible : array_like, bool, shape (len(contig),), optional Boolean array indicating accessibility status for all positions in the chromosome/contig. fill : object, optional Use this value where there are no accessible bases in a window. Returns ------- y : ndarray, float, shape (n_windows,) The input array divided by the number of (accessible) bases in each window. n_bases : ndarray, int, shape (n_windows,) The number of (accessible) bases in each window """ # calculate window sizes if is_accessible is None: # N.B., window stops are included n_bases = np.diff(windows, axis=1).reshape(-1) + 1 # depends on [control=['if'], data=[]] else: n_bases = np.array([np.count_nonzero(is_accessible[i - 1:j]) for (i, j) in windows]) # deal with multidimensional x if x.ndim == 1: pass # depends on [control=['if'], data=[]] elif x.ndim == 2: n_bases = n_bases[:, None] # depends on [control=['if'], data=[]] else: raise NotImplementedError('only arrays of 1 or 2 dimensions supported') # calculate density per-base with ignore_invalid(): y = np.where(n_bases > 0, x / n_bases, fill) # depends on [control=['with'], data=[]] # restore to 1-dimensional if n_bases.ndim > 1: n_bases = n_bases.reshape(-1) # depends on [control=['if'], data=[]] return (y, n_bases)
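The window-size and density arithmetic, isolated into a standalone numpy sketch with illustrative values (no accessibility mask):

import numpy as np

windows = np.array([[1, 10], [11, 20], [21, 25]])   # 1-based, stops inclusive
x = np.array([5.0, 2.0, 0.0])                       # e.g. counts per window

n_bases = np.diff(windows, axis=1).reshape(-1) + 1  # -> [10 10  5]
y = np.where(n_bases > 0, x / n_bases, np.nan)      # -> [0.5 0.2 0. ]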
def default_links_factory(pid, record=None, **kwargs): """Factory for record links generation. :param pid: A Persistent Identifier instance. :returns: Dictionary containing a list of useful links for the record. """ endpoint = '.{0}_item'.format( current_records_rest.default_endpoint_prefixes[pid.pid_type]) links = dict(self=url_for(endpoint, pid_value=pid.pid_value, _external=True)) return links
def function[default_links_factory, parameter[pid, record]]: constant[Factory for record links generation. :param pid: A Persistent Identifier instance. :returns: Dictionary containing a list of useful links for the record. ] variable[endpoint] assign[=] call[constant[.{0}_item].format, parameter[call[name[current_records_rest].default_endpoint_prefixes][name[pid].pid_type]]] variable[links] assign[=] call[name[dict], parameter[]] return[name[links]]
keyword[def] identifier[default_links_factory] ( identifier[pid] , identifier[record] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[endpoint] = literal[string] . identifier[format] ( identifier[current_records_rest] . identifier[default_endpoint_prefixes] [ identifier[pid] . identifier[pid_type] ]) identifier[links] = identifier[dict] ( identifier[self] = identifier[url_for] ( identifier[endpoint] , identifier[pid_value] = identifier[pid] . identifier[pid_value] , identifier[_external] = keyword[True] )) keyword[return] identifier[links]
def default_links_factory(pid, record=None, **kwargs): """Factory for record links generation. :param pid: A Persistent Identifier instance. :returns: Dictionary containing a list of useful links for the record. """ endpoint = '.{0}_item'.format(current_records_rest.default_endpoint_prefixes[pid.pid_type]) links = dict(self=url_for(endpoint, pid_value=pid.pid_value, _external=True)) return links
def get_symbol_train(network, num_classes, from_layers, num_filters, strides, pads, sizes, ratios, normalizations=-1, steps=[], min_filter=128, nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs): """Build network symbol for training SSD Parameters ---------- network : str base network symbol name num_classes : int number of object classes not including background from_layers : list of str feature extraction layers, use '' to add extra layers For example: from_layers = ['relu4_3', 'fc7', '', '', '', ''] which means extract features from relu4_3 and fc7, adding 4 extra layers on top of fc7 num_filters : list of int number of filters for extra layers, you can use -1 for extracted features; however, if normalization and scaling are applied, the number of filters for that layer must be provided. For example: num_filters = [512, -1, 512, 256, 256, 256] strides : list of int strides for the 3x3 convolution appended, -1 can be used for extracted feature layers pads : list of int paddings for the 3x3 convolution, -1 can be used for extracted layers sizes : list or list of list [min_size, max_size] for all layers or [[], [], []...] for specific layers ratios : list or list of list [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers normalizations : int or list of int use normalizations value for all layers or [...] for specific layers, -1 indicates no normalization or scaling steps : list specify steps for each MultiBoxPrior layer; leave empty to calculate them from layer dimensions min_filter : int minimum number of filters used in 1x1 convolution nms_thresh : float non-maximum suppression threshold force_suppress : boolean whether to suppress overlapping objects of different classes nms_topk : int apply NMS to top K detections Returns ------- mx.Symbol """ label = mx.sym.Variable('label') body = import_module(network).get_symbol(num_classes, **kwargs) layers = multi_layer_feature(body, from_layers, num_filters, strides, pads, min_filter=min_filter) loc_preds, cls_preds, anchor_boxes = multibox_layer(layers, \ num_classes, sizes=sizes, ratios=ratios, normalization=normalizations, \ num_channels=num_filters, clip=False, interm_layer=0, steps=steps) tmp = mx.symbol.contrib.MultiBoxTarget( *[anchor_boxes, label, cls_preds], overlap_threshold=.5, \ ignore_label=-1, negative_mining_ratio=3, minimum_negative_samples=0, \ negative_mining_thresh=.5, variances=(0.1, 0.1, 0.2, 0.2), name="multibox_target") loc_target = tmp[0] loc_target_mask = tmp[1] cls_target = tmp[2] cls_prob = mx.symbol.SoftmaxOutput(data=cls_preds, label=cls_target, \ ignore_label=-1, use_ignore=True, grad_scale=1., multi_output=True, \ normalization='valid', name="cls_prob") loc_loss_ = mx.symbol.smooth_l1(name="loc_loss_", \ data=loc_target_mask * (loc_preds - loc_target), scalar=1.0) loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1., \ normalization='valid', name="loc_loss") # monitoring training status cls_label = mx.symbol.MakeLoss(data=cls_target, grad_scale=0, name="cls_label") det = mx.symbol.contrib.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes], \ name="detection", nms_threshold=nms_thresh, force_suppress=force_suppress, variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk) det = mx.symbol.MakeLoss(data=det, grad_scale=0, name="det_out") # group output out = mx.symbol.Group([cls_prob, loc_loss, cls_label, det]) return out
def function[get_symbol_train, parameter[network, num_classes, from_layers, num_filters, strides, pads, sizes, ratios, normalizations, steps, min_filter, nms_thresh, force_suppress, nms_topk]]: constant[Build network symbol for training SSD Parameters ---------- network : str base network symbol name num_classes : int number of object classes not including background from_layers : list of str feature extraction layers, use '' to add extra layers For example: from_layers = ['relu4_3', 'fc7', '', '', '', ''] which means extract features from relu4_3 and fc7, adding 4 extra layers on top of fc7 num_filters : list of int number of filters for extra layers, you can use -1 for extracted features; however, if normalization and scaling are applied, the number of filters for that layer must be provided. For example: num_filters = [512, -1, 512, 256, 256, 256] strides : list of int strides for the 3x3 convolution appended, -1 can be used for extracted feature layers pads : list of int paddings for the 3x3 convolution, -1 can be used for extracted layers sizes : list or list of list [min_size, max_size] for all layers or [[], [], []...] for specific layers ratios : list or list of list [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers normalizations : int or list of int use normalizations value for all layers or [...] for specific layers, -1 indicates no normalization or scaling steps : list specify steps for each MultiBoxPrior layer; leave empty to calculate them from layer dimensions min_filter : int minimum number of filters used in 1x1 convolution nms_thresh : float non-maximum suppression threshold force_suppress : boolean whether to suppress overlapping objects of different classes nms_topk : int apply NMS to top K detections Returns ------- mx.Symbol ] variable[label] assign[=] call[name[mx].sym.Variable, parameter[constant[label]]] variable[body] assign[=] call[call[name[import_module], parameter[name[network]]].get_symbol, parameter[name[num_classes]]] variable[layers] assign[=] call[name[multi_layer_feature], parameter[name[body], name[from_layers], name[num_filters], name[strides], name[pads]]] <ast.Tuple object at 0x7da1b20fa680> assign[=] call[name[multibox_layer], parameter[name[layers], name[num_classes]]] variable[tmp] assign[=] call[name[mx].symbol.contrib.MultiBoxTarget, parameter[<ast.Starred object at 0x7da1b200d720>]] variable[loc_target] assign[=] call[name[tmp]][constant[0]] variable[loc_target_mask] assign[=] call[name[tmp]][constant[1]] variable[cls_target] assign[=] call[name[tmp]][constant[2]] variable[cls_prob] assign[=] call[name[mx].symbol.SoftmaxOutput, parameter[]] variable[loc_loss_] assign[=] call[name[mx].symbol.smooth_l1, parameter[]] variable[loc_loss] assign[=] call[name[mx].symbol.MakeLoss, parameter[name[loc_loss_]]] variable[cls_label] assign[=] call[name[mx].symbol.MakeLoss, parameter[]] variable[det] assign[=] call[name[mx].symbol.contrib.MultiBoxDetection, parameter[<ast.Starred object at 0x7da1b2064b80>]] variable[det] assign[=] call[name[mx].symbol.MakeLoss, parameter[]] variable[out] assign[=] call[name[mx].symbol.Group, parameter[list[[<ast.Name object at 0x7da1b2066e00>, <ast.Name object at 0x7da1b2065cc0>, <ast.Name object at 0x7da1b2065360>, <ast.Name object at 0x7da1b2066650>]]]] return[name[out]]
keyword[def] identifier[get_symbol_train] ( identifier[network] , identifier[num_classes] , identifier[from_layers] , identifier[num_filters] , identifier[strides] , identifier[pads] , identifier[sizes] , identifier[ratios] , identifier[normalizations] =- literal[int] , identifier[steps] =[], identifier[min_filter] = literal[int] , identifier[nms_thresh] = literal[int] , identifier[force_suppress] = keyword[False] , identifier[nms_topk] = literal[int] ,** identifier[kwargs] ): literal[string] identifier[label] = identifier[mx] . identifier[sym] . identifier[Variable] ( literal[string] ) identifier[body] = identifier[import_module] ( identifier[network] ). identifier[get_symbol] ( identifier[num_classes] ,** identifier[kwargs] ) identifier[layers] = identifier[multi_layer_feature] ( identifier[body] , identifier[from_layers] , identifier[num_filters] , identifier[strides] , identifier[pads] , identifier[min_filter] = identifier[min_filter] ) identifier[loc_preds] , identifier[cls_preds] , identifier[anchor_boxes] = identifier[multibox_layer] ( identifier[layers] , identifier[num_classes] , identifier[sizes] = identifier[sizes] , identifier[ratios] = identifier[ratios] , identifier[normalization] = identifier[normalizations] , identifier[num_channels] = identifier[num_filters] , identifier[clip] = keyword[False] , identifier[interm_layer] = literal[int] , identifier[steps] = identifier[steps] ) identifier[tmp] = identifier[mx] . identifier[symbol] . identifier[contrib] . identifier[MultiBoxTarget] ( *[ identifier[anchor_boxes] , identifier[label] , identifier[cls_preds] ], identifier[overlap_threshold] = literal[int] , identifier[ignore_label] =- literal[int] , identifier[negative_mining_ratio] = literal[int] , identifier[minimum_negative_samples] = literal[int] , identifier[negative_mining_thresh] = literal[int] , identifier[variances] =( literal[int] , literal[int] , literal[int] , literal[int] ), identifier[name] = literal[string] ) identifier[loc_target] = identifier[tmp] [ literal[int] ] identifier[loc_target_mask] = identifier[tmp] [ literal[int] ] identifier[cls_target] = identifier[tmp] [ literal[int] ] identifier[cls_prob] = identifier[mx] . identifier[symbol] . identifier[SoftmaxOutput] ( identifier[data] = identifier[cls_preds] , identifier[label] = identifier[cls_target] , identifier[ignore_label] =- literal[int] , identifier[use_ignore] = keyword[True] , identifier[grad_scale] = literal[int] , identifier[multi_output] = keyword[True] , identifier[normalization] = literal[string] , identifier[name] = literal[string] ) identifier[loc_loss_] = identifier[mx] . identifier[symbol] . identifier[smooth_l1] ( identifier[name] = literal[string] , identifier[data] = identifier[loc_target_mask] *( identifier[loc_preds] - identifier[loc_target] ), identifier[scalar] = literal[int] ) identifier[loc_loss] = identifier[mx] . identifier[symbol] . identifier[MakeLoss] ( identifier[loc_loss_] , identifier[grad_scale] = literal[int] , identifier[normalization] = literal[string] , identifier[name] = literal[string] ) identifier[cls_label] = identifier[mx] . identifier[symbol] . identifier[MakeLoss] ( identifier[data] = identifier[cls_target] , identifier[grad_scale] = literal[int] , identifier[name] = literal[string] ) identifier[det] = identifier[mx] . identifier[symbol] . identifier[contrib] . identifier[MultiBoxDetection] (*[ identifier[cls_prob] , identifier[loc_preds] , identifier[anchor_boxes] ], identifier[name] = literal[string] , identifier[nms_threshold] = identifier[nms_thresh] , identifier[force_suppress] = identifier[force_suppress] , identifier[variances] =( literal[int] , literal[int] , literal[int] , literal[int] ), identifier[nms_topk] = identifier[nms_topk] ) identifier[det] = identifier[mx] . identifier[symbol] . identifier[MakeLoss] ( identifier[data] = identifier[det] , identifier[grad_scale] = literal[int] , identifier[name] = literal[string] ) identifier[out] = identifier[mx] . identifier[symbol] . identifier[Group] ([ identifier[cls_prob] , identifier[loc_loss] , identifier[cls_label] , identifier[det] ]) keyword[return] identifier[out]
def get_symbol_train(network, num_classes, from_layers, num_filters, strides, pads, sizes, ratios, normalizations=-1, steps=[], min_filter=128, nms_thresh=0.5, force_suppress=False, nms_topk=400, **kwargs):
    """Build network symbol for training SSD

    Parameters
    ----------
    network : str
        base network symbol name
    num_classes : int
        number of object classes not including background
    from_layers : list of str
        feature extraction layers, use '' to add extra layers
        For example:
        from_layers = ['relu4_3', 'fc7', '', '', '', '']
        which means extracting features from relu4_3 and fc7, adding 4 extra layers
        on top of fc7
    num_filters : list of int
        number of filters for extra layers, you can use -1 for extracted features,
        however, if normalization and scale are applied, the number of filters for
        that layer must be provided.
        For example:
        num_filters = [512, -1, 512, 256, 256, 256]
    strides : list of int
        strides for the 3x3 convolution appended, -1 can be used for extracted
        feature layers
    pads : list of int
        paddings for the 3x3 convolution, -1 can be used for extracted layers
    sizes : list or list of list
        [min_size, max_size] for all layers or [[], [], []...] for specific layers
    ratios : list or list of list
        [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers
    normalizations : int or list of int
        use normalizations value for all layers or [...] for specific layers,
        -1 indicates no normalization and scaling
    steps : list
        specify steps for each MultiBoxPrior layer; leave empty and steps will be
        calculated from layer dimensions
    min_filter : int
        minimum number of filters used in 1x1 convolution
    nms_thresh : float
        non-maximum suppression threshold
    force_suppress : boolean
        whether to suppress objects of different classes together
    nms_topk : int
        apply NMS to top K detections

    Returns
    -------
    mx.Symbol

    """
    label = mx.sym.Variable('label')
    body = import_module(network).get_symbol(num_classes, **kwargs)
    layers = multi_layer_feature(body, from_layers, num_filters, strides, pads, min_filter=min_filter)
    (loc_preds, cls_preds, anchor_boxes) = multibox_layer(layers, num_classes, sizes=sizes, ratios=ratios, normalization=normalizations, num_channels=num_filters, clip=False, interm_layer=0, steps=steps)
    tmp = mx.symbol.contrib.MultiBoxTarget(*[anchor_boxes, label, cls_preds], overlap_threshold=0.5, ignore_label=-1, negative_mining_ratio=3, minimum_negative_samples=0, negative_mining_thresh=0.5, variances=(0.1, 0.1, 0.2, 0.2), name='multibox_target')
    loc_target = tmp[0]
    loc_target_mask = tmp[1]
    cls_target = tmp[2]
    cls_prob = mx.symbol.SoftmaxOutput(data=cls_preds, label=cls_target, ignore_label=-1, use_ignore=True, grad_scale=1.0, multi_output=True, normalization='valid', name='cls_prob')
    loc_loss_ = mx.symbol.smooth_l1(name='loc_loss_', data=loc_target_mask * (loc_preds - loc_target), scalar=1.0)
    loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1.0, normalization='valid', name='loc_loss')
    # monitoring training status
    cls_label = mx.symbol.MakeLoss(data=cls_target, grad_scale=0, name='cls_label')
    det = mx.symbol.contrib.MultiBoxDetection(*[cls_prob, loc_preds, anchor_boxes], name='detection', nms_threshold=nms_thresh, force_suppress=force_suppress, variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    det = mx.symbol.MakeLoss(data=det, grad_scale=0, name='det_out')
    # group output
    out = mx.symbol.Group([cls_prob, loc_loss, cls_label, det])
    return out
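# A hypothetical invocation sketch for get_symbol_train above. The module name,
# layer names, and every hyper-parameter below are illustrative placeholders
# loosely modeled on the public MXNet SSD VGG-16/300 sample, not a tuned
# configuration; it assumes an importable 'vgg16_reduced' symbol module.
net = get_symbol_train(
    network='vgg16_reduced', num_classes=20,
    from_layers=['relu4_3', 'fc7', '', '', '', ''],
    num_filters=[512, -1, 512, 256, 256, 256],
    strides=[-1, -1, 2, 2, 2, 2], pads=[-1, -1, 1, 1, 1, 1],
    sizes=[[.1, .141], [.2, .272], [.37, .447], [.54, .619], [.71, .79], [.88, .961]],
    ratios=[[1, 2, .5]] * 6, normalizations=[20, -1, -1, -1, -1, -1])
# 'net' groups cls_prob, loc_loss, cls_label and the detection output for training.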
def fit(self, features, class_labels):
    """Constructs the MDR feature map from the provided training data.

    Parameters
    ----------
    features: array-like {n_samples, n_features}
        Feature matrix
    class_labels: array-like {n_samples}
        List of true class labels

    Returns
    -------
    self: The fitted model

    """
    unique_labels = sorted(np.unique(class_labels))
    if len(unique_labels) != 2:
        raise ValueError('MDR only supports binary endpoints.')

    # Count the distribution of classes that fall into each MDR grid cell
    self.class_count_matrix = defaultdict(lambda: defaultdict(int))
    for row_i in range(features.shape[0]):
        feature_instance = tuple(features[row_i])
        self.class_count_matrix[feature_instance][class_labels[row_i]] += 1
    self.class_count_matrix = dict(self.class_count_matrix)

    # Only applies to binary classification
    overall_class_fraction = float(sum(class_labels == unique_labels[0])) / class_labels.size

    # If one class is more abundant in a MDR grid cell than it is overall, then assign the cell to that class
    self.feature_map = {}

    for feature_instance in self.class_count_matrix:
        counts = self.class_count_matrix[feature_instance]
        fraction = float(counts[unique_labels[0]]) / np.sum(list(counts.values()))
        if fraction > overall_class_fraction:
            self.feature_map[feature_instance] = unique_labels[0]
        elif fraction == overall_class_fraction:
            self.feature_map[feature_instance] = self.tie_break
        else:
            self.feature_map[feature_instance] = unique_labels[1]

    return self
def function[fit, parameter[self, features, class_labels]]:
    constant[Constructs the MDR feature map from the provided training data.

    Parameters
    ----------
    features: array-like {n_samples, n_features}
        Feature matrix
    class_labels: array-like {n_samples}
        List of true class labels

    Returns
    -------
    self: The fitted model

    ]
    variable[unique_labels] assign[=] call[name[sorted], parameter[call[name[np].unique, parameter[name[class_labels]]]]]
    if compare[call[name[len], parameter[name[unique_labels]]] not_equal[!=] constant[2]] begin[:]
    <ast.Raise object at 0x7da1b03495a0>
    name[self].class_count_matrix assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b03497b0>]]
    for taget[name[row_i]] in starred[call[name[range], parameter[call[name[features].shape][constant[0]]]]] begin[:]
    variable[feature_instance] assign[=] call[name[tuple], parameter[call[name[features]][name[row_i]]]]
    <ast.AugAssign object at 0x7da1b0348af0>
    name[self].class_count_matrix assign[=] call[name[dict], parameter[name[self].class_count_matrix]]
    variable[overall_class_fraction] assign[=] binary_operation[call[name[float], parameter[call[name[sum], parameter[compare[name[class_labels] equal[==] call[name[unique_labels]][constant[0]]]]]]] / name[class_labels].size]
    name[self].feature_map assign[=] dictionary[[], []]
    for taget[name[feature_instance]] in starred[name[self].class_count_matrix] begin[:]
    variable[counts] assign[=] call[name[self].class_count_matrix][name[feature_instance]]
    variable[fraction] assign[=] binary_operation[call[name[float], parameter[call[name[counts]][call[name[unique_labels]][constant[0]]]]] / call[name[np].sum, parameter[call[name[list], parameter[call[name[counts].values, parameter[]]]]]]]
    if compare[name[fraction] greater[>] name[overall_class_fraction]] begin[:]
    call[name[self].feature_map][name[feature_instance]] assign[=] call[name[unique_labels]][constant[0]]
    return[name[self]]
keyword[def] identifier[fit] ( identifier[self] , identifier[features] , identifier[class_labels] ): literal[string] identifier[unique_labels] = identifier[sorted] ( identifier[np] . identifier[unique] ( identifier[class_labels] )) keyword[if] identifier[len] ( identifier[unique_labels] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[class_count_matrix] = identifier[defaultdict] ( keyword[lambda] : identifier[defaultdict] ( identifier[int] )) keyword[for] identifier[row_i] keyword[in] identifier[range] ( identifier[features] . identifier[shape] [ literal[int] ]): identifier[feature_instance] = identifier[tuple] ( identifier[features] [ identifier[row_i] ]) identifier[self] . identifier[class_count_matrix] [ identifier[feature_instance] ][ identifier[class_labels] [ identifier[row_i] ]]+= literal[int] identifier[self] . identifier[class_count_matrix] = identifier[dict] ( identifier[self] . identifier[class_count_matrix] ) identifier[overall_class_fraction] = identifier[float] ( identifier[sum] ( identifier[class_labels] == identifier[unique_labels] [ literal[int] ]))/ identifier[class_labels] . identifier[size] identifier[self] . identifier[feature_map] ={} keyword[for] identifier[feature_instance] keyword[in] identifier[self] . identifier[class_count_matrix] : identifier[counts] = identifier[self] . identifier[class_count_matrix] [ identifier[feature_instance] ] identifier[fraction] = identifier[float] ( identifier[counts] [ identifier[unique_labels] [ literal[int] ]])/ identifier[np] . identifier[sum] ( identifier[list] ( identifier[counts] . identifier[values] ())) keyword[if] identifier[fraction] > identifier[overall_class_fraction] : identifier[self] . identifier[feature_map] [ identifier[feature_instance] ]= identifier[unique_labels] [ literal[int] ] keyword[elif] identifier[fraction] == identifier[overall_class_fraction] : identifier[self] . identifier[feature_map] [ identifier[feature_instance] ]= identifier[self] . identifier[tie_break] keyword[else] : identifier[self] . identifier[feature_map] [ identifier[feature_instance] ]= identifier[unique_labels] [ literal[int] ] keyword[return] identifier[self]
def fit(self, features, class_labels): """Constructs the MDR feature map from the provided training data. Parameters ---------- features: array-like {n_samples, n_features} Feature matrix class_labels: array-like {n_samples} List of true class labels Returns ------- self: The fitted model """ unique_labels = sorted(np.unique(class_labels)) if len(unique_labels) != 2: raise ValueError('MDR only supports binary endpoints.') # depends on [control=['if'], data=[]] # Count the distribution of classes that fall into each MDR grid cell self.class_count_matrix = defaultdict(lambda : defaultdict(int)) for row_i in range(features.shape[0]): feature_instance = tuple(features[row_i]) self.class_count_matrix[feature_instance][class_labels[row_i]] += 1 # depends on [control=['for'], data=['row_i']] self.class_count_matrix = dict(self.class_count_matrix) # Only applies to binary classification overall_class_fraction = float(sum(class_labels == unique_labels[0])) / class_labels.size # If one class is more abundant in a MDR grid cell than it is overall, then assign the cell to that class self.feature_map = {} for feature_instance in self.class_count_matrix: counts = self.class_count_matrix[feature_instance] fraction = float(counts[unique_labels[0]]) / np.sum(list(counts.values())) if fraction > overall_class_fraction: self.feature_map[feature_instance] = unique_labels[0] # depends on [control=['if'], data=[]] elif fraction == overall_class_fraction: self.feature_map[feature_instance] = self.tie_break # depends on [control=['if'], data=[]] else: self.feature_map[feature_instance] = unique_labels[1] # depends on [control=['for'], data=['feature_instance']] return self
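# A minimal toy run of fit above. ToyMDR is a hypothetical host class that only
# supplies the tie_break attribute the method expects; numpy and defaultdict are
# assumed imported in the defining module, as in the source.
import numpy as np

class ToyMDR:
    tie_break = 0
    fit = fit  # reuse the method defined above

features = np.array([[0, 0], [0, 0], [0, 1], [1, 1]])
labels = np.array([0, 0, 1, 1])
model = ToyMDR().fit(features, labels)
# Overall class-0 fraction is 0.5; cell (0, 0) is pure class 0 (fraction 1.0),
# so it maps to 0, while the pure class-1 cells (0, 1) and (1, 1) map to 1.
print(model.feature_map)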
def setitem_via_pathlist(ol,value,pathlist): ''' from elist.elist import * y = ['a',['b',["bb"]],'c'] y[1][1] setitem_via_pathlist(y,"500",[1,1]) y ''' this = ol for i in range(0,pathlist.__len__()-1): key = pathlist[i] this = this.__getitem__(key) this.__setitem__(pathlist[-1],value) return(ol)
def function[setitem_via_pathlist, parameter[ol, value, pathlist]]: constant[ from elist.elist import * y = ['a',['b',["bb"]],'c'] y[1][1] setitem_via_pathlist(y,"500",[1,1]) y ] variable[this] assign[=] name[ol] for taget[name[i]] in starred[call[name[range], parameter[constant[0], binary_operation[call[name[pathlist].__len__, parameter[]] - constant[1]]]]] begin[:] variable[key] assign[=] call[name[pathlist]][name[i]] variable[this] assign[=] call[name[this].__getitem__, parameter[name[key]]] call[name[this].__setitem__, parameter[call[name[pathlist]][<ast.UnaryOp object at 0x7da20c6aa5c0>], name[value]]] return[name[ol]]
keyword[def] identifier[setitem_via_pathlist] ( identifier[ol] , identifier[value] , identifier[pathlist] ): literal[string] identifier[this] = identifier[ol] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pathlist] . identifier[__len__] ()- literal[int] ): identifier[key] = identifier[pathlist] [ identifier[i] ] identifier[this] = identifier[this] . identifier[__getitem__] ( identifier[key] ) identifier[this] . identifier[__setitem__] ( identifier[pathlist] [- literal[int] ], identifier[value] ) keyword[return] ( identifier[ol] )
def setitem_via_pathlist(ol, value, pathlist): """ from elist.elist import * y = ['a',['b',["bb"]],'c'] y[1][1] setitem_via_pathlist(y,"500",[1,1]) y """ this = ol for i in range(0, pathlist.__len__() - 1): key = pathlist[i] this = this.__getitem__(key) # depends on [control=['for'], data=['i']] this.__setitem__(pathlist[-1], value) return ol
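# A quick runnable demo of setitem_via_pathlist above, completing the docstring
# example with the resulting list.
y = ['a', ['b', ['bb']], 'c']
setitem_via_pathlist(y, '500', [1, 1])  # walk to y[1], then assign at index 1
print(y)  # ['a', ['b', '500'], 'c']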
def _resolve_capability(self, atom): """Return a terminal code for a capname or a sugary name, or an empty Unicode. The return value is always Unicode, because otherwise it is clumsy (especially in Python 3) to concatenate with real (Unicode) strings. """ code = tigetstr(self._sugar.get(atom, atom)) if code: # See the comment in ParametrizingString for why this is latin1. return code.decode('latin1') return u''
def function[_resolve_capability, parameter[self, atom]]: constant[Return a terminal code for a capname or a sugary name, or an empty Unicode. The return value is always Unicode, because otherwise it is clumsy (especially in Python 3) to concatenate with real (Unicode) strings. ] variable[code] assign[=] call[name[tigetstr], parameter[call[name[self]._sugar.get, parameter[name[atom], name[atom]]]]] if name[code] begin[:] return[call[name[code].decode, parameter[constant[latin1]]]] return[constant[]]
keyword[def] identifier[_resolve_capability] ( identifier[self] , identifier[atom] ): literal[string] identifier[code] = identifier[tigetstr] ( identifier[self] . identifier[_sugar] . identifier[get] ( identifier[atom] , identifier[atom] )) keyword[if] identifier[code] : keyword[return] identifier[code] . identifier[decode] ( literal[string] ) keyword[return] literal[string]
def _resolve_capability(self, atom): """Return a terminal code for a capname or a sugary name, or an empty Unicode. The return value is always Unicode, because otherwise it is clumsy (especially in Python 3) to concatenate with real (Unicode) strings. """ code = tigetstr(self._sugar.get(atom, atom)) if code: # See the comment in ParametrizingString for why this is latin1. return code.decode('latin1') # depends on [control=['if'], data=[]] return u''
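# A standalone sketch of the same lookup using the stdlib curses module; the
# method above presumably relies on blessings' own tigetstr import and its
# _sugar alias table, so this only mirrors the core idea.
import curses

curses.setupterm()
code = curses.tigetstr('clear')  # raw capname lookup, no sugary-alias resolution here
print(repr(code.decode('latin1')) if code else u'')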
def from_dicts(cls, mesh_name, vert_dict, normal_dict):
    """Builds a wavefront .obj string using pre-triangulated vertex dict and normal dict as reference, and returns it wrapped in a WavefrontWriter."""

    # Put header in string
    wavefront_str = "o {name}\n".format(name=mesh_name)

    # Write Vertex data from vert_dict
    for wall in vert_dict:
        for vert in vert_dict[wall]:
            wavefront_str += "v {0} {1} {2}\n".format(*vert)

    # Write (false) UV Texture data
    wavefront_str += "vt 1.0 1.0\n"

    # Write Normal data from normal_dict
    for wall, norm in normal_dict.items():
        wavefront_str += "vn {0} {1} {2}\n".format(*norm)

    # Write Face Indices (1-indexed)
    vert_idx = 0
    for wall in vert_dict:
        for _ in range(0, len(vert_dict[wall]), 3):
            wavefront_str += 'f '
            for vert in range(3):  # 3 vertices in each face
                vert_idx += 1
                wavefront_str += "{v}/1/{n} ".format(v=vert_idx, n=wall+1)
            wavefront_str = wavefront_str[:-1] + '\n'  # Cutoff trailing space and add a newline.

    # Return Wavefront string
    return WavefrontWriter(string=wavefront_str)
def function[from_dicts, parameter[cls, mesh_name, vert_dict, normal_dict]]:
    constant[Builds a wavefront .obj string using pre-triangulated vertex dict and normal dict as reference, and returns it wrapped in a WavefrontWriter.]
    variable[wavefront_str] assign[=] call[constant[o {name}
].format, parameter[]]
    for taget[name[wall]] in starred[name[vert_dict]] begin[:]
    for taget[name[vert]] in starred[call[name[vert_dict]][name[wall]]] begin[:]
    <ast.AugAssign object at 0x7da2044c1390>
    <ast.AugAssign object at 0x7da2044c0df0>
    for taget[tuple[[<ast.Name object at 0x7da2044c1780>, <ast.Name object at 0x7da2044c04c0>]]] in starred[call[name[normal_dict].items, parameter[]]] begin[:]
    <ast.AugAssign object at 0x7da2044c2770>
    variable[vert_idx] assign[=] constant[0]
    for taget[name[wall]] in starred[name[vert_dict]] begin[:]
    for taget[name[_]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[call[name[vert_dict]][name[wall]]]], constant[3]]]] begin[:]
    <ast.AugAssign object at 0x7da18f00e0e0>
    for taget[name[vert]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
    <ast.AugAssign object at 0x7da18f00d930>
    <ast.AugAssign object at 0x7da18f00eb00>
    variable[wavefront_str] assign[=] binary_operation[call[name[wavefront_str]][<ast.Slice object at 0x7da18f00db40>] + constant[
]]
    return[call[name[WavefrontWriter], parameter[]]]
keyword[def] identifier[from_dicts] ( identifier[cls] , identifier[mesh_name] , identifier[vert_dict] , identifier[normal_dict] ): literal[string] identifier[wavefront_str] = literal[string] . identifier[format] ( identifier[name] = identifier[mesh_name] ) keyword[for] identifier[wall] keyword[in] identifier[vert_dict] : keyword[for] identifier[vert] keyword[in] identifier[vert_dict] [ identifier[wall] ]: identifier[wavefront_str] += literal[string] . identifier[format] (* identifier[vert] ) identifier[wavefront_str] += literal[string] keyword[for] identifier[wall] , identifier[norm] keyword[in] identifier[normal_dict] . identifier[items] (): identifier[wavefront_str] += literal[string] . identifier[format] (* identifier[norm] ) identifier[vert_idx] = literal[int] keyword[for] identifier[wall] keyword[in] identifier[vert_dict] : keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[vert_dict] [ identifier[wall] ]), literal[int] ): identifier[wavefront_str] += literal[string] keyword[for] identifier[vert] keyword[in] identifier[range] ( literal[int] ): identifier[vert_idx] += literal[int] identifier[wavefront_str] += literal[string] . identifier[format] ( identifier[v] = identifier[vert_idx] , identifier[n] = identifier[wall] + literal[int] ) identifier[wavefront_str] = identifier[wavefront_str] [:- literal[int] ]+ literal[string] keyword[return] identifier[WavefrontWriter] ( identifier[string] = identifier[wavefront_str] )
def from_dicts(cls, mesh_name, vert_dict, normal_dict): """Builds a wavefront .obj string using pre-triangulated vertex dict and normal dict as reference, and returns it wrapped in a WavefrontWriter.""" # Put header in string wavefront_str = 'o {name}\n'.format(name=mesh_name) # Write Vertex data from vert_dict for wall in vert_dict: for vert in vert_dict[wall]: wavefront_str += 'v {0} {1} {2}\n'.format(*vert) # depends on [control=['for'], data=['vert']] # depends on [control=['for'], data=['wall']] # Write (false) UV Texture data wavefront_str += 'vt 1.0 1.0\n' # Write Normal data from normal_dict for (wall, norm) in normal_dict.items(): wavefront_str += 'vn {0} {1} {2}\n'.format(*norm) # depends on [control=['for'], data=[]] # Write Face Indices (1-indexed) vert_idx = 0 for wall in vert_dict: for _ in range(0, len(vert_dict[wall]), 3): wavefront_str += 'f ' for vert in range(3): # 3 vertices in each face vert_idx += 1 wavefront_str += '{v}/1/{n} '.format(v=vert_idx, n=wall + 1) # depends on [control=['for'], data=[]] wavefront_str = wavefront_str[:-1] + '\n' # Cutoff trailing space and add a newline. # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['wall']] # Return Wavefront string return WavefrontWriter(string=wavefront_str)
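# A toy call to from_dicts above for a single triangle; it assumes from_dicts is
# exposed as a classmethod on the WavefrontWriter it returns.
verts = {0: [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)]}
norms = {0: (0.0, 0.0, 1.0)}
writer = WavefrontWriter.from_dicts('triangle', verts, norms)
# The wrapped string reads: 'o triangle', three 'v' lines, 'vt 1.0 1.0',
# 'vn 0.0 0.0 1.0', and the face record 'f 1/1/1 2/1/1 3/1/1'.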
def hangul_to_jamo(hangul_string): """Convert a string of Hangul to jamo. Arguments may be iterables of characters. hangul_to_jamo should split every Hangul character into U+11xx jamo characters for any given string. Non-hangul characters are not changed. hangul_to_jamo is the generator version of h2j, the string version. """ return (_ for _ in chain.from_iterable(_hangul_char_to_jamo(_) for _ in hangul_string))
def function[hangul_to_jamo, parameter[hangul_string]]: constant[Convert a string of Hangul to jamo. Arguments may be iterables of characters. hangul_to_jamo should split every Hangul character into U+11xx jamo characters for any given string. Non-hangul characters are not changed. hangul_to_jamo is the generator version of h2j, the string version. ] return[<ast.GeneratorExp object at 0x7da20e954640>]
keyword[def] identifier[hangul_to_jamo] ( identifier[hangul_string] ): literal[string] keyword[return] ( identifier[_] keyword[for] identifier[_] keyword[in] identifier[chain] . identifier[from_iterable] ( identifier[_hangul_char_to_jamo] ( identifier[_] ) keyword[for] identifier[_] keyword[in] identifier[hangul_string] ))
def hangul_to_jamo(hangul_string): """Convert a string of Hangul to jamo. Arguments may be iterables of characters. hangul_to_jamo should split every Hangul character into U+11xx jamo characters for any given string. Non-hangul characters are not changed. hangul_to_jamo is the generator version of h2j, the string version. """ return (_ for _ in chain.from_iterable((_hangul_char_to_jamo(_) for _ in hangul_string)))
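# hangul_to_jamo above in action; non-Hangul input passes through untouched,
# per the docstring. Assumes _hangul_char_to_jamo decomposes each syllable into
# its U+11xx jamo, as in the jamo library.
print(list(hangul_to_jamo(u'한글 ok')))
# expected: ['ᄒ', 'ᅡ', 'ᆫ', 'ᄀ', 'ᅳ', 'ᆯ', ' ', 'o', 'k']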
def per_month(start: datetime, end: datetime, n: int=1): """ Iterates over time range in one month steps. Clamps to number of days in given month. :param start: Start of time range (inclusive) :param end: End of time range (exclusive) :param n: Number of months to step. Default is 1. :return: Iterable collection of [(month+0, month+1), (month+1, month+2), ..., end) """ curr = start.replace(day=1, hour=0, minute=0, second=0, microsecond=0) while curr < end: curr_end = add_month(curr, n) yield curr, curr_end curr = curr_end
def function[per_month, parameter[start, end, n]]: constant[ Iterates over time range in one month steps. Clamps to number of days in given month. :param start: Start of time range (inclusive) :param end: End of time range (exclusive) :param n: Number of months to step. Default is 1. :return: Iterable collection of [(month+0, month+1), (month+1, month+2), ..., end) ] variable[curr] assign[=] call[name[start].replace, parameter[]] while compare[name[curr] less[<] name[end]] begin[:] variable[curr_end] assign[=] call[name[add_month], parameter[name[curr], name[n]]] <ast.Yield object at 0x7da1b1050c40> variable[curr] assign[=] name[curr_end]
keyword[def] identifier[per_month] ( identifier[start] : identifier[datetime] , identifier[end] : identifier[datetime] , identifier[n] : identifier[int] = literal[int] ): literal[string] identifier[curr] = identifier[start] . identifier[replace] ( identifier[day] = literal[int] , identifier[hour] = literal[int] , identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int] ) keyword[while] identifier[curr] < identifier[end] : identifier[curr_end] = identifier[add_month] ( identifier[curr] , identifier[n] ) keyword[yield] identifier[curr] , identifier[curr_end] identifier[curr] = identifier[curr_end]
def per_month(start: datetime, end: datetime, n: int=1): """ Iterates over time range in one month steps. Clamps to number of days in given month. :param start: Start of time range (inclusive) :param end: End of time range (exclusive) :param n: Number of months to step. Default is 1. :return: Iterable collection of [(month+0, month+1), (month+1, month+2), ..., end) """ curr = start.replace(day=1, hour=0, minute=0, second=0, microsecond=0) while curr < end: curr_end = add_month(curr, n) yield (curr, curr_end) curr = curr_end # depends on [control=['while'], data=['curr']]
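# per_month above over a quarter; note the first window is clamped back to the
# first day of start's month. Assumes the add_month helper it calls advances a
# datetime by n calendar months.
from datetime import datetime

for begin, finish in per_month(datetime(2020, 1, 15), datetime(2020, 4, 1)):
    print(begin.date(), '->', finish.date())
# 2020-01-01 -> 2020-02-01
# 2020-02-01 -> 2020-03-01
# 2020-03-01 -> 2020-04-01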
def get_principal_dictionary(graph_client, object_ids, raise_on_graph_call_error=False):
    """Retrieves Azure AD Objects for corresponding object ids passed.

    :param graph_client: A client for Microsoft Graph.
    :param object_ids: The object ids to retrieve Azure AD objects for.
    :param raise_on_graph_call_error: A boolean indicating whether an error should
    be raised if the underlying Microsoft Graph call fails.

    :return: A dictionary keyed by object id with the Azure AD object as the value.
    Note: empty Azure AD objects could be returned if not found in the graph.
    """
    if not object_ids:
        return {}

    object_params = GetObjectsParameters(
        include_directory_object_references=True,
        object_ids=object_ids)

    principal_dics = {object_id: DirectoryObject() for object_id in object_ids}

    aad_objects = graph_client.objects.get_objects_by_object_ids(object_params)
    try:
        for aad_object in aad_objects:
            principal_dics[aad_object.object_id] = aad_object

    except CloudError as e:
        if e.status_code in [403, 401]:
            GraphHelper.log.warning(
                'Credentials not authorized for access to read from Microsoft Graph. \n '
                'Can not query on principalName, displayName, or aadType. \n')
        else:
            GraphHelper.log.error(
                'Exception in call to Microsoft Graph. \n '
                'Can not query on principalName, displayName, or aadType. \n'
                'Error: {0}'.format(e))
        if raise_on_graph_call_error:
            raise

    return principal_dics
def function[get_principal_dictionary, parameter[graph_client, object_ids, raise_on_graph_call_error]]:
    constant[Retrieves Azure AD Objects for corresponding object ids passed.

    :param graph_client: A client for Microsoft Graph.
    :param object_ids: The object ids to retrieve Azure AD objects for.
    :param raise_on_graph_call_error: A boolean indicating whether an error should
    be raised if the underlying Microsoft Graph call fails.

    :return: A dictionary keyed by object id with the Azure AD object as the value.
    Note: empty Azure AD objects could be returned if not found in the graph.
    ]
    if <ast.UnaryOp object at 0x7da1b1fc8f40> begin[:]
    return[dictionary[[], []]]
    variable[object_params] assign[=] call[name[GetObjectsParameters], parameter[]]
    variable[principal_dics] assign[=] <ast.DictComp object at 0x7da1b1fcb580>
    variable[aad_objects] assign[=] call[name[graph_client].objects.get_objects_by_object_ids, parameter[name[object_params]]]
    <ast.Try object at 0x7da1b1fcb6a0>
    return[name[principal_dics]]
keyword[def] identifier[get_principal_dictionary] ( identifier[graph_client] , identifier[object_ids] , identifier[raise_on_graph_call_error] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[object_ids] : keyword[return] {} identifier[object_params] = identifier[GetObjectsParameters] ( identifier[include_directory_object_references] = keyword[True] , identifier[object_ids] = identifier[object_ids] ) identifier[principal_dics] ={ identifier[object_id] : identifier[DirectoryObject] () keyword[for] identifier[object_id] keyword[in] identifier[object_ids] } identifier[aad_objects] = identifier[graph_client] . identifier[objects] . identifier[get_objects_by_object_ids] ( identifier[object_params] ) keyword[try] : keyword[for] identifier[aad_object] keyword[in] identifier[aad_objects] : identifier[principal_dics] [ identifier[aad_object] . identifier[object_id] ]= identifier[aad_object] keyword[except] identifier[CloudError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[status_code] keyword[in] [ literal[int] , literal[int] ]: identifier[GraphHelper] . identifier[log] . identifier[warning] ( literal[string] literal[string] ) keyword[else] : identifier[GraphHelper] . identifier[log] . identifier[error] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[e] )) keyword[if] identifier[raise_on_graph_call_error] : keyword[raise] keyword[return] identifier[principal_dics]
def get_principal_dictionary(graph_client, object_ids, raise_on_graph_call_error=False): """Retrieves Azure AD Objects for corresponding object ids passed. :param graph_client: A client for Microsoft Graph. :param object_ids: The object ids to retrieve Azure AD objects for. :param raise_on_graph_call_error: A boolean indicating whether an error should be raised if the underlying Microsoft Graph call fails. :return: A dictionary keyed by object id with the Azure AD object as the value. Note: empty Azure AD objects could be returned if not found in the graph. """ if not object_ids: return {} # depends on [control=['if'], data=[]] object_params = GetObjectsParameters(include_directory_object_references=True, object_ids=object_ids) principal_dics = {object_id: DirectoryObject() for object_id in object_ids} aad_objects = graph_client.objects.get_objects_by_object_ids(object_params) try: for aad_object in aad_objects: principal_dics[aad_object.object_id] = aad_object # depends on [control=['for'], data=['aad_object']] # depends on [control=['try'], data=[]] except CloudError as e: if e.status_code in [403, 401]: GraphHelper.log.warning('Credentials not authorized for access to read from Microsoft Graph. \n Can not query on principalName, displayName, or aadType. \n') # depends on [control=['if'], data=[]] else: GraphHelper.log.error('Exception in call to Microsoft Graph. \n Can not query on principalName, displayName, or aadType. \nError: {0}'.format(e)) if raise_on_graph_call_error: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] return principal_dics
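# A hedged usage sketch for get_principal_dictionary above, assuming it lives on
# the GraphHelper class it logs through and that graph_client is an already
# authenticated azure-graphrbac client.
principals = GraphHelper.get_principal_dictionary(graph_client, ['object-id-1', 'object-id-2'])
first = principals['object-id-1']  # an empty DirectoryObject if the id was not found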
def date(self, date):
        """ Sets the date of file creation from a python date object
        """
        self.creation_year = date.year
        self.creation_day_of_year = date.timetuple().tm_yday
def function[date, parameter[self, date]]:
    constant[ Sets the date of file creation from a python date object
        ]
    name[self].creation_year assign[=] name[date].year
    name[self].creation_day_of_year assign[=] call[name[date].timetuple, parameter[]].tm_yday
keyword[def] identifier[date] ( identifier[self] , identifier[date] ): literal[string] identifier[self] . identifier[creation_year] = identifier[date] . identifier[year] identifier[self] . identifier[creation_day_of_year] = identifier[date] . identifier[timetuple] (). identifier[tm_yday]
def date(self, date):
    """ Sets the date of file creation from a python date object
        """
    self.creation_year = date.year
    self.creation_day_of_year = date.timetuple().tm_yday
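# Wiring the setter above into a hypothetical header class; upstream it is
# presumably exposed as a property setter rather than called directly.
from datetime import date as _date

class Header:
    creation_year = 0
    creation_day_of_year = 0
    date = date  # the setter defined above

h = Header()
h.date(_date(2021, 2, 1))
print(h.creation_year, h.creation_day_of_year)  # 2021 32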
def eConnect(self, host, port, clientId=0, extraAuth=False): """eConnect(EClientSocketBase self, char const * host, unsigned int port, int clientId=0, bool extraAuth=False) -> bool""" return _swigibpy.EClientSocketBase_eConnect(self, host, port, clientId, extraAuth)
def function[eConnect, parameter[self, host, port, clientId, extraAuth]]: constant[eConnect(EClientSocketBase self, char const * host, unsigned int port, int clientId=0, bool extraAuth=False) -> bool] return[call[name[_swigibpy].EClientSocketBase_eConnect, parameter[name[self], name[host], name[port], name[clientId], name[extraAuth]]]]
keyword[def] identifier[eConnect] ( identifier[self] , identifier[host] , identifier[port] , identifier[clientId] = literal[int] , identifier[extraAuth] = keyword[False] ): literal[string] keyword[return] identifier[_swigibpy] . identifier[EClientSocketBase_eConnect] ( identifier[self] , identifier[host] , identifier[port] , identifier[clientId] , identifier[extraAuth] )
def eConnect(self, host, port, clientId=0, extraAuth=False): """eConnect(EClientSocketBase self, char const * host, unsigned int port, int clientId=0, bool extraAuth=False) -> bool""" return _swigibpy.EClientSocketBase_eConnect(self, host, port, clientId, extraAuth)
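# A brief usage sketch for the SWIG wrapper above, following the usual swigibpy
# conventions; EPosixClientSocket, my_wrapper, and the port are illustrative
# assumptions (my_wrapper would be an EWrapper subclass instance).
tws = EPosixClientSocket(my_wrapper)
connected = tws.eConnect('127.0.0.1', 7496, clientId=0)  # returns bool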
def set(self, x, y, value):
        """
        Set the cell value at the specified location

        :param x: The column (x coord) of the character.
        :param y: The row (y coord) of the character.
        :param value: A 5-tuple of (unicode, foreground, attributes, background, width).
        """
        self._double_buffer[y][x] = value
def function[set, parameter[self, x, y, value]]:
    constant[
        Set the cell value at the specified location

        :param x: The column (x coord) of the character.
        :param y: The row (y coord) of the character.
        :param value: A 5-tuple of (unicode, foreground, attributes, background, width).
        ]
    call[call[name[self]._double_buffer][name[y]]][name[x]] assign[=] name[value]
keyword[def] identifier[set] ( identifier[self] , identifier[x] , identifier[y] , identifier[value] ): literal[string] identifier[self] . identifier[_double_buffer] [ identifier[y] ][ identifier[x] ]= identifier[value]
def set(self, x, y, value):
    """
        Set the cell value at the specified location

        :param x: The column (x coord) of the character.
        :param y: The row (y coord) of the character.
        :param value: A 5-tuple of (unicode, foreground, attributes, background, width).
        """
    self._double_buffer[y][x] = value
def headerData(self, section, orientation, role=Qt.DisplayRole): """Override Qt method""" if role == Qt.TextAlignmentRole: if orientation == Qt.Horizontal: return to_qvariant(int(Qt.AlignHCenter | Qt.AlignVCenter)) return to_qvariant(int(Qt.AlignRight | Qt.AlignVCenter)) elif role == Qt.DisplayRole and orientation == Qt.Horizontal: if section == C.COL_PACKAGE_TYPE: return to_qvariant(_("T")) if section == C.COL_NAME: return to_qvariant(_("Name")) elif section == C.COL_VERSION: return to_qvariant(_("Version")) elif section == C.COL_DESCRIPTION: return to_qvariant(_("Description")) elif section == C.COL_STATUS: return to_qvariant(_("Status")) else: return to_qvariant()
def function[headerData, parameter[self, section, orientation, role]]: constant[Override Qt method] if compare[name[role] equal[==] name[Qt].TextAlignmentRole] begin[:] if compare[name[orientation] equal[==] name[Qt].Horizontal] begin[:] return[call[name[to_qvariant], parameter[call[name[int], parameter[binary_operation[name[Qt].AlignHCenter <ast.BitOr object at 0x7da2590d6aa0> name[Qt].AlignVCenter]]]]]] return[call[name[to_qvariant], parameter[call[name[int], parameter[binary_operation[name[Qt].AlignRight <ast.BitOr object at 0x7da2590d6aa0> name[Qt].AlignVCenter]]]]]]
keyword[def] identifier[headerData] ( identifier[self] , identifier[section] , identifier[orientation] , identifier[role] = identifier[Qt] . identifier[DisplayRole] ): literal[string] keyword[if] identifier[role] == identifier[Qt] . identifier[TextAlignmentRole] : keyword[if] identifier[orientation] == identifier[Qt] . identifier[Horizontal] : keyword[return] identifier[to_qvariant] ( identifier[int] ( identifier[Qt] . identifier[AlignHCenter] | identifier[Qt] . identifier[AlignVCenter] )) keyword[return] identifier[to_qvariant] ( identifier[int] ( identifier[Qt] . identifier[AlignRight] | identifier[Qt] . identifier[AlignVCenter] )) keyword[elif] identifier[role] == identifier[Qt] . identifier[DisplayRole] keyword[and] identifier[orientation] == identifier[Qt] . identifier[Horizontal] : keyword[if] identifier[section] == identifier[C] . identifier[COL_PACKAGE_TYPE] : keyword[return] identifier[to_qvariant] ( identifier[_] ( literal[string] )) keyword[if] identifier[section] == identifier[C] . identifier[COL_NAME] : keyword[return] identifier[to_qvariant] ( identifier[_] ( literal[string] )) keyword[elif] identifier[section] == identifier[C] . identifier[COL_VERSION] : keyword[return] identifier[to_qvariant] ( identifier[_] ( literal[string] )) keyword[elif] identifier[section] == identifier[C] . identifier[COL_DESCRIPTION] : keyword[return] identifier[to_qvariant] ( identifier[_] ( literal[string] )) keyword[elif] identifier[section] == identifier[C] . identifier[COL_STATUS] : keyword[return] identifier[to_qvariant] ( identifier[_] ( literal[string] )) keyword[else] : keyword[return] identifier[to_qvariant] ()
def headerData(self, section, orientation, role=Qt.DisplayRole): """Override Qt method""" if role == Qt.TextAlignmentRole: if orientation == Qt.Horizontal: return to_qvariant(int(Qt.AlignHCenter | Qt.AlignVCenter)) # depends on [control=['if'], data=[]] return to_qvariant(int(Qt.AlignRight | Qt.AlignVCenter)) # depends on [control=['if'], data=[]] elif role == Qt.DisplayRole and orientation == Qt.Horizontal: if section == C.COL_PACKAGE_TYPE: return to_qvariant(_('T')) # depends on [control=['if'], data=[]] if section == C.COL_NAME: return to_qvariant(_('Name')) # depends on [control=['if'], data=[]] elif section == C.COL_VERSION: return to_qvariant(_('Version')) # depends on [control=['if'], data=[]] elif section == C.COL_DESCRIPTION: return to_qvariant(_('Description')) # depends on [control=['if'], data=[]] elif section == C.COL_STATUS: return to_qvariant(_('Status')) # depends on [control=['if'], data=[]] else: return to_qvariant() # depends on [control=['if'], data=[]]
def blk_nd(blk, shape):
    """Iterate through the blocks that cover an array.

    This function first iterates through the blocks that cover
    the part of the array given by max_blk_coverage
    and then iterates with smaller blocks for the rest
    of the array.

    :param blk: the N-dimensional shape of the block
    :param shape: the N-dimensional shape of the array
    :return: a generator that yields the blocks

    Example:

        >>> result = list(blk_nd(blk=(5,3), shape=(11, 11)))
        >>> result[0]
        (slice(0, 5, None), slice(0, 3, None))
        >>> result[1]
        (slice(0, 5, None), slice(3, 6, None))
        >>> result[-1]
        (slice(10, 11, None), slice(9, 11, None))

    The generator yields blocks of size blk until
    it covers the part of the array given by :func:`max_blk_coverage`
    and then yields smaller blocks until it covers the full array.

    .. seealso:: :py:func:`blk_nd_short` Yields blocks of fixed size

    """
    internals = (blk_1d(b, s) for b, s in zip(blk, shape))
    return product(*internals)
def function[blk_nd, parameter[blk, shape]]:
    constant[Iterate through the blocks that cover an array.

    This function first iterates through the blocks that cover
    the part of the array given by max_blk_coverage
    and then iterates with smaller blocks for the rest
    of the array.

    :param blk: the N-dimensional shape of the block
    :param shape: the N-dimensional shape of the array
    :return: a generator that yields the blocks

    Example:

        >>> result = list(blk_nd(blk=(5,3), shape=(11, 11)))
        >>> result[0]
        (slice(0, 5, None), slice(0, 3, None))
        >>> result[1]
        (slice(0, 5, None), slice(3, 6, None))
        >>> result[-1]
        (slice(10, 11, None), slice(9, 11, None))

    The generator yields blocks of size blk until
    it covers the part of the array given by :func:`max_blk_coverage`
    and then yields smaller blocks until it covers the full array.

    .. seealso:: :py:func:`blk_nd_short` Yields blocks of fixed size

    ]
    variable[internals] assign[=] <ast.GeneratorExp object at 0x7da1b2356710>
    return[call[name[product], parameter[<ast.Starred object at 0x7da1b2357d30>]]]
keyword[def] identifier[blk_nd] ( identifier[blk] , identifier[shape] ): literal[string] identifier[internals] =( identifier[blk_1d] ( identifier[b] , identifier[s] ) keyword[for] identifier[b] , identifier[s] keyword[in] identifier[zip] ( identifier[blk] , identifier[shape] )) keyword[return] identifier[product] (* identifier[internals] )
def blk_nd(blk, shape):
    """Iterate through the blocks that cover an array.

    This function first iterates through the blocks that cover
    the part of the array given by max_blk_coverage
    and then iterates with smaller blocks for the rest
    of the array.

    :param blk: the N-dimensional shape of the block
    :param shape: the N-dimensional shape of the array
    :return: a generator that yields the blocks

    Example:

        >>> result = list(blk_nd(blk=(5,3), shape=(11, 11)))
        >>> result[0]
        (slice(0, 5, None), slice(0, 3, None))
        >>> result[1]
        (slice(0, 5, None), slice(3, 6, None))
        >>> result[-1]
        (slice(10, 11, None), slice(9, 11, None))

    The generator yields blocks of size blk until
    it covers the part of the array given by :func:`max_blk_coverage`
    and then yields smaller blocks until it covers the full array.

    .. seealso:: :py:func:`blk_nd_short` Yields blocks of fixed size

    """
    internals = (blk_1d(b, s) for (b, s) in zip(blk, shape))
    return product(*internals)
def optional(_object):
    """
    This decorator has a double functionality: it can wrap validators and make
    them optional or it can wrap keys and make that entry optional.

    **Optional Validator:**
    Allows validators to work only when there is a value that contains
    some data, otherwise it will just not pass the information to the actual
    validator and will not fail as a result.

    As any normal decorator, it can be used correctly with the decorator
    syntax or in the actual schema.

    This is how it would look in a schema::

        ('key', optional(my_validator))

    Where ``my_validator`` can be any validator that accepts a single
    argument.

    In case a class based validator is being used (like the ``recursive`` or
    ``iterables``) then it would look like::

        ('key', optional(class_validator(('key', 'value'))))

    Of course, the schema should vary depending on your needs, it is just the
    way of constructing the validator call that should be important.

    **Optional Keys:**

    Sometimes a given data structure may present optional entries. For example
    this data::

        data = {'required': 1, 'optional': 2}

    To represent this, you will need to declare the `optional` key in the
    schema but by wrapping the key with this decorator you will basically tell
    the validation engine that if that key is present it should be validated,
    otherwise, it should be skipped. This is how the schema would look::

        schema = (('required', 1), (optional('optional'), 1))

    The above schema would allow data that is missing the ``optional`` key.
    The data below would pass validation without any issues::

        data = {'required': 1}
    """
    if is_callable(_object):
        validator = _object

        @wraps(validator)
        def decorated(value):
            if value:
                return validator(value)
            return
        return decorated
    else:
        def optional(*args):
            return _object
        optional.is_optional = True
        optional._object = _object
        return optional
def function[optional, parameter[_object]]:
    constant[
    This decorator has a double functionality: it can wrap validators and make
    them optional or it can wrap keys and make that entry optional.

    **Optional Validator:**
    Allows validators to work only when there is a value that contains
    some data, otherwise it will just not pass the information to the actual
    validator and will not fail as a result.

    As any normal decorator, it can be used correctly with the decorator
    syntax or in the actual schema.

    This is how it would look in a schema::

        ('key', optional(my_validator))

    Where ``my_validator`` can be any validator that accepts a single
    argument.

    In case a class based validator is being used (like the ``recursive`` or
    ``iterables``) then it would look like::

        ('key', optional(class_validator(('key', 'value'))))

    Of course, the schema should vary depending on your needs, it is just the
    way of constructing the validator call that should be important.

    **Optional Keys:**

    Sometimes a given data structure may present optional entries. For example
    this data::

        data = {'required': 1, 'optional': 2}

    To represent this, you will need to declare the `optional` key in the
    schema but by wrapping the key with this decorator you will basically tell
    the validation engine that if that key is present it should be validated,
    otherwise, it should be skipped. This is how the schema would look::

        schema = (('required', 1), (optional('optional'), 1))

    The above schema would allow data that is missing the ``optional`` key.
    The data below would pass validation without any issues::

        data = {'required': 1}
    ]
    if call[name[is_callable], parameter[name[_object]]] begin[:]
    variable[validator] assign[=] name[_object]
    def function[decorated, parameter[value]]:
    if name[value] begin[:]
    return[call[name[validator], parameter[name[value]]]]
    return[None]
    return[name[decorated]]
keyword[def] identifier[optional] ( identifier[_object] ): literal[string] keyword[if] identifier[is_callable] ( identifier[_object] ): identifier[validator] = identifier[_object] @ identifier[wraps] ( identifier[validator] ) keyword[def] identifier[decorated] ( identifier[value] ): keyword[if] identifier[value] : keyword[return] identifier[validator] ( identifier[value] ) keyword[return] keyword[return] identifier[decorated] keyword[else] : keyword[def] identifier[optional] (* identifier[args] ): keyword[return] identifier[_object] identifier[optional] . identifier[is_optional] = keyword[True] identifier[optional] . identifier[_object] = identifier[_object] keyword[return] identifier[optional]
def optional(_object):
    """
    This decorator has a double functionality: it can wrap validators and make
    them optional or it can wrap keys and make that entry optional.

    **Optional Validator:**
    Allows validators to work only when there is a value that contains
    some data, otherwise it will just not pass the information to the actual
    validator and will not fail as a result.

    As any normal decorator, it can be used correctly with the decorator
    syntax or in the actual schema.

    This is how it would look in a schema::

        ('key', optional(my_validator))

    Where ``my_validator`` can be any validator that accepts a single
    argument.

    In case a class based validator is being used (like the ``recursive`` or
    ``iterables``) then it would look like::

        ('key', optional(class_validator(('key', 'value'))))

    Of course, the schema should vary depending on your needs, it is just the
    way of constructing the validator call that should be important.

    **Optional Keys:**

    Sometimes a given data structure may present optional entries. For example
    this data::

        data = {'required': 1, 'optional': 2}

    To represent this, you will need to declare the `optional` key in the
    schema but by wrapping the key with this decorator you will basically tell
    the validation engine that if that key is present it should be validated,
    otherwise, it should be skipped. This is how the schema would look::

        schema = (('required', 1), (optional('optional'), 1))

    The above schema would allow data that is missing the ``optional`` key.
    The data below would pass validation without any issues::

        data = {'required': 1}
    """
    if is_callable(_object):
        validator = _object

        @wraps(validator)
        def decorated(value):
            if value:
                return validator(value) # depends on [control=['if'], data=[]]
            return
        return decorated # depends on [control=['if'], data=[]]
    else:

        def optional(*args):
            return _object
        optional.is_optional = True
        optional._object = _object
        return optional
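# The validator-wrapping side of optional above, as a runnable snippet; assumes
# is_callable and functools.wraps are in scope, as in the defining module.
@optional
def positive(value):
    assert value > 0, 'must be positive'

positive(0)  # falsy input: the wrapped validator is skipped entirely
positive(3)  # truthy input: the validator runs and passes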
def write_classdesc(self, obj, parent=None): """ Writes a class description :param obj: Class description to write :param parent: """ if obj not in self.references: # Add reference self.references.append(obj) logging.debug( "*** Adding ref 0x%X for classdesc %s", len(self.references) - 1 + self.BASE_REFERENCE_IDX, obj.name, ) self._writeStruct(">B", 1, (self.TC_CLASSDESC,)) self._writeString(obj.name) self._writeStruct(">qB", 1, (obj.serialVersionUID, obj.flags)) self._writeStruct(">H", 1, (len(obj.fields_names),)) for field_name, field_type in zip(obj.fields_names, obj.fields_types): self._writeStruct(">B", 1, (self._convert_type_to_char(field_type),)) self._writeString(field_name) if field_type[0] in (self.TYPE_OBJECT, self.TYPE_ARRAY): try: idx = self.references.index(field_type) except ValueError: # First appearance of the type self.references.append(field_type) logging.debug( "*** Adding ref 0x%X for field type %s", len(self.references) - 1 + self.BASE_REFERENCE_IDX, field_type, ) self.write_string(field_type, False) else: # Write a reference to the previous type logging.debug( "*** Reusing ref 0x%X for %s (%s)", idx + self.BASE_REFERENCE_IDX, field_type, field_name, ) self.write_reference(idx) self._writeStruct(">B", 1, (self.TC_ENDBLOCKDATA,)) if obj.superclass: self.write_classdesc(obj.superclass) else: self.write_null() else: # Use reference self.write_reference(self.references.index(obj))
def function[write_classdesc, parameter[self, obj, parent]]: constant[ Writes a class description :param obj: Class description to write :param parent: ] if compare[name[obj] <ast.NotIn object at 0x7da2590d7190> name[self].references] begin[:] call[name[self].references.append, parameter[name[obj]]] call[name[logging].debug, parameter[constant[*** Adding ref 0x%X for classdesc %s], binary_operation[binary_operation[call[name[len], parameter[name[self].references]] - constant[1]] + name[self].BASE_REFERENCE_IDX], name[obj].name]] call[name[self]._writeStruct, parameter[constant[>B], constant[1], tuple[[<ast.Attribute object at 0x7da18f09dde0>]]]] call[name[self]._writeString, parameter[name[obj].name]] call[name[self]._writeStruct, parameter[constant[>qB], constant[1], tuple[[<ast.Attribute object at 0x7da18f09dcc0>, <ast.Attribute object at 0x7da18f09e350>]]]] call[name[self]._writeStruct, parameter[constant[>H], constant[1], tuple[[<ast.Call object at 0x7da18f09c2b0>]]]] for taget[tuple[[<ast.Name object at 0x7da18f09d510>, <ast.Name object at 0x7da18f09d660>]]] in starred[call[name[zip], parameter[name[obj].fields_names, name[obj].fields_types]]] begin[:] call[name[self]._writeStruct, parameter[constant[>B], constant[1], tuple[[<ast.Call object at 0x7da18f09e500>]]]] call[name[self]._writeString, parameter[name[field_name]]] if compare[call[name[field_type]][constant[0]] in tuple[[<ast.Attribute object at 0x7da18f09e6b0>, <ast.Attribute object at 0x7da18f09e230>]]] begin[:] <ast.Try object at 0x7da18f09ec50> call[name[self]._writeStruct, parameter[constant[>B], constant[1], tuple[[<ast.Attribute object at 0x7da20c76c670>]]]] if name[obj].superclass begin[:] call[name[self].write_classdesc, parameter[name[obj].superclass]]
keyword[def] identifier[write_classdesc] ( identifier[self] , identifier[obj] , identifier[parent] = keyword[None] ): literal[string] keyword[if] identifier[obj] keyword[not] keyword[in] identifier[self] . identifier[references] : identifier[self] . identifier[references] . identifier[append] ( identifier[obj] ) identifier[logging] . identifier[debug] ( literal[string] , identifier[len] ( identifier[self] . identifier[references] )- literal[int] + identifier[self] . identifier[BASE_REFERENCE_IDX] , identifier[obj] . identifier[name] , ) identifier[self] . identifier[_writeStruct] ( literal[string] , literal[int] ,( identifier[self] . identifier[TC_CLASSDESC] ,)) identifier[self] . identifier[_writeString] ( identifier[obj] . identifier[name] ) identifier[self] . identifier[_writeStruct] ( literal[string] , literal[int] ,( identifier[obj] . identifier[serialVersionUID] , identifier[obj] . identifier[flags] )) identifier[self] . identifier[_writeStruct] ( literal[string] , literal[int] ,( identifier[len] ( identifier[obj] . identifier[fields_names] ),)) keyword[for] identifier[field_name] , identifier[field_type] keyword[in] identifier[zip] ( identifier[obj] . identifier[fields_names] , identifier[obj] . identifier[fields_types] ): identifier[self] . identifier[_writeStruct] ( literal[string] , literal[int] ,( identifier[self] . identifier[_convert_type_to_char] ( identifier[field_type] ),)) identifier[self] . identifier[_writeString] ( identifier[field_name] ) keyword[if] identifier[field_type] [ literal[int] ] keyword[in] ( identifier[self] . identifier[TYPE_OBJECT] , identifier[self] . identifier[TYPE_ARRAY] ): keyword[try] : identifier[idx] = identifier[self] . identifier[references] . identifier[index] ( identifier[field_type] ) keyword[except] identifier[ValueError] : identifier[self] . identifier[references] . identifier[append] ( identifier[field_type] ) identifier[logging] . identifier[debug] ( literal[string] , identifier[len] ( identifier[self] . identifier[references] )- literal[int] + identifier[self] . identifier[BASE_REFERENCE_IDX] , identifier[field_type] , ) identifier[self] . identifier[write_string] ( identifier[field_type] , keyword[False] ) keyword[else] : identifier[logging] . identifier[debug] ( literal[string] , identifier[idx] + identifier[self] . identifier[BASE_REFERENCE_IDX] , identifier[field_type] , identifier[field_name] , ) identifier[self] . identifier[write_reference] ( identifier[idx] ) identifier[self] . identifier[_writeStruct] ( literal[string] , literal[int] ,( identifier[self] . identifier[TC_ENDBLOCKDATA] ,)) keyword[if] identifier[obj] . identifier[superclass] : identifier[self] . identifier[write_classdesc] ( identifier[obj] . identifier[superclass] ) keyword[else] : identifier[self] . identifier[write_null] () keyword[else] : identifier[self] . identifier[write_reference] ( identifier[self] . identifier[references] . identifier[index] ( identifier[obj] ))
def write_classdesc(self, obj, parent=None): """ Writes a class description :param obj: Class description to write :param parent: """ if obj not in self.references: # Add reference self.references.append(obj) logging.debug('*** Adding ref 0x%X for classdesc %s', len(self.references) - 1 + self.BASE_REFERENCE_IDX, obj.name) self._writeStruct('>B', 1, (self.TC_CLASSDESC,)) self._writeString(obj.name) self._writeStruct('>qB', 1, (obj.serialVersionUID, obj.flags)) self._writeStruct('>H', 1, (len(obj.fields_names),)) for (field_name, field_type) in zip(obj.fields_names, obj.fields_types): self._writeStruct('>B', 1, (self._convert_type_to_char(field_type),)) self._writeString(field_name) if field_type[0] in (self.TYPE_OBJECT, self.TYPE_ARRAY): try: idx = self.references.index(field_type) # depends on [control=['try'], data=[]] except ValueError: # First appearance of the type self.references.append(field_type) logging.debug('*** Adding ref 0x%X for field type %s', len(self.references) - 1 + self.BASE_REFERENCE_IDX, field_type) self.write_string(field_type, False) # depends on [control=['except'], data=[]] else: # Write a reference to the previous type logging.debug('*** Reusing ref 0x%X for %s (%s)', idx + self.BASE_REFERENCE_IDX, field_type, field_name) self.write_reference(idx) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] self._writeStruct('>B', 1, (self.TC_ENDBLOCKDATA,)) if obj.superclass: self.write_classdesc(obj.superclass) # depends on [control=['if'], data=[]] else: self.write_null() # depends on [control=['if'], data=['obj']] else: # Use reference self.write_reference(self.references.index(obj))
def _index(self, model):
    '''
    Elasticsearch multi-type support has been removed.
    Use one index per model unless __msearch_index__ is set.
    '''
    doc_type = model
    if not isinstance(model, str):
        doc_type = model.__table__.name
    index_name = doc_type
    if hasattr(model, "__msearch_index__"):
        index_name = model.__msearch_index__
    if doc_type not in self._indexs:
        self._indexs[doc_type] = Index(self._client, index_name, doc_type)
    return self._indexs[doc_type]
def function[_index, parameter[self, model]]: constant[ Elasticsearch multi-type support has been removed. Use one index per model unless __msearch_index__ is set. ] variable[doc_type] assign[=] name[model] if <ast.UnaryOp object at 0x7da18bccb520> begin[:] variable[doc_type] assign[=] name[model].__table__.name variable[index_name] assign[=] name[doc_type] if call[name[hasattr], parameter[name[model], constant[__msearch_index__]]] begin[:] variable[index_name] assign[=] name[model].__msearch_index__ if compare[name[doc_type] <ast.NotIn object at 0x7da2590d7190> name[self]._indexs] begin[:] call[name[self]._indexs][name[doc_type]] assign[=] call[name[Index], parameter[name[self]._client, name[index_name], name[doc_type]]] return[call[name[self]._indexs][name[doc_type]]]
keyword[def] identifier[_index] ( identifier[self] , identifier[model] ): literal[string] identifier[doc_type] = identifier[model] keyword[if] keyword[not] identifier[isinstance] ( identifier[model] , identifier[str] ): identifier[doc_type] = identifier[model] . identifier[__table__] . identifier[name] identifier[index_name] = identifier[doc_type] keyword[if] identifier[hasattr] ( identifier[model] , literal[string] ): identifier[index_name] = identifier[model] . identifier[__msearch_index__] keyword[if] identifier[doc_type] keyword[not] keyword[in] identifier[self] . identifier[_indexs] : identifier[self] . identifier[_indexs] [ identifier[doc_type] ]= identifier[Index] ( identifier[self] . identifier[_client] , identifier[index_name] , identifier[doc_type] ) keyword[return] identifier[self] . identifier[_indexs] [ identifier[doc_type] ]
def _index(self, model): """ Elasticsearch multi-type support has been removed. Use one index per model unless __msearch_index__ is set. """ doc_type = model if not isinstance(model, str): doc_type = model.__table__.name # depends on [control=['if'], data=[]] index_name = doc_type if hasattr(model, '__msearch_index__'): index_name = model.__msearch_index__ # depends on [control=['if'], data=[]] if doc_type not in self._indexs: self._indexs[doc_type] = Index(self._client, index_name, doc_type) # depends on [control=['if'], data=['doc_type']] return self._indexs[doc_type]
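The name-resolution rule above (table name as document type, with `__msearch_index__` as an optional per-model index override) can be exercised on its own. A minimal sketch with a stand-in model class, since `Index` and `self._client` are internals of the search extension and are not shown here:

class _FakeTable:
    name = "post"  # what SQLAlchemy exposes as model.__table__.name

class Post:
    __table__ = _FakeTable()
    __msearch_index__ = "blog-post"  # optional per-model index override

def resolve(model):
    # Same lookup order as _index: plain string, table name, then the override.
    doc_type = model if isinstance(model, str) else model.__table__.name
    index_name = getattr(model, "__msearch_index__", doc_type)
    return doc_type, index_name

print(resolve(Post))       # ('post', 'blog-post')
print(resolve("comment"))  # ('comment', 'comment')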
def delete(self, key): """Delete a document by id.""" assert key, "A key must be supplied for delete operations" self._collection.remove(spec_or_id={'_id': key}) LOG.debug("DB REMOVE: %s.%s", self.collection_name, key)
def function[delete, parameter[self, key]]: constant[Delete a document by id.] assert[name[key]] call[name[self]._collection.remove, parameter[]] call[name[LOG].debug, parameter[constant[DB REMOVE: %s.%s], name[self].collection_name, name[key]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[key] ): literal[string] keyword[assert] identifier[key] , literal[string] identifier[self] . identifier[_collection] . identifier[remove] ( identifier[spec_or_id] ={ literal[string] : identifier[key] }) identifier[LOG] . identifier[debug] ( literal[string] , identifier[self] . identifier[collection_name] , identifier[key] )
def delete(self, key): """Delete a document by id.""" assert key, 'A key must be supplied for delete operations' self._collection.remove(spec_or_id={'_id': key}) LOG.debug('DB REMOVE: %s.%s', self.collection_name, key)
def fold_enrichment(self): """(property) Returns the fold enrichment at the XL-mHG cutoff.""" return self.k / (self.K*(self.cutoff/float(self.N)))
def function[fold_enrichment, parameter[self]]: constant[(property) Returns the fold enrichment at the XL-mHG cutoff.] return[binary_operation[name[self].k / binary_operation[name[self].K * binary_operation[name[self].cutoff / call[name[float], parameter[name[self].N]]]]]]
keyword[def] identifier[fold_enrichment] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[k] /( identifier[self] . identifier[K] *( identifier[self] . identifier[cutoff] / identifier[float] ( identifier[self] . identifier[N] )))
def fold_enrichment(self): """(property) Returns the fold enrichment at the XL-mHG cutoff.""" return self.k / (self.K * (self.cutoff / float(self.N)))
def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True, extra_headers=None): """Helper method to fetch remote document. Must be given either the ``url`` or ``host``. If ``url`` is given, only that will be tried without falling back to http from https. If ``host`` given, `path` will be added to it. Will fall back to http on non-success status code. :arg url: Full url to fetch, including protocol :arg host: Domain part only without path or protocol :arg path: Path without domain (defaults to "/") :arg timeout: Seconds to wait for response (defaults to 10) :arg raise_ssl_errors: Pass False if you want to try HTTP even for sites with SSL errors (default True) :returns: Tuple of document (str or None), status code (int or None) and error (an exception class instance or None) :raises ValueError: If neither url nor host are given as parameters """ if not url and not host: raise ValueError("Need url or host.") logger.debug("fetch_document: url=%s, host=%s, path=%s, timeout=%s, raise_ssl_errors=%s", url, host, path, timeout, raise_ssl_errors) headers = {'user-agent': USER_AGENT} if extra_headers: headers.update(extra_headers) if url: # Use url since it was given logger.debug("fetch_document: trying %s", url) try: response = requests.get(url, timeout=timeout, headers=headers) logger.debug("fetch_document: found document, code %s", response.status_code) return response.text, response.status_code, None except RequestException as ex: logger.debug("fetch_document: exception %s", ex) return None, None, ex # Build url with some little sanitizing host_string = host.replace("http://", "").replace("https://", "").strip("/") path_string = path if path.startswith("/") else "/%s" % path url = "https://%s%s" % (host_string, path_string) logger.debug("fetch_document: trying %s", url) try: response = requests.get(url, timeout=timeout, headers=headers) logger.debug("fetch_document: found document, code %s", response.status_code) response.raise_for_status() return response.text, response.status_code, None except (HTTPError, SSLError, ConnectionError) as ex: if isinstance(ex, SSLError) and raise_ssl_errors: logger.debug("fetch_document: exception %s", ex) return None, None, ex # Try http then url = url.replace("https://", "http://") logger.debug("fetch_document: trying %s", url) try: response = requests.get(url, timeout=timeout, headers=headers) logger.debug("fetch_document: found document, code %s", response.status_code) response.raise_for_status() return response.text, response.status_code, None except RequestException as ex: logger.debug("fetch_document: exception %s", ex) return None, None, ex except RequestException as ex: logger.debug("fetch_document: exception %s", ex) return None, None, ex
def function[fetch_document, parameter[url, host, path, timeout, raise_ssl_errors, extra_headers]]: constant[Helper method to fetch remote document. Must be given either the ``url`` or ``host``. If ``url`` is given, only that will be tried without falling back to http from https. If ``host`` given, `path` will be added to it. Will fall back to http on non-success status code. :arg url: Full url to fetch, including protocol :arg host: Domain part only without path or protocol :arg path: Path without domain (defaults to "/") :arg timeout: Seconds to wait for response (defaults to 10) :arg raise_ssl_errors: Pass False if you want to try HTTP even for sites with SSL errors (default True) :returns: Tuple of document (str or None), status code (int or None) and error (an exception class instance or None) :raises ValueError: If neither url nor host are given as parameters ] if <ast.BoolOp object at 0x7da1b031e320> begin[:] <ast.Raise object at 0x7da1b031e560> call[name[logger].debug, parameter[constant[fetch_document: url=%s, host=%s, path=%s, timeout=%s, raise_ssl_errors=%s], name[url], name[host], name[path], name[timeout], name[raise_ssl_errors]]] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b031dd80>], [<ast.Name object at 0x7da1b031d990>]] if name[extra_headers] begin[:] call[name[headers].update, parameter[name[extra_headers]]] if name[url] begin[:] call[name[logger].debug, parameter[constant[fetch_document: trying %s], name[url]]] <ast.Try object at 0x7da1b031d960> variable[host_string] assign[=] call[call[call[name[host].replace, parameter[constant[http://], constant[]]].replace, parameter[constant[https://], constant[]]].strip, parameter[constant[/]]] variable[path_string] assign[=] <ast.IfExp object at 0x7da1b031dc60> variable[url] assign[=] binary_operation[constant[https://%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b031c5b0>, <ast.Name object at 0x7da1b031d780>]]] call[name[logger].debug, parameter[constant[fetch_document: trying %s], name[url]]] <ast.Try object at 0x7da1b031c460>
keyword[def] identifier[fetch_document] ( identifier[url] = keyword[None] , identifier[host] = keyword[None] , identifier[path] = literal[string] , identifier[timeout] = literal[int] , identifier[raise_ssl_errors] = keyword[True] , identifier[extra_headers] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[url] keyword[and] keyword[not] identifier[host] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[url] , identifier[host] , identifier[path] , identifier[timeout] , identifier[raise_ssl_errors] ) identifier[headers] ={ literal[string] : identifier[USER_AGENT] } keyword[if] identifier[extra_headers] : identifier[headers] . identifier[update] ( identifier[extra_headers] ) keyword[if] identifier[url] : identifier[logger] . identifier[debug] ( literal[string] , identifier[url] ) keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[timeout] = identifier[timeout] , identifier[headers] = identifier[headers] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[response] . identifier[status_code] ) keyword[return] identifier[response] . identifier[text] , identifier[response] . identifier[status_code] , keyword[None] keyword[except] identifier[RequestException] keyword[as] identifier[ex] : identifier[logger] . identifier[debug] ( literal[string] , identifier[ex] ) keyword[return] keyword[None] , keyword[None] , identifier[ex] identifier[host_string] = identifier[host] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ( literal[string] ) identifier[path_string] = identifier[path] keyword[if] identifier[path] . identifier[startswith] ( literal[string] ) keyword[else] literal[string] % identifier[path] identifier[url] = literal[string] %( identifier[host_string] , identifier[path_string] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[url] ) keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[timeout] = identifier[timeout] , identifier[headers] = identifier[headers] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[response] . identifier[status_code] ) identifier[response] . identifier[raise_for_status] () keyword[return] identifier[response] . identifier[text] , identifier[response] . identifier[status_code] , keyword[None] keyword[except] ( identifier[HTTPError] , identifier[SSLError] , identifier[ConnectionError] ) keyword[as] identifier[ex] : keyword[if] identifier[isinstance] ( identifier[ex] , identifier[SSLError] ) keyword[and] identifier[raise_ssl_errors] : identifier[logger] . identifier[debug] ( literal[string] , identifier[ex] ) keyword[return] keyword[None] , keyword[None] , identifier[ex] identifier[url] = identifier[url] . identifier[replace] ( literal[string] , literal[string] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[url] ) keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[timeout] = identifier[timeout] , identifier[headers] = identifier[headers] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[response] . identifier[status_code] ) identifier[response] . identifier[raise_for_status] () keyword[return] identifier[response] . identifier[text] , identifier[response] . identifier[status_code] , keyword[None] keyword[except] identifier[RequestException] keyword[as] identifier[ex] : identifier[logger] . identifier[debug] ( literal[string] , identifier[ex] ) keyword[return] keyword[None] , keyword[None] , identifier[ex] keyword[except] identifier[RequestException] keyword[as] identifier[ex] : identifier[logger] . identifier[debug] ( literal[string] , identifier[ex] ) keyword[return] keyword[None] , keyword[None] , identifier[ex]
def fetch_document(url=None, host=None, path='/', timeout=10, raise_ssl_errors=True, extra_headers=None): """Helper method to fetch remote document. Must be given either the ``url`` or ``host``. If ``url`` is given, only that will be tried without falling back to http from https. If ``host`` given, `path` will be added to it. Will fall back to http on non-success status code. :arg url: Full url to fetch, including protocol :arg host: Domain part only without path or protocol :arg path: Path without domain (defaults to "/") :arg timeout: Seconds to wait for response (defaults to 10) :arg raise_ssl_errors: Pass False if you want to try HTTP even for sites with SSL errors (default True) :returns: Tuple of document (str or None), status code (int or None) and error (an exception class instance or None) :raises ValueError: If neither url nor host are given as parameters """ if not url and (not host): raise ValueError('Need url or host.') # depends on [control=['if'], data=[]] logger.debug('fetch_document: url=%s, host=%s, path=%s, timeout=%s, raise_ssl_errors=%s', url, host, path, timeout, raise_ssl_errors) headers = {'user-agent': USER_AGENT} if extra_headers: headers.update(extra_headers) # depends on [control=['if'], data=[]] if url: # Use url since it was given logger.debug('fetch_document: trying %s', url) try: response = requests.get(url, timeout=timeout, headers=headers) logger.debug('fetch_document: found document, code %s', response.status_code) return (response.text, response.status_code, None) # depends on [control=['try'], data=[]] except RequestException as ex: logger.debug('fetch_document: exception %s', ex) return (None, None, ex) # depends on [control=['except'], data=['ex']] # depends on [control=['if'], data=[]] # Build url with some little sanitizing host_string = host.replace('http://', '').replace('https://', '').strip('/') path_string = path if path.startswith('/') else '/%s' % path url = 'https://%s%s' % (host_string, path_string) logger.debug('fetch_document: trying %s', url) try: response = requests.get(url, timeout=timeout, headers=headers) logger.debug('fetch_document: found document, code %s', response.status_code) response.raise_for_status() return (response.text, response.status_code, None) # depends on [control=['try'], data=[]] except (HTTPError, SSLError, ConnectionError) as ex: if isinstance(ex, SSLError) and raise_ssl_errors: logger.debug('fetch_document: exception %s', ex) return (None, None, ex) # depends on [control=['if'], data=[]] # Try http then url = url.replace('https://', 'http://') logger.debug('fetch_document: trying %s', url) try: response = requests.get(url, timeout=timeout, headers=headers) logger.debug('fetch_document: found document, code %s', response.status_code) response.raise_for_status() return (response.text, response.status_code, None) # depends on [control=['try'], data=[]] except RequestException as ex: logger.debug('fetch_document: exception %s', ex) return (None, None, ex) # depends on [control=['except'], data=['ex']] # depends on [control=['except'], data=['ex']] except RequestException as ex: logger.debug('fetch_document: exception %s', ex) return (None, None, ex) # depends on [control=['except'], data=['ex']]
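Callers unpack the (document, status, error) triple and branch on `error` first. A short usage sketch, assuming `fetch_document` is imported from its defining module (not named here):

# Full-URL form: no http fallback is attempted.
doc, status, error = fetch_document(url="https://example.com/.well-known/nodeinfo")
if error:
    print("fetch failed:", error)
else:
    print(status, doc[:80])

# Host + path form: tries https first, falls back to http unless the
# failure was an SSL error and raise_ssl_errors is True.
doc, status, error = fetch_document(host="example.com", path="/robots.txt")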
def frameworkMessage(self, driver, executorId, agentId, message): """ Invoked when an executor sends a message. """ # Take it out of base 64 encoding from Protobuf message = decode_data(message) log.debug('Got framework message from executor %s running on agent %s: %s', executorId.value, agentId.value, message) message = ast.literal_eval(message) assert isinstance(message, dict) # Handle the mandatory fields of a message nodeAddress = message.pop('address') executor = self._registerNode(nodeAddress, agentId.value) # Handle optional message fields for k, v in iteritems(message): if k == 'nodeInfo': assert isinstance(v, dict) resources = [taskData for taskData in itervalues(self.runningJobMap) if taskData.executorID == executorId.value] requestedCores = sum(taskData.cores for taskData in resources) requestedMemory = sum(taskData.memory for taskData in resources) executor.nodeInfo = NodeInfo(requestedCores=requestedCores, requestedMemory=requestedMemory, **v) self.executors[nodeAddress] = executor else: raise RuntimeError("Unknown message field '%s'." % k)
def function[frameworkMessage, parameter[self, driver, executorId, agentId, message]]: constant[ Invoked when an executor sends a message. ] variable[message] assign[=] call[name[decode_data], parameter[name[message]]] call[name[log].debug, parameter[constant[Got framework message from executor %s running on agent %s: %s], name[executorId].value, name[agentId].value, name[message]]] variable[message] assign[=] call[name[ast].literal_eval, parameter[name[message]]] assert[call[name[isinstance], parameter[name[message], name[dict]]]] variable[nodeAddress] assign[=] call[name[message].pop, parameter[constant[address]]] variable[executor] assign[=] call[name[self]._registerNode, parameter[name[nodeAddress], name[agentId].value]] for taget[tuple[[<ast.Name object at 0x7da1b1eef3a0>, <ast.Name object at 0x7da1b1eef9a0>]]] in starred[call[name[iteritems], parameter[name[message]]]] begin[:] if compare[name[k] equal[==] constant[nodeInfo]] begin[:] assert[call[name[isinstance], parameter[name[v], name[dict]]]] variable[resources] assign[=] <ast.ListComp object at 0x7da1b1eede70> variable[requestedCores] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b1eed570>]] variable[requestedMemory] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18f58c670>]] name[executor].nodeInfo assign[=] call[name[NodeInfo], parameter[]] call[name[self].executors][name[nodeAddress]] assign[=] name[executor]
keyword[def] identifier[frameworkMessage] ( identifier[self] , identifier[driver] , identifier[executorId] , identifier[agentId] , identifier[message] ): literal[string] identifier[message] = identifier[decode_data] ( identifier[message] ) identifier[log] . identifier[debug] ( literal[string] , identifier[executorId] . identifier[value] , identifier[agentId] . identifier[value] , identifier[message] ) identifier[message] = identifier[ast] . identifier[literal_eval] ( identifier[message] ) keyword[assert] identifier[isinstance] ( identifier[message] , identifier[dict] ) identifier[nodeAddress] = identifier[message] . identifier[pop] ( literal[string] ) identifier[executor] = identifier[self] . identifier[_registerNode] ( identifier[nodeAddress] , identifier[agentId] . identifier[value] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[message] ): keyword[if] identifier[k] == literal[string] : keyword[assert] identifier[isinstance] ( identifier[v] , identifier[dict] ) identifier[resources] =[ identifier[taskData] keyword[for] identifier[taskData] keyword[in] identifier[itervalues] ( identifier[self] . identifier[runningJobMap] ) keyword[if] identifier[taskData] . identifier[executorID] == identifier[executorId] . identifier[value] ] identifier[requestedCores] = identifier[sum] ( identifier[taskData] . identifier[cores] keyword[for] identifier[taskData] keyword[in] identifier[resources] ) identifier[requestedMemory] = identifier[sum] ( identifier[taskData] . identifier[memory] keyword[for] identifier[taskData] keyword[in] identifier[resources] ) identifier[executor] . identifier[nodeInfo] = identifier[NodeInfo] ( identifier[requestedCores] = identifier[requestedCores] , identifier[requestedMemory] = identifier[requestedMemory] ,** identifier[v] ) identifier[self] . identifier[executors] [ identifier[nodeAddress] ]= identifier[executor] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[k] )
def frameworkMessage(self, driver, executorId, agentId, message): """ Invoked when an executor sends a message. """ # Take it out of base 64 encoding from Protobuf message = decode_data(message) log.debug('Got framework message from executor %s running on agent %s: %s', executorId.value, agentId.value, message) message = ast.literal_eval(message) assert isinstance(message, dict) # Handle the mandatory fields of a message nodeAddress = message.pop('address') executor = self._registerNode(nodeAddress, agentId.value) # Handle optional message fields for (k, v) in iteritems(message): if k == 'nodeInfo': assert isinstance(v, dict) resources = [taskData for taskData in itervalues(self.runningJobMap) if taskData.executorID == executorId.value] requestedCores = sum((taskData.cores for taskData in resources)) requestedMemory = sum((taskData.memory for taskData in resources)) executor.nodeInfo = NodeInfo(requestedCores=requestedCores, requestedMemory=requestedMemory, **v) self.executors[nodeAddress] = executor # depends on [control=['if'], data=[]] else: raise RuntimeError("Unknown message field '%s'." % k) # depends on [control=['for'], data=[]]
def pt_fingerprint(query): """ Takes a query (in a string) and returns its 'fingerprint' """ if not have_program('pt-fingerprint'): # pragma: no cover raise OSError("pt-fingerprint doesn't appear to be installed") thread = PTFingerprintThread.get_thread() thread.in_queue.put(query) return thread.out_queue.get()
def function[pt_fingerprint, parameter[query]]: constant[ Takes a query (in a string) and returns its 'fingerprint' ] if <ast.UnaryOp object at 0x7da1b061abf0> begin[:] <ast.Raise object at 0x7da1b0618940> variable[thread] assign[=] call[name[PTFingerprintThread].get_thread, parameter[]] call[name[thread].in_queue.put, parameter[name[query]]] return[call[name[thread].out_queue.get, parameter[]]]
keyword[def] identifier[pt_fingerprint] ( identifier[query] ): literal[string] keyword[if] keyword[not] identifier[have_program] ( literal[string] ): keyword[raise] identifier[OSError] ( literal[string] ) identifier[thread] = identifier[PTFingerprintThread] . identifier[get_thread] () identifier[thread] . identifier[in_queue] . identifier[put] ( identifier[query] ) keyword[return] identifier[thread] . identifier[out_queue] . identifier[get] ()
def pt_fingerprint(query): """ Takes a query (in a string) and returns its 'fingerprint' """ if not have_program('pt-fingerprint'): # pragma: no cover raise OSError("pt-fingerprint doesn't appear to be installed") # depends on [control=['if'], data=[]] thread = PTFingerprintThread.get_thread() thread.in_queue.put(query) return thread.out_queue.get()
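Usage is one call per query; the worker thread and its queues are managed by `PTFingerprintThread`. A sketch (the exact output is whatever the installed pt-fingerprint binary emits; typically literals are collapsed to `?`):

# Requires Percona Toolkit's pt-fingerprint on the PATH.
fp = pt_fingerprint("SELECT * FROM users WHERE id = 42")
print(fp)  # e.g. "select * from users where id = ?"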
def create_store_credit_transaction(cls, store_credit_transaction, **kwargs): """Create StoreCreditTransaction Create a new StoreCreditTransaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_store_credit_transaction(store_credit_transaction, async=True) >>> result = thread.get() :param async bool :param StoreCreditTransaction store_credit_transaction: Attributes of storeCreditTransaction to create (required) :return: StoreCreditTransaction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._create_store_credit_transaction_with_http_info(store_credit_transaction, **kwargs) else: (data) = cls._create_store_credit_transaction_with_http_info(store_credit_transaction, **kwargs) return data
def function[create_store_credit_transaction, parameter[cls, store_credit_transaction]]: constant[Create StoreCreditTransaction Create a new StoreCreditTransaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_store_credit_transaction(store_credit_transaction, async=True) >>> result = thread.get() :param async bool :param StoreCreditTransaction store_credit_transaction: Attributes of storeCreditTransaction to create (required) :return: StoreCreditTransaction If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async]]] begin[:] return[call[name[cls]._create_store_credit_transaction_with_http_info, parameter[name[store_credit_transaction]]]]
keyword[def] identifier[create_store_credit_transaction] ( identifier[cls] , identifier[store_credit_transaction] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[cls] . identifier[_create_store_credit_transaction_with_http_info] ( identifier[store_credit_transaction] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[cls] . identifier[_create_store_credit_transaction_with_http_info] ( identifier[store_credit_transaction] ,** identifier[kwargs] ) keyword[return] identifier[data]
def create_store_credit_transaction(cls, store_credit_transaction, **kwargs): """Create StoreCreditTransaction Create a new StoreCreditTransaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_store_credit_transaction(store_credit_transaction, async=True) >>> result = thread.get() :param async bool :param StoreCreditTransaction store_credit_transaction: Attributes of storeCreditTransaction to create (required) :return: StoreCreditTransaction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._create_store_credit_transaction_with_http_info(store_credit_transaction, **kwargs) # depends on [control=['if'], data=[]] else: data = cls._create_store_credit_transaction_with_http_info(store_credit_transaction, **kwargs) return data
def dim(self, dim): """Adjusts contrast to dim the display if dim is True, otherwise sets the contrast to normal brightness if dim is False. """ # Assume dim display. contrast = 0 # Adjust contrast based on VCC if not dimming. if not dim: if self._vccstate == SSD1306_EXTERNALVCC: contrast = 0x9F else: contrast = 0xCF
def function[dim, parameter[self, dim]]: constant[Adjusts contrast to dim the display if dim is True, otherwise sets the contrast to normal brightness if dim is False. ] variable[contrast] assign[=] constant[0] if <ast.UnaryOp object at 0x7da1b1d36bc0> begin[:] if compare[name[self]._vccstate equal[==] name[SSD1306_EXTERNALVCC]] begin[:] variable[contrast] assign[=] constant[159]
keyword[def] identifier[dim] ( identifier[self] , identifier[dim] ): literal[string] identifier[contrast] = literal[int] keyword[if] keyword[not] identifier[dim] : keyword[if] identifier[self] . identifier[_vccstate] == identifier[SSD1306_EXTERNALVCC] : identifier[contrast] = literal[int] keyword[else] : identifier[contrast] = literal[int]
def dim(self, dim): """Adjusts contrast to dim the display if dim is True, otherwise sets the contrast to normal brightness if dim is False. """ # Assume dim display. contrast = 0 # Adjust contrast based on VCC if not dimming. if not dim: if self._vccstate == SSD1306_EXTERNALVCC: contrast = 159 # depends on [control=['if'], data=[]] else: contrast = 207 # depends on [control=['if'], data=[]]
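Note that `dim` computes a contrast value (0 when dimming; 0x9F or 0xCF otherwise, depending on the VCC state) but the snippet never writes it to the controller. A hedged completion, assuming the class exposes a `set_contrast` helper that issues the SSD1306 SET_CONTRAST (0x81) command; that helper name is an assumption about the surrounding driver:

def dim(self, dim):
    """Dim the display if dim is True, restore normal brightness otherwise."""
    contrast = 0  # assume dim display
    if not dim:
        # Brighter value when the panel runs from external VCC.
        contrast = 0x9F if self._vccstate == SSD1306_EXTERNALVCC else 0xCF
    # Assumed helper: sends the 0x81 SET_CONTRAST command followed by the value.
    self.set_contrast(contrast)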
def int_to_bytes(int_, width = None):
    """
    .. _int_to_bytes:

    Converts the ``int`` ``int_`` to a ``bytes`` object.
    ``len(result) == ceil(width / 8)``.
    If ``width`` is None, a number of bytes that is able to hold the
    number is chosen, depending on ``int_.bit_length()``.

    See also: bytes_to_int_
    """
    if(width == None):
        width = int_.bit_length()
    byts = math.ceil(width / 8)
    return bytes([ (int_ >> (shift * 8)) & 0xff for shift in range(byts)])
def function[int_to_bytes, parameter[int_, width]]: constant[ .. _int_to_bytes: Converts the ``int`` ``int_`` to a ``bytes`` object. ``len(result) == ceil(width / 8)``. If ``width`` is None, a number of bytes that is able to hold the number is chosen, depending on ``int_.bit_length()``. See also: bytes_to_int_ ] if compare[name[width] equal[==] constant[None]] begin[:] variable[width] assign[=] call[name[int_].bit_length, parameter[]] variable[byts] assign[=] call[name[math].ceil, parameter[binary_operation[name[width] / constant[8]]]] return[call[name[bytes], parameter[<ast.ListComp object at 0x7da1b1418370>]]]
keyword[def] identifier[int_to_bytes] ( identifier[int_] , identifier[width] = keyword[None] ): literal[string] keyword[if] ( identifier[width] == keyword[None] ): identifier[width] = identifier[int_] . identifier[bit_length] () identifier[byts] = identifier[math] . identifier[ceil] ( identifier[width] / literal[int] ) keyword[return] identifier[bytes] ([( identifier[int_] >>( identifier[shift] * literal[int] ))& literal[int] keyword[for] identifier[shift] keyword[in] identifier[range] ( identifier[byts] )])
def int_to_bytes(int_, width=None): """ .. _int_to_bytes: Converts the ``int`` ``int_`` to a ``bytes`` object. ``len(result) == ceil(width / 8)``. If ``width`` is None, a number of bytes that is able to hold the number is chosen, depending on ``int_.bit_length()``. See also: bytes_to_int_ """ if width == None: width = int_.bit_length() # depends on [control=['if'], data=['width']] byts = math.ceil(width / 8) return bytes([int_ >> shift * 8 & 255 for shift in range(byts)])
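Two worked calls, assuming `int_to_bytes` (and its `math` import) are in scope, showing that the output is least-significant byte first and that `width` is a bit count, so the result holds ceil(width / 8) bytes:

print(int_to_bytes(0x1234))       # b'4\x12', i.e. 0x34 then 0x12 (13 bits -> 2 bytes)
print(int_to_bytes(1, width=32))  # b'\x01\x00\x00\x00' (32 bits -> 4 bytes)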
def create_pool(dsn=None, *, min_size=10, max_size=10, max_queries=50000, max_inactive_connection_lifetime=300.0, setup=None, init=None, loop=None, connection_class=connection.Connection, **connect_kwargs): r"""Create a connection pool. Can be used either with an ``async with`` block: .. code-block:: python async with asyncpg.create_pool(user='postgres', command_timeout=60) as pool: async with pool.acquire() as con: await con.fetch('SELECT 1') Or directly with ``await``: .. code-block:: python pool = await asyncpg.create_pool(user='postgres', command_timeout=60) con = await pool.acquire() try: await con.fetch('SELECT 1') finally: await pool.release(con) .. warning:: Prepared statements and cursors returned by :meth:`Connection.prepare() <connection.Connection.prepare>` and :meth:`Connection.cursor() <connection.Connection.cursor>` become invalid once the connection is released. Likewise, all notification and log listeners are removed, and ``asyncpg`` will issue a warning if there are any listener callbacks registered on a connection that is being released to the pool. :param str dsn: Connection arguments specified using as a single string in the following format: ``postgres://user:pass@host:port/database?option=value``. :param \*\*connect_kwargs: Keyword arguments for the :func:`~asyncpg.connection.connect` function. :param Connection connection_class: The class to use for connections. Must be a subclass of :class:`~asyncpg.connection.Connection`. :param int min_size: Number of connection the pool will be initialized with. :param int max_size: Max number of connections in the pool. :param int max_queries: Number of queries after a connection is closed and replaced with a new connection. :param float max_inactive_connection_lifetime: Number of seconds after which inactive connections in the pool will be closed. Pass ``0`` to disable this mechanism. :param coroutine setup: A coroutine to prepare a connection right before it is returned from :meth:`Pool.acquire() <pool.Pool.acquire>`. An example use case would be to automatically set up notifications listeners for all connections of a pool. :param coroutine init: A coroutine to initialize a connection when it is created. An example use case would be to setup type codecs with :meth:`Connection.set_builtin_type_codec() <\ asyncpg.connection.Connection.set_builtin_type_codec>` or :meth:`Connection.set_type_codec() <\ asyncpg.connection.Connection.set_type_codec>`. :param loop: An asyncio event loop instance. If ``None``, the default event loop will be used. :return: An instance of :class:`~asyncpg.pool.Pool`. .. versionchanged:: 0.10.0 An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any attempted operation on a released connection. .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any attempted operation on a prepared statement or a cursor created on a connection that has been released to the pool. .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceWarning` will be produced if there are any active listeners (added via :meth:`Connection.add_listener() <connection.Connection.add_listener>` or :meth:`Connection.add_log_listener() <connection.Connection.add_log_listener>`) present on the connection at the moment of its release to the pool. """ return Pool( dsn, connection_class=connection_class, min_size=min_size, max_size=max_size, max_queries=max_queries, loop=loop, setup=setup, init=init, max_inactive_connection_lifetime=max_inactive_connection_lifetime, **connect_kwargs)
def function[create_pool, parameter[dsn]]: constant[Create a connection pool. Can be used either with an ``async with`` block: .. code-block:: python async with asyncpg.create_pool(user='postgres', command_timeout=60) as pool: async with pool.acquire() as con: await con.fetch('SELECT 1') Or directly with ``await``: .. code-block:: python pool = await asyncpg.create_pool(user='postgres', command_timeout=60) con = await pool.acquire() try: await con.fetch('SELECT 1') finally: await pool.release(con) .. warning:: Prepared statements and cursors returned by :meth:`Connection.prepare() <connection.Connection.prepare>` and :meth:`Connection.cursor() <connection.Connection.cursor>` become invalid once the connection is released. Likewise, all notification and log listeners are removed, and ``asyncpg`` will issue a warning if there are any listener callbacks registered on a connection that is being released to the pool. :param str dsn: Connection arguments specified using as a single string in the following format: ``postgres://user:pass@host:port/database?option=value``. :param \*\*connect_kwargs: Keyword arguments for the :func:`~asyncpg.connection.connect` function. :param Connection connection_class: The class to use for connections. Must be a subclass of :class:`~asyncpg.connection.Connection`. :param int min_size: Number of connection the pool will be initialized with. :param int max_size: Max number of connections in the pool. :param int max_queries: Number of queries after a connection is closed and replaced with a new connection. :param float max_inactive_connection_lifetime: Number of seconds after which inactive connections in the pool will be closed. Pass ``0`` to disable this mechanism. :param coroutine setup: A coroutine to prepare a connection right before it is returned from :meth:`Pool.acquire() <pool.Pool.acquire>`. An example use case would be to automatically set up notifications listeners for all connections of a pool. :param coroutine init: A coroutine to initialize a connection when it is created. An example use case would be to setup type codecs with :meth:`Connection.set_builtin_type_codec() <\ asyncpg.connection.Connection.set_builtin_type_codec>` or :meth:`Connection.set_type_codec() <\ asyncpg.connection.Connection.set_type_codec>`. :param loop: An asyncio event loop instance. If ``None``, the default event loop will be used. :return: An instance of :class:`~asyncpg.pool.Pool`. .. versionchanged:: 0.10.0 An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any attempted operation on a released connection. .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any attempted operation on a prepared statement or a cursor created on a connection that has been released to the pool. .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceWarning` will be produced if there are any active listeners (added via :meth:`Connection.add_listener() <connection.Connection.add_listener>` or :meth:`Connection.add_log_listener() <connection.Connection.add_log_listener>`) present on the connection at the moment of its release to the pool. ] return[call[name[Pool], parameter[name[dsn]]]]
keyword[def] identifier[create_pool] ( identifier[dsn] = keyword[None] ,*, identifier[min_size] = literal[int] , identifier[max_size] = literal[int] , identifier[max_queries] = literal[int] , identifier[max_inactive_connection_lifetime] = literal[int] , identifier[setup] = keyword[None] , identifier[init] = keyword[None] , identifier[loop] = keyword[None] , identifier[connection_class] = identifier[connection] . identifier[Connection] , ** identifier[connect_kwargs] ): literal[string] keyword[return] identifier[Pool] ( identifier[dsn] , identifier[connection_class] = identifier[connection_class] , identifier[min_size] = identifier[min_size] , identifier[max_size] = identifier[max_size] , identifier[max_queries] = identifier[max_queries] , identifier[loop] = identifier[loop] , identifier[setup] = identifier[setup] , identifier[init] = identifier[init] , identifier[max_inactive_connection_lifetime] = identifier[max_inactive_connection_lifetime] , ** identifier[connect_kwargs] )
def create_pool(dsn=None, *, min_size=10, max_size=10, max_queries=50000, max_inactive_connection_lifetime=300.0, setup=None, init=None, loop=None, connection_class=connection.Connection, **connect_kwargs): """Create a connection pool. Can be used either with an ``async with`` block: .. code-block:: python async with asyncpg.create_pool(user='postgres', command_timeout=60) as pool: async with pool.acquire() as con: await con.fetch('SELECT 1') Or directly with ``await``: .. code-block:: python pool = await asyncpg.create_pool(user='postgres', command_timeout=60) con = await pool.acquire() try: await con.fetch('SELECT 1') finally: await pool.release(con) .. warning:: Prepared statements and cursors returned by :meth:`Connection.prepare() <connection.Connection.prepare>` and :meth:`Connection.cursor() <connection.Connection.cursor>` become invalid once the connection is released. Likewise, all notification and log listeners are removed, and ``asyncpg`` will issue a warning if there are any listener callbacks registered on a connection that is being released to the pool. :param str dsn: Connection arguments specified using as a single string in the following format: ``postgres://user:pass@host:port/database?option=value``. :param \\*\\*connect_kwargs: Keyword arguments for the :func:`~asyncpg.connection.connect` function. :param Connection connection_class: The class to use for connections. Must be a subclass of :class:`~asyncpg.connection.Connection`. :param int min_size: Number of connection the pool will be initialized with. :param int max_size: Max number of connections in the pool. :param int max_queries: Number of queries after a connection is closed and replaced with a new connection. :param float max_inactive_connection_lifetime: Number of seconds after which inactive connections in the pool will be closed. Pass ``0`` to disable this mechanism. :param coroutine setup: A coroutine to prepare a connection right before it is returned from :meth:`Pool.acquire() <pool.Pool.acquire>`. An example use case would be to automatically set up notifications listeners for all connections of a pool. :param coroutine init: A coroutine to initialize a connection when it is created. An example use case would be to setup type codecs with :meth:`Connection.set_builtin_type_codec() <\\ asyncpg.connection.Connection.set_builtin_type_codec>` or :meth:`Connection.set_type_codec() <\\ asyncpg.connection.Connection.set_type_codec>`. :param loop: An asyncio event loop instance. If ``None``, the default event loop will be used. :return: An instance of :class:`~asyncpg.pool.Pool`. .. versionchanged:: 0.10.0 An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any attempted operation on a released connection. .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any attempted operation on a prepared statement or a cursor created on a connection that has been released to the pool. .. versionchanged:: 0.13.0 An :exc:`~asyncpg.exceptions.InterfaceWarning` will be produced if there are any active listeners (added via :meth:`Connection.add_listener() <connection.Connection.add_listener>` or :meth:`Connection.add_log_listener() <connection.Connection.add_log_listener>`) present on the connection at the moment of its release to the pool. """ return Pool(dsn, connection_class=connection_class, min_size=min_size, max_size=max_size, max_queries=max_queries, loop=loop, setup=setup, init=init, max_inactive_connection_lifetime=max_inactive_connection_lifetime, **connect_kwargs)
def substring_search(word, collection): """Find all matches in the `collection` for the specified `word`. If `word` is empty, returns all items in `collection`. :type word: str :param word: The substring to search for. :type collection: collection, usually a list :param collection: A collection of words to match. :rtype: list of strings :return: A sorted list of matching words from collection. """ return [item for item in sorted(collection) if item.startswith(word)]
def function[substring_search, parameter[word, collection]]: constant[Find all matches in the `collection` for the specified `word`. If `word` is empty, returns all items in `collection`. :type word: str :param word: The substring to search for. :type collection: collection, usually a list :param collection: A collection of words to match. :rtype: list of strings :return: A sorted list of matching words from collection. ] return[<ast.ListComp object at 0x7da18c4cf550>]
keyword[def] identifier[substring_search] ( identifier[word] , identifier[collection] ): literal[string] keyword[return] [ identifier[item] keyword[for] identifier[item] keyword[in] identifier[sorted] ( identifier[collection] ) keyword[if] identifier[item] . identifier[startswith] ( identifier[word] )]
def substring_search(word, collection): """Find all matches in the `collection` for the specified `word`. If `word` is empty, returns all items in `collection`. :type word: str :param word: The substring to search for. :type collection: collection, usually a list :param collection: A collection of words to match. :rtype: list of strings :return: A sorted list of matching words from collection. """ return [item for item in sorted(collection) if item.startswith(word)]
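Despite the name, the match is a prefix test (`startswith`), and an empty `word` returns the whole collection sorted; a quick demonstration:

words = ["foobar", "bar", "foo", "baz"]
print(substring_search("foo", words))  # ['foo', 'foobar']
print(substring_search("", words))     # ['bar', 'baz', 'foo', 'foobar']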
def _makeLocationElement(self, locationObject, name=None):
    """ Convert Location object to a locationElement."""
    locElement = ET.Element("location")
    if name is not None:
        locElement.attrib['name'] = name
    for dimensionName, dimensionValue in locationObject.items():
        dimElement = ET.Element('dimension')
        dimElement.attrib['name'] = dimensionName
        if type(dimensionValue)==tuple:
            dimElement.attrib['xvalue'] = "%f"%dimensionValue[0]
            dimElement.attrib['yvalue'] = "%f"%dimensionValue[1]
        else:
            dimElement.attrib['xvalue'] = "%f"%dimensionValue
        locElement.append(dimElement)
    return locElement
def function[_makeLocationElement, parameter[self, locationObject, name]]: constant[ Convert Location object to a locationElement.] variable[locElement] assign[=] call[name[ET].Element, parameter[constant[location]]] if compare[name[name] is_not constant[None]] begin[:] call[name[locElement].attrib][constant[name]] assign[=] name[name] for taget[tuple[[<ast.Name object at 0x7da204623ee0>, <ast.Name object at 0x7da2046219c0>]]] in starred[call[name[locationObject].items, parameter[]]] begin[:] variable[dimElement] assign[=] call[name[ET].Element, parameter[constant[dimension]]] call[name[dimElement].attrib][constant[name]] assign[=] name[dimensionName] if compare[call[name[type], parameter[name[dimensionValue]]] equal[==] name[tuple]] begin[:] call[name[dimElement].attrib][constant[xvalue]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[dimensionValue]][constant[0]]] call[name[dimElement].attrib][constant[yvalue]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[dimensionValue]][constant[1]]] call[name[locElement].append, parameter[name[dimElement]]] return[name[locElement]]
keyword[def] identifier[_makeLocationElement] ( identifier[self] , identifier[locationObject] , identifier[name] = keyword[None] ): literal[string] identifier[locElement] = identifier[ET] . identifier[Element] ( literal[string] ) keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] : identifier[locElement] . identifier[attrib] [ literal[string] ]= identifier[name] keyword[for] identifier[dimensionName] , identifier[dimensionValue] keyword[in] identifier[locationObject] . identifier[items] (): identifier[dimElement] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[dimElement] . identifier[attrib] [ literal[string] ]= identifier[dimensionName] keyword[if] identifier[type] ( identifier[dimensionValue] )== identifier[tuple] : identifier[dimElement] . identifier[attrib] [ literal[string] ]= literal[string] % identifier[dimensionValue] [ literal[int] ] identifier[dimElement] . identifier[attrib] [ literal[string] ]= literal[string] % identifier[dimensionValue] [ literal[int] ] keyword[else] : identifier[dimElement] . identifier[attrib] [ literal[string] ]= literal[string] % identifier[dimensionValue] identifier[locElement] . identifier[append] ( identifier[dimElement] ) keyword[return] identifier[locElement]
def _makeLocationElement(self, locationObject, name=None): """ Convert Location object to a locationElement.""" locElement = ET.Element('location') if name is not None: locElement.attrib['name'] = name # depends on [control=['if'], data=['name']] for (dimensionName, dimensionValue) in locationObject.items(): dimElement = ET.Element('dimension') dimElement.attrib['name'] = dimensionName if type(dimensionValue) == tuple: dimElement.attrib['xvalue'] = '%f' % dimensionValue[0] dimElement.attrib['yvalue'] = '%f' % dimensionValue[1] # depends on [control=['if'], data=[]] else: dimElement.attrib['xvalue'] = '%f' % dimensionValue locElement.append(dimElement) # depends on [control=['for'], data=[]] return locElement
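The element produced for a location mixes scalar and anisotropic (2-tuple) dimension values; a standalone sketch of the same construction, using a plain dict in place of the Location object:

import xml.etree.ElementTree as ET

loc = {"weight": 350.0, "width": (50.0, 60.0)}  # a 2-tuple becomes an xvalue/yvalue pair
el = ET.Element("location")
el.attrib["name"] = "medium"
for dim_name, dim_value in loc.items():
    dim = ET.Element("dimension")
    dim.attrib["name"] = dim_name
    if isinstance(dim_value, tuple):
        dim.attrib["xvalue"] = "%f" % dim_value[0]
        dim.attrib["yvalue"] = "%f" % dim_value[1]
    else:
        dim.attrib["xvalue"] = "%f" % dim_value
    el.append(dim)
print(ET.tostring(el).decode())
# <location name="medium"><dimension name="weight" xvalue="350.000000" />
# <dimension name="width" xvalue="50.000000" yvalue="60.000000" /></location>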
def makePlot(gmag, pdf=False, png=False, rvs=False):
    """
    Make a plot of a Mv vs (V-I) colour magnitude diagram containing lines of constant distance
    for stars at G=20. This will give an idea of the reach of Gaia.

    Parameters
    ----------

    gmag - Gaia G-band (or G_RVS, if rvs is True) magnitude limit
    pdf  - save the figure as PDF if True
    png  - save the figure as PNG if True
    rvs  - interpret gmag as the G_RVS limit if True
    """
    vmini = np.linspace(-0.5,4.0,100)
    if (rvs):
        gminv = -vminGrvsFromVmini(vmini)
    else:
        gminv = gminvFromVmini(vmini)
    mvlimit100pc = gmag-5.0*np.log10(100.0)+5.0-gminv
    mvlimit1kpc = gmag-5.0*np.log10(1000.0)+5.0-gminv
    mvlimit10kpc = gmag-5.0*np.log10(10000.0)+5.0-gminv

    fig=plt.figure(figsize=(8,8))
    plt.plot(vmini,mvlimit100pc,'b')
    plt.text(vmini[50]-0.4,mvlimit100pc[50],"$d=100$ pc", horizontalalignment='right', va='top')
    plt.plot(vmini,mvlimit1kpc,'r')
    plt.text(vmini[50]-0.4,mvlimit1kpc[50],"$d=1000$ pc", horizontalalignment='right', va='top')
    plt.plot(vmini,mvlimit10kpc,'g')
    plt.text(vmini[50]-0.4,mvlimit10kpc[50],"$d=10000$ pc", horizontalalignment='right', va='top')
    ax=plt.gca()
    ax.set_ylim(ax.get_ylim()[::-1])
    plt.xlabel("$(V-I)$")
    plt.ylabel("$M_V$")
    if (rvs):
        plt.title("Distance limits for $G_\\mathrm{RVS}"+"={0}$".format(gmag))
    else:
        plt.title("Distance limits for $G={0}$".format(gmag))

    if (pdf):
        plt.savefig('GaiaSurveyLimits.pdf')
    elif (png):
        plt.savefig('GaiaSurveyLimits.png')
    else:
        plt.show()
def function[makePlot, parameter[gmag, pdf, png, rvs]]: constant[ Make a plot of a Mv vs (V-I) colour magnitude diagram containing lines of constant distance for stars at G=20. This will give an idea of the reach of Gaia. Parameters ---------- args - command line arguments ] variable[vmini] assign[=] call[name[np].linspace, parameter[<ast.UnaryOp object at 0x7da1b26ae5f0>, constant[4.0], constant[100]]] if name[rvs] begin[:] variable[gminv] assign[=] <ast.UnaryOp object at 0x7da1b26ae140> variable[mvlimit100pc] assign[=] binary_operation[binary_operation[binary_operation[name[gmag] - binary_operation[constant[5.0] * call[name[np].log10, parameter[constant[100.0]]]]] + constant[5.0]] - name[gminv]] variable[mvlimit1kpc] assign[=] binary_operation[binary_operation[binary_operation[name[gmag] - binary_operation[constant[5.0] * call[name[np].log10, parameter[constant[1000.0]]]]] + constant[5.0]] - name[gminv]] variable[mvlimit10kpc] assign[=] binary_operation[binary_operation[binary_operation[name[gmag] - binary_operation[constant[5.0] * call[name[np].log10, parameter[constant[10000.0]]]]] + constant[5.0]] - name[gminv]] variable[fig] assign[=] call[name[plt].figure, parameter[]] call[name[plt].plot, parameter[name[vmini], name[mvlimit100pc], constant[b]]] call[name[plt].text, parameter[binary_operation[call[name[vmini]][constant[50]] - constant[0.4]], call[name[mvlimit100pc]][constant[50]], constant[$d=100$ pc]]] call[name[plt].plot, parameter[name[vmini], name[mvlimit1kpc], constant[r]]] call[name[plt].text, parameter[binary_operation[call[name[vmini]][constant[50]] - constant[0.4]], call[name[mvlimit1kpc]][constant[50]], constant[$d=1000$ pc]]] call[name[plt].plot, parameter[name[vmini], name[mvlimit10kpc], constant[g]]] call[name[plt].text, parameter[binary_operation[call[name[vmini]][constant[50]] - constant[0.4]], call[name[mvlimit10kpc]][constant[50]], constant[$d=10000$ pc]]] variable[ax] assign[=] call[name[plt].gca, parameter[]] call[name[ax].set_ylim, parameter[call[call[name[ax].get_ylim, parameter[]]][<ast.Slice object at 0x7da1b26afa00>]]] call[name[plt].xlabel, parameter[constant[$(V-I)$]]] call[name[plt].ylabel, parameter[constant[$M_V$]]] if name[rvs] begin[:] call[name[plt].title, parameter[binary_operation[constant[Distance limits for $G_\mathrm{RVS}] + call[constant[={0}$].format, parameter[name[gmag]]]]]] if call[name[args]][constant[pdfOutput]] begin[:] call[name[plt].savefig, parameter[constant[GaiaSurveyLimits.pdf]]]
keyword[def] identifier[makePlot] ( identifier[gmag] , identifier[pdf] = keyword[False] , identifier[png] = keyword[False] , identifier[rvs] = keyword[False] ): literal[string] identifier[vmini] = identifier[np] . identifier[linspace] (- literal[int] , literal[int] , literal[int] ) keyword[if] ( identifier[rvs] ): identifier[gminv] =- identifier[vminGrvsFromVmini] ( identifier[vmini] ) keyword[else] : identifier[gminv] = identifier[gminvFromVmini] ( identifier[vmini] ) identifier[mvlimit100pc] = identifier[gmag] - literal[int] * identifier[np] . identifier[log10] ( literal[int] )+ literal[int] - identifier[gminv] identifier[mvlimit1kpc] = identifier[gmag] - literal[int] * identifier[np] . identifier[log10] ( literal[int] )+ literal[int] - identifier[gminv] identifier[mvlimit10kpc] = identifier[gmag] - literal[int] * identifier[np] . identifier[log10] ( literal[int] )+ literal[int] - identifier[gminv] identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] )) identifier[plt] . identifier[plot] ( identifier[vmini] , identifier[mvlimit100pc] , literal[string] ) identifier[plt] . identifier[text] ( identifier[vmini] [ literal[int] ]- literal[int] , identifier[mvlimit100pc] [ literal[int] ], literal[string] , identifier[horizontalalignment] = literal[string] , identifier[va] = literal[string] ) identifier[plt] . identifier[plot] ( identifier[vmini] , identifier[mvlimit1kpc] , literal[string] ) identifier[plt] . identifier[text] ( identifier[vmini] [ literal[int] ]- literal[int] , identifier[mvlimit1kpc] [ literal[int] ], literal[string] , identifier[horizontalalignment] = literal[string] , identifier[va] = literal[string] ) identifier[plt] . identifier[plot] ( identifier[vmini] , identifier[mvlimit10kpc] , literal[string] ) identifier[plt] . identifier[text] ( identifier[vmini] [ literal[int] ]- literal[int] , identifier[mvlimit10kpc] [ literal[int] ], literal[string] , identifier[horizontalalignment] = literal[string] , identifier[va] = literal[string] ) identifier[ax] = identifier[plt] . identifier[gca] () identifier[ax] . identifier[set_ylim] ( identifier[ax] . identifier[get_ylim] ()[::- literal[int] ]) identifier[plt] . identifier[xlabel] ( literal[string] ) identifier[plt] . identifier[ylabel] ( literal[string] ) keyword[if] ( identifier[rvs] ): identifier[plt] . identifier[title] ( literal[string] + literal[string] . identifier[format] ( identifier[gmag] )) keyword[else] : identifier[plt] . identifier[title] ( literal[string] . identifier[format] ( identifier[gmag] )) keyword[if] ( identifier[args] [ literal[string] ]): identifier[plt] . identifier[savefig] ( literal[string] ) keyword[elif] ( identifier[args] [ literal[string] ]): identifier[plt] . identifier[savefig] ( literal[string] ) keyword[else] : identifier[plt] . identifier[show] ()
def makePlot(gmag, pdf=False, png=False, rvs=False): """ Make a plot of a Mv vs (V-I) colour magnitude diagram containing lines of constant distance for stars at G=20. This will give an idea of the reach of Gaia. Parameters ---------- gmag - Gaia G-band (or G_RVS, if rvs is True) magnitude limit pdf - save the figure as PDF if True png - save the figure as PNG if True rvs - interpret gmag as the G_RVS limit if True """ vmini = np.linspace(-0.5, 4.0, 100) if rvs: gminv = -vminGrvsFromVmini(vmini) # depends on [control=['if'], data=[]] else: gminv = gminvFromVmini(vmini) mvlimit100pc = gmag - 5.0 * np.log10(100.0) + 5.0 - gminv mvlimit1kpc = gmag - 5.0 * np.log10(1000.0) + 5.0 - gminv mvlimit10kpc = gmag - 5.0 * np.log10(10000.0) + 5.0 - gminv fig = plt.figure(figsize=(8, 8)) plt.plot(vmini, mvlimit100pc, 'b') plt.text(vmini[50] - 0.4, mvlimit100pc[50], '$d=100$ pc', horizontalalignment='right', va='top') plt.plot(vmini, mvlimit1kpc, 'r') plt.text(vmini[50] - 0.4, mvlimit1kpc[50], '$d=1000$ pc', horizontalalignment='right', va='top') plt.plot(vmini, mvlimit10kpc, 'g') plt.text(vmini[50] - 0.4, mvlimit10kpc[50], '$d=10000$ pc', horizontalalignment='right', va='top') ax = plt.gca() ax.set_ylim(ax.get_ylim()[::-1]) plt.xlabel('$(V-I)$') plt.ylabel('$M_V$') if rvs: plt.title('Distance limits for $G_\\mathrm{RVS}' + '={0}$'.format(gmag)) # depends on [control=['if'], data=[]] else: plt.title('Distance limits for $G={0}$'.format(gmag)) if pdf: plt.savefig('GaiaSurveyLimits.pdf') # depends on [control=['if'], data=[]] elif png: plt.savefig('GaiaSurveyLimits.png') # depends on [control=['if'], data=[]] else: plt.show()
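A minimal driver for the plot above, assuming numpy/matplotlib plus the package's photometric helpers (`gminvFromVmini`, `vminGrvsFromVmini`) are importable in the surrounding module; the magnitude values are illustrative:

# Survey-limit diagram for the nominal G = 20 limit, written to PNG.
makePlot(20.0, png=True)

# Same diagram against a G_RVS limit, shown interactively.
makePlot(16.2, rvs=True)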
def load_module(self, name): """ Load the ``pygal.maps.name`` module from the previously loaded plugin """ if name not in sys.modules: sys.modules[name] = getattr(maps, name.split('.')[2]) return sys.modules[name]
def function[load_module, parameter[self, name]]: constant[ Load the ``pygal.maps.name`` module from the previously loaded plugin ] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[sys].modules] begin[:] call[name[sys].modules][name[name]] assign[=] call[name[getattr], parameter[name[maps], call[call[name[name].split, parameter[constant[.]]]][constant[2]]]] return[call[name[sys].modules][name[name]]]
keyword[def] identifier[load_module] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[sys] . identifier[modules] : identifier[sys] . identifier[modules] [ identifier[name] ]= identifier[getattr] ( identifier[maps] , identifier[name] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[return] identifier[sys] . identifier[modules] [ identifier[name] ]
def load_module(self, name): """ Load the ``pygal.maps.name`` module from the previously loaded plugin """ if name not in sys.modules: sys.modules[name] = getattr(maps, name.split('.')[2]) # depends on [control=['if'], data=['name']] return sys.modules[name]
def generate(self, batch_size, length, samples=1, fix_static=False, fix_dynamic=False): """Generate new sequences. Args: batch_size: Number of sequences to generate. length: Number of timesteps to generate for each sequence. samples: Number of samples to draw from the latent distributions. fix_static: Boolean for whether or not to share the same random sample of the static latent variable `f` from its prior across all examples. fix_dynamic: Boolean for whether or not to share the same random sample of the dynamic latent variable `z_{1:T}` from its prior across all examples. Returns: A batched Independent distribution wrapping a set of Normal distributions over the pixels of the generated sequences, where the Independent distribution has event shape [height, width, channels], batch shape [samples, batch_size, timesteps], and sample shape [sample_shape, samples, batch_size, timesteps, height, width, channels]. """ static_sample, _ = self.sample_static_prior(samples, batch_size, fix_static) dynamic_sample, _ = self.sample_dynamic_prior(samples, batch_size, length, fix_dynamic) likelihood = self.decoder((dynamic_sample, static_sample)) return likelihood
def function[generate, parameter[self, batch_size, length, samples, fix_static, fix_dynamic]]: constant[Generate new sequences. Args: batch_size: Number of sequences to generate. length: Number of timesteps to generate for each sequence. samples: Number of samples to draw from the latent distributions. fix_static: Boolean for whether or not to share the same random sample of the static latent variable `f` from its prior across all examples. fix_dynamic: Boolean for whether or not to share the same random sample of the dynamic latent variable `z_{1:T}` from its prior across all examples. Returns: A batched Independent distribution wrapping a set of Normal distributions over the pixels of the generated sequences, where the Independent distribution has event shape [height, width, channels], batch shape [samples, batch_size, timesteps], and sample shape [sample_shape, samples, batch_size, timesteps, height, width, channels]. ] <ast.Tuple object at 0x7da1b0356680> assign[=] call[name[self].sample_static_prior, parameter[name[samples], name[batch_size], name[fix_static]]] <ast.Tuple object at 0x7da1b0356350> assign[=] call[name[self].sample_dynamic_prior, parameter[name[samples], name[batch_size], name[length], name[fix_dynamic]]] variable[likelihood] assign[=] call[name[self].decoder, parameter[tuple[[<ast.Name object at 0x7da1b0354520>, <ast.Name object at 0x7da1b03543d0>]]]] return[name[likelihood]]
keyword[def] identifier[generate] ( identifier[self] , identifier[batch_size] , identifier[length] , identifier[samples] = literal[int] , identifier[fix_static] = keyword[False] , identifier[fix_dynamic] = keyword[False] ): literal[string] identifier[static_sample] , identifier[_] = identifier[self] . identifier[sample_static_prior] ( identifier[samples] , identifier[batch_size] , identifier[fix_static] ) identifier[dynamic_sample] , identifier[_] = identifier[self] . identifier[sample_dynamic_prior] ( identifier[samples] , identifier[batch_size] , identifier[length] , identifier[fix_dynamic] ) identifier[likelihood] = identifier[self] . identifier[decoder] (( identifier[dynamic_sample] , identifier[static_sample] )) keyword[return] identifier[likelihood]
def generate(self, batch_size, length, samples=1, fix_static=False, fix_dynamic=False): """Generate new sequences. Args: batch_size: Number of sequences to generate. length: Number of timesteps to generate for each sequence. samples: Number of samples to draw from the latent distributions. fix_static: Boolean for whether or not to share the same random sample of the static latent variable `f` from its prior across all examples. fix_dynamic: Boolean for whether or not to share the same random sample of the dynamic latent variable `z_{1:T}` from its prior across all examples. Returns: A batched Independent distribution wrapping a set of Normal distributions over the pixels of the generated sequences, where the Independent distribution has event shape [height, width, channels], batch shape [samples, batch_size, timesteps], and sample shape [sample_shape, samples, batch_size, timesteps, height, width, channels]. """ (static_sample, _) = self.sample_static_prior(samples, batch_size, fix_static) (dynamic_sample, _) = self.sample_dynamic_prior(samples, batch_size, length, fix_dynamic) likelihood = self.decoder((dynamic_sample, static_sample)) return likelihood
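generate above strings three pieces together: a static-prior sample f, a dynamic-prior sample z_{1:T}, and the decoder. A shape-only NumPy sketch of those samples (all dimensions are illustrative; the real model returns a TFP Independent(Normal) distribution over pixels):

import numpy as np

# Shape-only sketch: 2 latent samples, batch of 3, 5 timesteps.
samples, batch_size, length = 2, 3, 5
static_dim, dynamic_dim = 4, 6

rng = np.random.default_rng(0)
# fix_static=False: an independent static latent f per batch element.
static_sample = rng.standard_normal((samples, batch_size, static_dim))
# fix_static=True would instead draw once and share across the batch:
shared_f = np.broadcast_to(
    rng.standard_normal((samples, 1, static_dim)),
    (samples, batch_size, static_dim))
dynamic_sample = rng.standard_normal((samples, batch_size, length, dynamic_dim))
print(static_sample.shape, shared_f.shape, dynamic_sample.shape)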
def delete_object(
        request, model, post_delete_redirect, object_id=None, slug=None,
        slug_field='slug', template_name=None, template_loader=loader,
        extra_context=None, login_required=False, context_processors=None,
        template_object_name='object'):
    """
    Generic object-delete function.

    The given template will be used to confirm deletion if this view is
    fetched using GET; for safety, deletion will only be performed if this
    view is POSTed.

    Templates: ``<app_label>/<model_name>_confirm_delete.html``
    Context:
        object
            the original object being deleted
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated:
        return redirect_to_login(request.path)

    obj = lookup_object(model, object_id, slug, slug_field)

    if request.method == 'POST':
        obj.delete()
        msg = ugettext("The %(verbose_name)s was deleted.") %\
            {"verbose_name": model._meta.verbose_name}
        messages.success(request, msg, fail_silently=True)
        return HttpResponseRedirect(post_delete_redirect)
    else:
        if not template_name:
            template_name = "%s/%s_confirm_delete.html" % (
                model._meta.app_label, model._meta.object_name.lower())
        t = template_loader.get_template(template_name)
        c = {
            template_object_name: obj,
        }
        apply_extra_context(extra_context, c)
        response = HttpResponse(t.render(context=c, request=request))
        return response
def function[delete_object, parameter[request, model, post_delete_redirect, object_id, slug, slug_field, template_name, template_loader, extra_context, login_required, context_processors, template_object_name]]:
constant[ Generic object-delete function. The given template will be used to confirm deletion if this view is fetched using GET; for safety, deletion will only be performed if this view is POSTed. Templates: ``<app_label>/<model_name>_confirm_delete.html`` Context: object the original object being deleted ]
if compare[name[extra_context] is constant[None]] begin[:]
variable[extra_context] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da20c9908e0> begin[:]
return[call[name[redirect_to_login], parameter[name[request].path]]]
variable[obj] assign[=] call[name[lookup_object], parameter[name[model], name[object_id], name[slug], name[slug_field]]]
if compare[name[request].method equal[==] constant[POST]] begin[:]
call[name[obj].delete, parameter[]]
variable[msg] assign[=] binary_operation[call[name[ugettext], parameter[constant[The %(verbose_name)s was deleted.]]] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da20c991420>], [<ast.Attribute object at 0x7da20c992410>]]]
call[name[messages].success, parameter[name[request], name[msg]]]
return[call[name[HttpResponseRedirect], parameter[name[post_delete_redirect]]]]
keyword[def] identifier[delete_object] ( identifier[request] , identifier[model] , identifier[post_delete_redirect] , identifier[object_id] = keyword[None] , identifier[slug] = keyword[None] , identifier[slug_field] = literal[string] , identifier[template_name] = keyword[None] , identifier[template_loader] = identifier[loader] , identifier[extra_context] = keyword[None] , identifier[login_required] = keyword[False] , identifier[context_processors] = keyword[None] , identifier[template_object_name] = literal[string] ): literal[string] keyword[if] identifier[extra_context] keyword[is] keyword[None] : identifier[extra_context] ={} keyword[if] identifier[login_required] keyword[and] keyword[not] identifier[request] . identifier[user] . identifier[is_authenticated] : keyword[return] identifier[redirect_to_login] ( identifier[request] . identifier[path] ) identifier[obj] = identifier[lookup_object] ( identifier[model] , identifier[object_id] , identifier[slug] , identifier[slug_field] ) keyword[if] identifier[request] . identifier[method] == literal[string] : identifier[obj] . identifier[delete] () identifier[msg] = identifier[ugettext] ( literal[string] )%{ literal[string] : identifier[model] . identifier[_meta] . identifier[verbose_name] } identifier[messages] . identifier[success] ( identifier[request] , identifier[msg] , identifier[fail_silently] = keyword[True] ) keyword[return] identifier[HttpResponseRedirect] ( identifier[post_delete_redirect] ) keyword[else] : keyword[if] keyword[not] identifier[template_name] : identifier[template_name] = literal[string] %( identifier[model] . identifier[_meta] . identifier[app_label] , identifier[model] . identifier[_meta] . identifier[object_name] . identifier[lower] ()) identifier[t] = identifier[template_loader] . identifier[get_template] ( identifier[template_name] ) identifier[c] ={ identifier[template_object_name] : identifier[obj] , } identifier[apply_extra_context] ( identifier[extra_context] , identifier[c] ) identifier[response] = identifier[HttpResponse] ( identifier[t] . identifier[render] ( identifier[context] = identifier[c] , identifier[request] = identifier[request] )) keyword[return] identifier[response]
def delete_object(request, model, post_delete_redirect, object_id=None, slug=None, slug_field='slug', template_name=None, template_loader=loader, extra_context=None, login_required=False, context_processors=None, template_object_name='object'):
    """
    Generic object-delete function.

    The given template will be used to confirm deletion if this view is
    fetched using GET; for safety, deletion will only be performed if this
    view is POSTed.

    Templates: ``<app_label>/<model_name>_confirm_delete.html``
    Context:
        object
            the original object being deleted
    """
    if extra_context is None:
        extra_context = {} # depends on [control=['if'], data=['extra_context']]
    if login_required and (not request.user.is_authenticated):
        return redirect_to_login(request.path) # depends on [control=['if'], data=[]]
    obj = lookup_object(model, object_id, slug, slug_field)
    if request.method == 'POST':
        obj.delete()
        msg = ugettext('The %(verbose_name)s was deleted.') % {'verbose_name': model._meta.verbose_name}
        messages.success(request, msg, fail_silently=True)
        return HttpResponseRedirect(post_delete_redirect) # depends on [control=['if'], data=[]]
    else:
        if not template_name:
            template_name = '%s/%s_confirm_delete.html' % (model._meta.app_label, model._meta.object_name.lower()) # depends on [control=['if'], data=[]]
        t = template_loader.get_template(template_name)
        c = {template_object_name: obj}
        apply_extra_context(extra_context, c)
        response = HttpResponse(t.render(context=c, request=request))
        return response
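A hedged sketch of wiring the generic view above into a Django URLconf. The app, model, and routes are all hypothetical; the extra dictionary is passed through as keyword arguments to the view, so GET renders myapp/article_confirm_delete.html and POST deletes and redirects:

# Hypothetical URLconf wiring; 'myapp', 'Article' and the paths are
# illustrative, not from the source.
from django.urls import path
from myapp.models import Article
from myapp.generic_views import delete_object  # assumed home of the view above

urlpatterns = [
    path('articles/<int:object_id>/delete/', delete_object, {
        'model': Article,
        'post_delete_redirect': '/articles/',
        'login_required': True,
        'template_object_name': 'article',  # template sees {{ article }}
    }),
]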
def get_redis_connection(config, use_strict_redis=False): """ Returns a redis connection from a connection config """ redis_cls = redis.StrictRedis if use_strict_redis else redis.Redis if 'URL' in config: return redis_cls.from_url(config['URL'], db=config.get('DB')) if 'USE_REDIS_CACHE' in config.keys(): try: # Assume that we're using django-redis from django_redis import get_redis_connection as get_redis return get_redis(config['USE_REDIS_CACHE']) except ImportError: pass from django.core.cache import caches cache = caches[config['USE_REDIS_CACHE']] # We're using django-redis-cache try: return cache._client except AttributeError: # For django-redis-cache > 0.13.1 return cache.get_master_client() if 'UNIX_SOCKET_PATH' in config: return redis_cls(unix_socket_path=config['UNIX_SOCKET_PATH'], db=config['DB']) if 'SENTINELS' in config: sentinel_kwargs = { 'db': config.get('DB'), 'password': config.get('PASSWORD'), 'socket_timeout': config.get('SOCKET_TIMEOUT'), } sentinel_kwargs.update(config.get('CONNECTION_KWARGS', {})) sentinel = Sentinel(config['SENTINELS'], **sentinel_kwargs) return sentinel.master_for( service_name=config['MASTER_NAME'], redis_class=redis_cls, ) return redis_cls(host=config['HOST'], port=config['PORT'], db=config['DB'], password=config.get('PASSWORD'), ssl=config.get('SSL', False))
def function[get_redis_connection, parameter[config, use_strict_redis]]: constant[ Returns a redis connection from a connection config ] variable[redis_cls] assign[=] <ast.IfExp object at 0x7da20e9558a0> if compare[constant[URL] in name[config]] begin[:] return[call[name[redis_cls].from_url, parameter[call[name[config]][constant[URL]]]]] if compare[constant[USE_REDIS_CACHE] in call[name[config].keys, parameter[]]] begin[:] <ast.Try object at 0x7da1b1d5e500> from relative_module[django.core.cache] import module[caches] variable[cache] assign[=] call[name[caches]][call[name[config]][constant[USE_REDIS_CACHE]]] <ast.Try object at 0x7da1b1d5c0d0> if compare[constant[UNIX_SOCKET_PATH] in name[config]] begin[:] return[call[name[redis_cls], parameter[]]] if compare[constant[SENTINELS] in name[config]] begin[:] variable[sentinel_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d5c1f0>, <ast.Constant object at 0x7da1b1d5ea10>, <ast.Constant object at 0x7da1b1d5d7b0>], [<ast.Call object at 0x7da1b1d5f9a0>, <ast.Call object at 0x7da1b1d5ed40>, <ast.Call object at 0x7da1b1d5f970>]] call[name[sentinel_kwargs].update, parameter[call[name[config].get, parameter[constant[CONNECTION_KWARGS], dictionary[[], []]]]]] variable[sentinel] assign[=] call[name[Sentinel], parameter[call[name[config]][constant[SENTINELS]]]] return[call[name[sentinel].master_for, parameter[]]] return[call[name[redis_cls], parameter[]]]
keyword[def] identifier[get_redis_connection] ( identifier[config] , identifier[use_strict_redis] = keyword[False] ): literal[string] identifier[redis_cls] = identifier[redis] . identifier[StrictRedis] keyword[if] identifier[use_strict_redis] keyword[else] identifier[redis] . identifier[Redis] keyword[if] literal[string] keyword[in] identifier[config] : keyword[return] identifier[redis_cls] . identifier[from_url] ( identifier[config] [ literal[string] ], identifier[db] = identifier[config] . identifier[get] ( literal[string] )) keyword[if] literal[string] keyword[in] identifier[config] . identifier[keys] (): keyword[try] : keyword[from] identifier[django_redis] keyword[import] identifier[get_redis_connection] keyword[as] identifier[get_redis] keyword[return] identifier[get_redis] ( identifier[config] [ literal[string] ]) keyword[except] identifier[ImportError] : keyword[pass] keyword[from] identifier[django] . identifier[core] . identifier[cache] keyword[import] identifier[caches] identifier[cache] = identifier[caches] [ identifier[config] [ literal[string] ]] keyword[try] : keyword[return] identifier[cache] . identifier[_client] keyword[except] identifier[AttributeError] : keyword[return] identifier[cache] . identifier[get_master_client] () keyword[if] literal[string] keyword[in] identifier[config] : keyword[return] identifier[redis_cls] ( identifier[unix_socket_path] = identifier[config] [ literal[string] ], identifier[db] = identifier[config] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[config] : identifier[sentinel_kwargs] ={ literal[string] : identifier[config] . identifier[get] ( literal[string] ), literal[string] : identifier[config] . identifier[get] ( literal[string] ), literal[string] : identifier[config] . identifier[get] ( literal[string] ), } identifier[sentinel_kwargs] . identifier[update] ( identifier[config] . identifier[get] ( literal[string] ,{})) identifier[sentinel] = identifier[Sentinel] ( identifier[config] [ literal[string] ],** identifier[sentinel_kwargs] ) keyword[return] identifier[sentinel] . identifier[master_for] ( identifier[service_name] = identifier[config] [ literal[string] ], identifier[redis_class] = identifier[redis_cls] , ) keyword[return] identifier[redis_cls] ( identifier[host] = identifier[config] [ literal[string] ], identifier[port] = identifier[config] [ literal[string] ], identifier[db] = identifier[config] [ literal[string] ], identifier[password] = identifier[config] . identifier[get] ( literal[string] ), identifier[ssl] = identifier[config] . identifier[get] ( literal[string] , keyword[False] ))
def get_redis_connection(config, use_strict_redis=False): """ Returns a redis connection from a connection config """ redis_cls = redis.StrictRedis if use_strict_redis else redis.Redis if 'URL' in config: return redis_cls.from_url(config['URL'], db=config.get('DB')) # depends on [control=['if'], data=['config']] if 'USE_REDIS_CACHE' in config.keys(): try: # Assume that we're using django-redis from django_redis import get_redis_connection as get_redis return get_redis(config['USE_REDIS_CACHE']) # depends on [control=['try'], data=[]] except ImportError: pass # depends on [control=['except'], data=[]] from django.core.cache import caches cache = caches[config['USE_REDIS_CACHE']] # We're using django-redis-cache try: return cache._client # depends on [control=['try'], data=[]] except AttributeError: # For django-redis-cache > 0.13.1 return cache.get_master_client() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if 'UNIX_SOCKET_PATH' in config: return redis_cls(unix_socket_path=config['UNIX_SOCKET_PATH'], db=config['DB']) # depends on [control=['if'], data=['config']] if 'SENTINELS' in config: sentinel_kwargs = {'db': config.get('DB'), 'password': config.get('PASSWORD'), 'socket_timeout': config.get('SOCKET_TIMEOUT')} sentinel_kwargs.update(config.get('CONNECTION_KWARGS', {})) sentinel = Sentinel(config['SENTINELS'], **sentinel_kwargs) return sentinel.master_for(service_name=config['MASTER_NAME'], redis_class=redis_cls) # depends on [control=['if'], data=['config']] return redis_cls(host=config['HOST'], port=config['PORT'], db=config['DB'], password=config.get('PASSWORD'), ssl=config.get('SSL', False))
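get_redis_connection above resolves a connection in priority order: URL, a named Django cache, a Unix socket, Sentinel, then plain host/port. A sketch of config dicts that would exercise each branch, assuming the function above is in scope and a Redis server is reachable (all values are illustrative):

# Illustrative config dicts, one per branch of the resolver above.
url_config = {'URL': 'redis://localhost:6379/0'}
socket_config = {'UNIX_SOCKET_PATH': '/tmp/redis.sock', 'DB': 0}
sentinel_config = {
    'SENTINELS': [('localhost', 26379)],
    'MASTER_NAME': 'mymaster',
    'DB': 0,
    'PASSWORD': None,
    'SOCKET_TIMEOUT': 0.5,
}
tcp_config = {'HOST': 'localhost', 'PORT': 6379, 'DB': 0, 'PASSWORD': None}

conn = get_redis_connection(tcp_config, use_strict_redis=True)
conn.ping()  # raises if the assumed local Redis is not running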
def get_scenario_from_nrml(oqparam, fname):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fname:
        the NRML file containing the GMFs
    :returns:
        a pair (eids, gmf array)
    """
    if not oqparam.imtls:
        oqparam.set_risk_imtls(get_risk_models(oqparam))
    imts = sorted(oqparam.imtls)
    num_imts = len(imts)
    imt_dt = numpy.dtype([(imt, F32) for imt in imts])
    gmfset = nrml.read(fname).gmfCollection.gmfSet
    eids, sitecounts = _extract_eids_sitecounts(gmfset)
    coords = sorted(sitecounts)
    oqparam.sites = [(lon, lat, 0) for lon, lat in coords]
    site_idx = {lonlat: i for i, lonlat in enumerate(coords)}
    oqparam.number_of_ground_motion_fields = num_events = len(eids)
    num_sites = len(oqparam.sites)
    gmf_by_imt = numpy.zeros((num_events, num_sites), imt_dt)
    counts = collections.Counter()
    for i, gmf in enumerate(gmfset):
        if len(gmf) != num_sites:  # there must be one node per site
            raise InvalidFile('Expected %d sites, got %d nodes in %s, line %d'
                              % (num_sites, len(gmf), fname, gmf.lineno))
        counts[gmf['ruptureId']] += 1
        imt = gmf['IMT']
        if imt == 'SA':
            imt = 'SA(%s)' % gmf['saPeriod']
        for node in gmf:
            sid = site_idx[node['lon'], node['lat']]
            gmf_by_imt[imt][i % num_events, sid] = node['gmv']
    for rupid, count in sorted(counts.items()):
        if count < num_imts:
            raise InvalidFile("Found a missing ruptureId %d in %s"
                              % (rupid, fname))
        elif count > num_imts:
            raise InvalidFile("Found a duplicated ruptureId '%s' in %s"
                              % (rupid, fname))
    expected_gmvs_per_site = num_imts * len(eids)
    for lonlat, counts in sitecounts.items():
        if counts != expected_gmvs_per_site:
            raise InvalidFile(
                '%s: expected %d gmvs at location %s, found %d' %
                (fname, expected_gmvs_per_site, lonlat, counts))
    return eids, gmf_by_imt.T
def function[get_scenario_from_nrml, parameter[oqparam, fname]]:
constant[ :param oqparam: an :class:`openquake.commonlib.oqvalidation.OqParam` instance :param fname: the NRML file containing the GMFs :returns: a pair (eids, gmf array) ]
if <ast.UnaryOp object at 0x7da1b1500b20> begin[:]
call[name[oqparam].set_risk_imtls, parameter[call[name[get_risk_models], parameter[name[oqparam]]]]]
variable[imts] assign[=] call[name[sorted], parameter[name[oqparam].imtls]]
variable[num_imts] assign[=] call[name[len], parameter[name[imts]]]
variable[imt_dt] assign[=] call[name[numpy].dtype, parameter[<ast.ListComp object at 0x7da1b1501030>]]
variable[gmfset] assign[=] call[name[nrml].read, parameter[name[fname]]].gmfCollection.gmfSet
<ast.Tuple object at 0x7da1b1503ee0> assign[=] call[name[_extract_eids_sitecounts], parameter[name[gmfset]]]
variable[coords] assign[=] call[name[sorted], parameter[name[sitecounts]]]
name[oqparam].sites assign[=] <ast.ListComp object at 0x7da1b1503c40>
variable[site_idx] assign[=] <ast.DictComp object at 0x7da1b15039d0>
name[oqparam].number_of_ground_motion_fields assign[=] call[name[len], parameter[name[eids]]]
variable[num_sites] assign[=] call[name[len], parameter[name[oqparam].sites]]
variable[gmf_by_imt] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1503430>, <ast.Name object at 0x7da1b1503400>]], name[imt_dt]]]
variable[counts] assign[=] call[name[collections].Counter, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1503250>, <ast.Name object at 0x7da1b1503220>]]] in starred[call[name[enumerate], parameter[name[gmfset]]]] begin[:]
if compare[call[name[len], parameter[name[gmf]]] not_equal[!=] name[num_sites]] begin[:]
<ast.Raise object at 0x7da1b1503010>
<ast.AugAssign object at 0x7da1b1502da0>
variable[imt] assign[=] call[name[gmf]][constant[IMT]]
if compare[name[imt] equal[==] constant[SA]] begin[:]
variable[imt] assign[=] binary_operation[constant[SA(%s)] <ast.Mod object at 0x7da2590d6920> call[name[gmf]][constant[saPeriod]]]
for taget[name[node]] in starred[name[gmf]] begin[:]
variable[sid] assign[=] call[name[site_idx]][tuple[[<ast.Subscript object at 0x7da1b15027d0>, <ast.Subscript object at 0x7da1b1502740>]]]
call[call[name[gmf_by_imt]][name[imt]]][tuple[[<ast.BinOp object at 0x7da1b1502590>, <ast.Name object at 0x7da1b1502500>]]] assign[=] call[name[node]][constant[gmv]]
for taget[tuple[[<ast.Name object at 0x7da1b15023e0>, <ast.Name object at 0x7da1b15023b0>]]] in starred[call[name[sorted], parameter[call[name[counts].items, parameter[]]]]] begin[:]
if compare[name[count] less[<] name[num_imts]] begin[:]
<ast.Raise object at 0x7da1b15012a0>
variable[expected_gmvs_per_site] assign[=] binary_operation[name[num_imts] * call[name[len], parameter[name[eids]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1501810>, <ast.Name object at 0x7da1b1501840>]]] in starred[call[name[sitecounts].items, parameter[]]] begin[:]
if compare[name[counts] not_equal[!=] name[expected_gmvs_per_site]] begin[:]
<ast.Raise object at 0x7da1b15019c0>
return[tuple[[<ast.Name object at 0x7da1b1501c00>, <ast.Attribute object at 0x7da1b1501c30>]]]
keyword[def] identifier[get_scenario_from_nrml] ( identifier[oqparam] , identifier[fname] ): literal[string] keyword[if] keyword[not] identifier[oqparam] . identifier[imtls] : identifier[oqparam] . identifier[set_risk_imtls] ( identifier[get_risk_models] ( identifier[oqparam] )) identifier[imts] = identifier[sorted] ( identifier[oqparam] . identifier[imtls] ) identifier[num_imts] = identifier[len] ( identifier[imts] ) identifier[imt_dt] = identifier[numpy] . identifier[dtype] ([( identifier[imt] , identifier[F32] ) keyword[for] identifier[imt] keyword[in] identifier[imts] ]) identifier[gmfset] = identifier[nrml] . identifier[read] ( identifier[fname] ). identifier[gmfCollection] . identifier[gmfSet] identifier[eids] , identifier[sitecounts] = identifier[_extract_eids_sitecounts] ( identifier[gmfset] ) identifier[coords] = identifier[sorted] ( identifier[sitecounts] ) identifier[oqparam] . identifier[sites] =[( identifier[lon] , identifier[lat] , literal[int] ) keyword[for] identifier[lon] , identifier[lat] keyword[in] identifier[coords] ] identifier[site_idx] ={ identifier[lonlat] : identifier[i] keyword[for] identifier[i] , identifier[lonlat] keyword[in] identifier[enumerate] ( identifier[coords] )} identifier[oqparam] . identifier[number_of_ground_motion_fields] = identifier[num_events] = identifier[len] ( identifier[eids] ) identifier[num_sites] = identifier[len] ( identifier[oqparam] . identifier[sites] ) identifier[gmf_by_imt] = identifier[numpy] . identifier[zeros] (( identifier[num_events] , identifier[num_sites] ), identifier[imt_dt] ) identifier[counts] = identifier[collections] . identifier[Counter] () keyword[for] identifier[i] , identifier[gmf] keyword[in] identifier[enumerate] ( identifier[gmfset] ): keyword[if] identifier[len] ( identifier[gmf] )!= identifier[num_sites] : keyword[raise] identifier[InvalidFile] ( literal[string] %( identifier[num_sites] , identifier[len] ( identifier[gmf] ), identifier[fname] , identifier[gmf] . identifier[lineno] )) identifier[counts] [ identifier[gmf] [ literal[string] ]]+= literal[int] identifier[imt] = identifier[gmf] [ literal[string] ] keyword[if] identifier[imt] == literal[string] : identifier[imt] = literal[string] % identifier[gmf] [ literal[string] ] keyword[for] identifier[node] keyword[in] identifier[gmf] : identifier[sid] = identifier[site_idx] [ identifier[node] [ literal[string] ], identifier[node] [ literal[string] ]] identifier[gmf_by_imt] [ identifier[imt] ][ identifier[i] % identifier[num_events] , identifier[sid] ]= identifier[node] [ literal[string] ] keyword[for] identifier[rupid] , identifier[count] keyword[in] identifier[sorted] ( identifier[counts] . identifier[items] ()): keyword[if] identifier[count] < identifier[num_imts] : keyword[raise] identifier[InvalidFile] ( literal[string] % ( identifier[rupid] , identifier[fname] )) keyword[elif] identifier[count] > identifier[num_imts] : keyword[raise] identifier[InvalidFile] ( literal[string] % ( identifier[rupid] , identifier[fname] )) identifier[expected_gmvs_per_site] = identifier[num_imts] * identifier[len] ( identifier[eids] ) keyword[for] identifier[lonlat] , identifier[counts] keyword[in] identifier[sitecounts] . identifier[items] (): keyword[if] identifier[counts] != identifier[expected_gmvs_per_site] : keyword[raise] identifier[InvalidFile] ( literal[string] % ( identifier[fname] , identifier[expected_gmvs_per_site] , identifier[lonlat] , identifier[counts] )) keyword[return] identifier[eids] , identifier[gmf_by_imt] . identifier[T]
def get_scenario_from_nrml(oqparam, fname):
    """
    :param oqparam:
        an :class:`openquake.commonlib.oqvalidation.OqParam` instance
    :param fname:
        the NRML file containing the GMFs
    :returns:
        a pair (eids, gmf array)
    """
    if not oqparam.imtls:
        oqparam.set_risk_imtls(get_risk_models(oqparam)) # depends on [control=['if'], data=[]]
    imts = sorted(oqparam.imtls)
    num_imts = len(imts)
    imt_dt = numpy.dtype([(imt, F32) for imt in imts])
    gmfset = nrml.read(fname).gmfCollection.gmfSet
    (eids, sitecounts) = _extract_eids_sitecounts(gmfset)
    coords = sorted(sitecounts)
    oqparam.sites = [(lon, lat, 0) for (lon, lat) in coords]
    site_idx = {lonlat: i for (i, lonlat) in enumerate(coords)}
    oqparam.number_of_ground_motion_fields = num_events = len(eids)
    num_sites = len(oqparam.sites)
    gmf_by_imt = numpy.zeros((num_events, num_sites), imt_dt)
    counts = collections.Counter()
    for (i, gmf) in enumerate(gmfset):
        if len(gmf) != num_sites: # there must be one node per site
            raise InvalidFile('Expected %d sites, got %d nodes in %s, line %d' % (num_sites, len(gmf), fname, gmf.lineno)) # depends on [control=['if'], data=['num_sites']]
        counts[gmf['ruptureId']] += 1
        imt = gmf['IMT']
        if imt == 'SA':
            imt = 'SA(%s)' % gmf['saPeriod'] # depends on [control=['if'], data=['imt']]
        for node in gmf:
            sid = site_idx[node['lon'], node['lat']]
            gmf_by_imt[imt][i % num_events, sid] = node['gmv'] # depends on [control=['for'], data=['node']] # depends on [control=['for'], data=[]]
    for (rupid, count) in sorted(counts.items()):
        if count < num_imts:
            raise InvalidFile('Found a missing ruptureId %d in %s' % (rupid, fname)) # depends on [control=['if'], data=[]]
        elif count > num_imts:
            raise InvalidFile("Found a duplicated ruptureId '%s' in %s" % (rupid, fname)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    expected_gmvs_per_site = num_imts * len(eids)
    for (lonlat, counts) in sitecounts.items():
        if counts != expected_gmvs_per_site:
            raise InvalidFile('%s: expected %d gmvs at location %s, found %d' % (fname, expected_gmvs_per_site, lonlat, counts)) # depends on [control=['if'], data=['counts', 'expected_gmvs_per_site']] # depends on [control=['for'], data=[]]
    return (eids, gmf_by_imt.T)
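The sanity checks in the record above are worth isolating: every ruptureId must occur exactly once per IMT, and every site must carry num_imts x num_events ground-motion values. A toy version of that bookkeeping, with illustrative IMT and event counts:

import collections

# Toy bookkeeping: 2 IMTs (e.g. 'PGA' and 'SA(0.1)') x 3 events per site.
num_imts, eids = 2, [0, 1, 2]
expected_gmvs_per_site = num_imts * len(eids)  # 6

counts = collections.Counter()
for rupture_id in eids * num_imts:  # one <gmf> node set per (event, IMT) pair
    counts[rupture_id] += 1

assert all(count == num_imts for count in counts.values())  # no missing/duplicated IDs
assert sum(counts.values()) == expected_gmvs_per_site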
def to_bigquery_ddl(self, name_case=DdlParseBase.NAME_CASE.original): """ Generate BigQuery CREATE TABLE statements :param name_case: name case type * DdlParse.NAME_CASE.original : Return to no convert * DdlParse.NAME_CASE.lower : Return to lower * DdlParse.NAME_CASE.upper : Return to upper :return: BigQuery CREATE TABLE statements """ if self.schema is None: dataset = "dataset" elif name_case == self.NAME_CASE.lower: dataset = self.schema.lower() elif name_case == self.NAME_CASE.upper: dataset = self.schema.upper() else: dataset = self.schema cols_defs = [] for col in self.columns.values(): col_name = col.get_name(name_case) if col.array_dimensional < 1: # no array data type type = col.bigquery_standard_data_type not_null = " NOT NULL" if col.not_null else "" else: # one or multiple dimensional array data type type_front = "ARRAY<" type_back = ">" for i in range(1, col.array_dimensional): type_front += "STRUCT<dimension_{} ARRAY<".format(i) type_back += ">>" type = "{}{}{}".format(type_front, col.bigquery_standard_data_type, type_back) not_null = "" cols_defs.append("{name} {type}{not_null}".format( name=col_name, type=type, not_null=not_null, )) return textwrap.dedent( """\ #standardSQL CREATE TABLE `project.{dataset}.{table}` ( {colmns_define} )""").format( dataset=dataset, table=self.get_name(name_case), colmns_define=",\n ".join(cols_defs), )
def function[to_bigquery_ddl, parameter[self, name_case]]: constant[ Generate BigQuery CREATE TABLE statements :param name_case: name case type * DdlParse.NAME_CASE.original : Return to no convert * DdlParse.NAME_CASE.lower : Return to lower * DdlParse.NAME_CASE.upper : Return to upper :return: BigQuery CREATE TABLE statements ] if compare[name[self].schema is constant[None]] begin[:] variable[dataset] assign[=] constant[dataset] variable[cols_defs] assign[=] list[[]] for taget[name[col]] in starred[call[name[self].columns.values, parameter[]]] begin[:] variable[col_name] assign[=] call[name[col].get_name, parameter[name[name_case]]] if compare[name[col].array_dimensional less[<] constant[1]] begin[:] variable[type] assign[=] name[col].bigquery_standard_data_type variable[not_null] assign[=] <ast.IfExp object at 0x7da207f004f0> call[name[cols_defs].append, parameter[call[constant[{name} {type}{not_null}].format, parameter[]]]] return[call[call[name[textwrap].dedent, parameter[constant[ #standardSQL CREATE TABLE `project.{dataset}.{table}` ( {colmns_define} )]]].format, parameter[]]]
keyword[def] identifier[to_bigquery_ddl] ( identifier[self] , identifier[name_case] = identifier[DdlParseBase] . identifier[NAME_CASE] . identifier[original] ): literal[string] keyword[if] identifier[self] . identifier[schema] keyword[is] keyword[None] : identifier[dataset] = literal[string] keyword[elif] identifier[name_case] == identifier[self] . identifier[NAME_CASE] . identifier[lower] : identifier[dataset] = identifier[self] . identifier[schema] . identifier[lower] () keyword[elif] identifier[name_case] == identifier[self] . identifier[NAME_CASE] . identifier[upper] : identifier[dataset] = identifier[self] . identifier[schema] . identifier[upper] () keyword[else] : identifier[dataset] = identifier[self] . identifier[schema] identifier[cols_defs] =[] keyword[for] identifier[col] keyword[in] identifier[self] . identifier[columns] . identifier[values] (): identifier[col_name] = identifier[col] . identifier[get_name] ( identifier[name_case] ) keyword[if] identifier[col] . identifier[array_dimensional] < literal[int] : identifier[type] = identifier[col] . identifier[bigquery_standard_data_type] identifier[not_null] = literal[string] keyword[if] identifier[col] . identifier[not_null] keyword[else] literal[string] keyword[else] : identifier[type_front] = literal[string] identifier[type_back] = literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[col] . identifier[array_dimensional] ): identifier[type_front] += literal[string] . identifier[format] ( identifier[i] ) identifier[type_back] += literal[string] identifier[type] = literal[string] . identifier[format] ( identifier[type_front] , identifier[col] . identifier[bigquery_standard_data_type] , identifier[type_back] ) identifier[not_null] = literal[string] identifier[cols_defs] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] = identifier[col_name] , identifier[type] = identifier[type] , identifier[not_null] = identifier[not_null] , )) keyword[return] identifier[textwrap] . identifier[dedent] ( literal[string] ). identifier[format] ( identifier[dataset] = identifier[dataset] , identifier[table] = identifier[self] . identifier[get_name] ( identifier[name_case] ), identifier[colmns_define] = literal[string] . identifier[join] ( identifier[cols_defs] ), )
def to_bigquery_ddl(self, name_case=DdlParseBase.NAME_CASE.original): """ Generate BigQuery CREATE TABLE statements :param name_case: name case type * DdlParse.NAME_CASE.original : Return to no convert * DdlParse.NAME_CASE.lower : Return to lower * DdlParse.NAME_CASE.upper : Return to upper :return: BigQuery CREATE TABLE statements """ if self.schema is None: dataset = 'dataset' # depends on [control=['if'], data=[]] elif name_case == self.NAME_CASE.lower: dataset = self.schema.lower() # depends on [control=['if'], data=[]] elif name_case == self.NAME_CASE.upper: dataset = self.schema.upper() # depends on [control=['if'], data=[]] else: dataset = self.schema cols_defs = [] for col in self.columns.values(): col_name = col.get_name(name_case) if col.array_dimensional < 1: # no array data type type = col.bigquery_standard_data_type not_null = ' NOT NULL' if col.not_null else '' # depends on [control=['if'], data=[]] else: # one or multiple dimensional array data type type_front = 'ARRAY<' type_back = '>' for i in range(1, col.array_dimensional): type_front += 'STRUCT<dimension_{} ARRAY<'.format(i) type_back += '>>' # depends on [control=['for'], data=['i']] type = '{}{}{}'.format(type_front, col.bigquery_standard_data_type, type_back) not_null = '' cols_defs.append('{name} {type}{not_null}'.format(name=col_name, type=type, not_null=not_null)) # depends on [control=['for'], data=['col']] return textwrap.dedent(' #standardSQL\n CREATE TABLE `project.{dataset}.{table}`\n (\n {colmns_define}\n )').format(dataset=dataset, table=self.get_name(name_case), colmns_define=',\n '.join(cols_defs))
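to_bigquery_ddl above wraps multi-dimensional arrays in nested ARRAY<STRUCT<...>> types because BigQuery rejects directly nested arrays. A standalone sketch of just that type-building loop; bigquery_array_type is a name invented for this sketch, with the base type supplied by hand:

# Standalone rewrite of the nested-array type construction above.
def bigquery_array_type(base_type, array_dimensional):
    if array_dimensional < 1:
        return base_type
    front, back = 'ARRAY<', '>'
    for i in range(1, array_dimensional):
        # BigQuery rejects ARRAY<ARRAY<...>>, so each extra dimension is
        # wrapped in a single-field STRUCT named dimension_<i>.
        front += 'STRUCT<dimension_{} ARRAY<'.format(i)
        back += '>>'
    return '{}{}{}'.format(front, base_type, back)

print(bigquery_array_type('INT64', 1))  # ARRAY<INT64>
print(bigquery_array_type('INT64', 2))  # ARRAY<STRUCT<dimension_1 ARRAY<INT64>>>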
def convert_celeba(which_format, directory, output_directory, output_filename=None): """Converts the CelebA dataset to HDF5. Converts the CelebA dataset to an HDF5 dataset compatible with :class:`fuel.datasets.CelebA`. The converted dataset is saved as 'celeba_aligned_cropped.hdf5' or 'celeba_64.hdf5', depending on the `which_format` argument. Parameters ---------- which_format : str Either 'aligned_cropped' or '64'. Determines which format to convert to. directory : str Directory in which input files reside. output_directory : str Directory in which to save the converted dataset. output_filename : str, optional Name of the saved dataset. Defaults to 'celeba_aligned_cropped.hdf5' or 'celeba_64.hdf5', depending on `which_format`. Returns ------- output_paths : tuple of str Single-element tuple containing the path to the converted dataset. """ if which_format not in ('aligned_cropped', '64'): raise ValueError("CelebA format needs to be either " "'aligned_cropped' or '64'.") if not output_filename: output_filename = 'celeba_{}.hdf5'.format(which_format) if which_format == 'aligned_cropped': return convert_celeba_aligned_cropped( directory, output_directory, output_filename) else: return convert_celeba_64( directory, output_directory, output_filename)
def function[convert_celeba, parameter[which_format, directory, output_directory, output_filename]]: constant[Converts the CelebA dataset to HDF5. Converts the CelebA dataset to an HDF5 dataset compatible with :class:`fuel.datasets.CelebA`. The converted dataset is saved as 'celeba_aligned_cropped.hdf5' or 'celeba_64.hdf5', depending on the `which_format` argument. Parameters ---------- which_format : str Either 'aligned_cropped' or '64'. Determines which format to convert to. directory : str Directory in which input files reside. output_directory : str Directory in which to save the converted dataset. output_filename : str, optional Name of the saved dataset. Defaults to 'celeba_aligned_cropped.hdf5' or 'celeba_64.hdf5', depending on `which_format`. Returns ------- output_paths : tuple of str Single-element tuple containing the path to the converted dataset. ] if compare[name[which_format] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18f720430>, <ast.Constant object at 0x7da18f7236d0>]]] begin[:] <ast.Raise object at 0x7da18f7201c0> if <ast.UnaryOp object at 0x7da18f720d30> begin[:] variable[output_filename] assign[=] call[constant[celeba_{}.hdf5].format, parameter[name[which_format]]] if compare[name[which_format] equal[==] constant[aligned_cropped]] begin[:] return[call[name[convert_celeba_aligned_cropped], parameter[name[directory], name[output_directory], name[output_filename]]]]
keyword[def] identifier[convert_celeba] ( identifier[which_format] , identifier[directory] , identifier[output_directory] , identifier[output_filename] = keyword[None] ): literal[string] keyword[if] identifier[which_format] keyword[not] keyword[in] ( literal[string] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] keyword[not] identifier[output_filename] : identifier[output_filename] = literal[string] . identifier[format] ( identifier[which_format] ) keyword[if] identifier[which_format] == literal[string] : keyword[return] identifier[convert_celeba_aligned_cropped] ( identifier[directory] , identifier[output_directory] , identifier[output_filename] ) keyword[else] : keyword[return] identifier[convert_celeba_64] ( identifier[directory] , identifier[output_directory] , identifier[output_filename] )
def convert_celeba(which_format, directory, output_directory, output_filename=None): """Converts the CelebA dataset to HDF5. Converts the CelebA dataset to an HDF5 dataset compatible with :class:`fuel.datasets.CelebA`. The converted dataset is saved as 'celeba_aligned_cropped.hdf5' or 'celeba_64.hdf5', depending on the `which_format` argument. Parameters ---------- which_format : str Either 'aligned_cropped' or '64'. Determines which format to convert to. directory : str Directory in which input files reside. output_directory : str Directory in which to save the converted dataset. output_filename : str, optional Name of the saved dataset. Defaults to 'celeba_aligned_cropped.hdf5' or 'celeba_64.hdf5', depending on `which_format`. Returns ------- output_paths : tuple of str Single-element tuple containing the path to the converted dataset. """ if which_format not in ('aligned_cropped', '64'): raise ValueError("CelebA format needs to be either 'aligned_cropped' or '64'.") # depends on [control=['if'], data=[]] if not output_filename: output_filename = 'celeba_{}.hdf5'.format(which_format) # depends on [control=['if'], data=[]] if which_format == 'aligned_cropped': return convert_celeba_aligned_cropped(directory, output_directory, output_filename) # depends on [control=['if'], data=[]] else: return convert_celeba_64(directory, output_directory, output_filename)
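A hedged usage sketch for the converter above; the directories are hypothetical, the raw CelebA archives are assumed to already sit in `directory`, and the heavy lifting is done by the convert_celeba_* workers it dispatches to:

# Hypothetical invocation with illustrative paths.
output_paths = convert_celeba(
    which_format='64',
    directory='/data/celeba_raw',
    output_directory='/data/fuel',
)
print(output_paths)  # per the docstring: ('/data/fuel/celeba_64.hdf5',)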
def getUniqueFeaturesLocationsInObject(self, name):
    """
    Return two sets. The first set contains the unique location Ids in the
    object. The second set contains the unique feature Ids in the object.
    """
    uniqueFeatures = set()
    uniqueLocations = set()
    for pair in self.objects[name]:
      uniqueLocations = uniqueLocations.union({pair[0]})
      uniqueFeatures = uniqueFeatures.union({pair[1]})

    return uniqueLocations, uniqueFeatures
def function[getUniqueFeaturesLocationsInObject, parameter[self, name]]:
constant[ Return two sets. The first set contains the unique location Ids in the object. The second set contains the unique feature Ids in the object. ]
variable[uniqueFeatures] assign[=] call[name[set], parameter[]]
variable[uniqueLocations] assign[=] call[name[set], parameter[]]
for taget[name[pair]] in starred[call[name[self].objects][name[name]]] begin[:]
variable[uniqueLocations] assign[=] call[name[uniqueLocations].union, parameter[<ast.Set object at 0x7da1b08d51b0>]]
variable[uniqueFeatures] assign[=] call[name[uniqueFeatures].union, parameter[<ast.Set object at 0x7da1b08d53f0>]]
return[tuple[[<ast.Name object at 0x7da1b088a710>, <ast.Name object at 0x7da1b088aaa0>]]]
keyword[def] identifier[getUniqueFeaturesLocationsInObject] ( identifier[self] , identifier[name] ): literal[string] identifier[uniqueFeatures] = identifier[set] () identifier[uniqueLocations] = identifier[set] () keyword[for] identifier[pair] keyword[in] identifier[self] . identifier[objects] [ identifier[name] ]: identifier[uniqueLocations] = identifier[uniqueLocations] . identifier[union] ({ identifier[pair] [ literal[int] ]}) identifier[uniqueFeatures] = identifier[uniqueFeatures] . identifier[union] ({ identifier[pair] [ literal[int] ]}) keyword[return] identifier[uniqueLocations] , identifier[uniqueFeatures]
def getUniqueFeaturesLocationsInObject(self, name):
    """
    Return two sets. The first set contains the unique location Ids in the
    object. The second set contains the unique feature Ids in the object.
    """
    uniqueFeatures = set()
    uniqueLocations = set()
    for pair in self.objects[name]:
        uniqueLocations = uniqueLocations.union({pair[0]})
        uniqueFeatures = uniqueFeatures.union({pair[1]}) # depends on [control=['for'], data=['pair']]
    return (uniqueLocations, uniqueFeatures)
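A tiny worked example of the pair-unzipping above, with a self-contained stand-in for whatever class owns the method (the objects attribute maps a name to (locationId, featureId) pairs):

# Hypothetical minimal host class mirroring the method above.
class ObjectStore:
    def __init__(self):
        self.objects = {'cup': [(0, 7), (1, 7), (2, 9)]}

    def getUniqueFeaturesLocationsInObject(self, name):
        uniqueFeatures, uniqueLocations = set(), set()
        for pair in self.objects[name]:
            uniqueLocations = uniqueLocations.union({pair[0]})
            uniqueFeatures = uniqueFeatures.union({pair[1]})
        return uniqueLocations, uniqueFeatures

locs, feats = ObjectStore().getUniqueFeaturesLocationsInObject('cup')
print(locs, feats)  # {0, 1, 2} and {7, 9}; the repeated feature 7 collapses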
def _set_bin_view(self, session): """Sets the underlying bin view to match current view""" if self._bin_view == COMPARATIVE: try: session.use_comparative_bin_view() except AttributeError: pass else: try: session.use_plenary_bin_view() except AttributeError: pass
def function[_set_bin_view, parameter[self, session]]: constant[Sets the underlying bin view to match current view] if compare[name[self]._bin_view equal[==] name[COMPARATIVE]] begin[:] <ast.Try object at 0x7da207f01de0>
keyword[def] identifier[_set_bin_view] ( identifier[self] , identifier[session] ): literal[string] keyword[if] identifier[self] . identifier[_bin_view] == identifier[COMPARATIVE] : keyword[try] : identifier[session] . identifier[use_comparative_bin_view] () keyword[except] identifier[AttributeError] : keyword[pass] keyword[else] : keyword[try] : identifier[session] . identifier[use_plenary_bin_view] () keyword[except] identifier[AttributeError] : keyword[pass]
def _set_bin_view(self, session): """Sets the underlying bin view to match current view""" if self._bin_view == COMPARATIVE: try: session.use_comparative_bin_view() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: try: session.use_plenary_bin_view() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]]
def write_lammps_inputs(output_dir, script_template, settings=None, data=None, script_filename="in.lammps", make_dir_if_not_present=True, **kwargs): """ Writes input files for a LAMMPS run. Input script is constructed from a str template with placeholders to be filled by custom settings. Data file is either written from a LammpsData instance or copied from an existing file if read_data cmd is inspected in the input script. Other supporting files are not handled at the moment. Args: output_dir (str): Directory to output the input files. script_template (str): String template for input script with placeholders. The format for placeholders has to be '$variable_name', e.g., '$temperature' settings (dict): Contains values to be written to the placeholders, e.g., {'temperature': 1}. Default to None. data (LammpsData or str): Data file as a LammpsData instance or path to an existing data file. Default to None, i.e., no data file supplied. Useful only when read_data cmd is in the script. script_filename (str): Filename for the input script. make_dir_if_not_present (bool): Set to True if you want the directory (and the whole path) to be created if it is not present. **kwargs: kwargs supported by LammpsData.write_file. Examples: >>> eam_template = '''units metal ... atom_style atomic ... ... lattice fcc 3.615 ... region box block 0 20 0 20 0 20 ... create_box 1 box ... create_atoms 1 box ... ... pair_style eam ... pair_coeff 1 1 Cu_u3.eam ... ... velocity all create $temperature 376847 loop geom ... ... neighbor 1.0 bin ... neigh_modify delay 5 every 1 ... ... fix 1 all nvt temp $temperature $temperature 0.1 ... ... timestep 0.005 ... ... run $nsteps''' >>> write_lammps_inputs('.', eam_template, settings={'temperature': 1600.0, 'nsteps': 100}) >>> with open('in.lammps') as f: ... script = f.read() ... >>> print(script) units metal atom_style atomic lattice fcc 3.615 region box block 0 20 0 20 0 20 create_box 1 box create_atoms 1 box pair_style eam pair_coeff 1 1 Cu_u3.eam velocity all create 1600.0 376847 loop geom neighbor 1.0 bin neigh_modify delay 5 every 1 fix 1 all nvt temp 1600.0 1600.0 0.1 timestep 0.005 run 100 """ variables = {} if settings is None else settings template = Template(script_template) input_script = template.safe_substitute(**variables) if make_dir_if_not_present and not os.path.exists(output_dir): os.makedirs(output_dir) with open(os.path.join(output_dir, script_filename), "w") as f: f.write(input_script) read_data = re.search(r"read_data\s+(.*)\n", input_script) if read_data: data_filename = read_data.group(1).split()[0] if isinstance(data, LammpsData): data.write_file(os.path.join(output_dir, data_filename), **kwargs) elif isinstance(data, str) and os.path.exists(data): shutil.copyfile(data, os.path.join(output_dir, data_filename)) else: warnings.warn("No data file supplied. Skip writing %s." % data_filename)
def function[write_lammps_inputs, parameter[output_dir, script_template, settings, data, script_filename, make_dir_if_not_present]]: constant[ Writes input files for a LAMMPS run. Input script is constructed from a str template with placeholders to be filled by custom settings. Data file is either written from a LammpsData instance or copied from an existing file if read_data cmd is inspected in the input script. Other supporting files are not handled at the moment. Args: output_dir (str): Directory to output the input files. script_template (str): String template for input script with placeholders. The format for placeholders has to be '$variable_name', e.g., '$temperature' settings (dict): Contains values to be written to the placeholders, e.g., {'temperature': 1}. Default to None. data (LammpsData or str): Data file as a LammpsData instance or path to an existing data file. Default to None, i.e., no data file supplied. Useful only when read_data cmd is in the script. script_filename (str): Filename for the input script. make_dir_if_not_present (bool): Set to True if you want the directory (and the whole path) to be created if it is not present. **kwargs: kwargs supported by LammpsData.write_file. Examples: >>> eam_template = '''units metal ... atom_style atomic ... ... lattice fcc 3.615 ... region box block 0 20 0 20 0 20 ... create_box 1 box ... create_atoms 1 box ... ... pair_style eam ... pair_coeff 1 1 Cu_u3.eam ... ... velocity all create $temperature 376847 loop geom ... ... neighbor 1.0 bin ... neigh_modify delay 5 every 1 ... ... fix 1 all nvt temp $temperature $temperature 0.1 ... ... timestep 0.005 ... ... run $nsteps''' >>> write_lammps_inputs('.', eam_template, settings={'temperature': 1600.0, 'nsteps': 100}) >>> with open('in.lammps') as f: ... script = f.read() ... >>> print(script) units metal atom_style atomic lattice fcc 3.615 region box block 0 20 0 20 0 20 create_box 1 box create_atoms 1 box pair_style eam pair_coeff 1 1 Cu_u3.eam velocity all create 1600.0 376847 loop geom neighbor 1.0 bin neigh_modify delay 5 every 1 fix 1 all nvt temp 1600.0 1600.0 0.1 timestep 0.005 run 100 ] variable[variables] assign[=] <ast.IfExp object at 0x7da20c6a9900> variable[template] assign[=] call[name[Template], parameter[name[script_template]]] variable[input_script] assign[=] call[name[template].safe_substitute, parameter[]] if <ast.BoolOp object at 0x7da20c6abbe0> begin[:] call[name[os].makedirs, parameter[name[output_dir]]] with call[name[open], parameter[call[name[os].path.join, parameter[name[output_dir], name[script_filename]]], constant[w]]] begin[:] call[name[f].write, parameter[name[input_script]]] variable[read_data] assign[=] call[name[re].search, parameter[constant[read_data\s+(.*)\n], name[input_script]]] if name[read_data] begin[:] variable[data_filename] assign[=] call[call[call[name[read_data].group, parameter[constant[1]]].split, parameter[]]][constant[0]] if call[name[isinstance], parameter[name[data], name[LammpsData]]] begin[:] call[name[data].write_file, parameter[call[name[os].path.join, parameter[name[output_dir], name[data_filename]]]]]
keyword[def] identifier[write_lammps_inputs] ( identifier[output_dir] , identifier[script_template] , identifier[settings] = keyword[None] , identifier[data] = keyword[None] , identifier[script_filename] = literal[string] , identifier[make_dir_if_not_present] = keyword[True] ,** identifier[kwargs] ): literal[string] identifier[variables] ={} keyword[if] identifier[settings] keyword[is] keyword[None] keyword[else] identifier[settings] identifier[template] = identifier[Template] ( identifier[script_template] ) identifier[input_script] = identifier[template] . identifier[safe_substitute] (** identifier[variables] ) keyword[if] identifier[make_dir_if_not_present] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[output_dir] ): identifier[os] . identifier[makedirs] ( identifier[output_dir] ) keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , identifier[script_filename] ), literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[input_script] ) identifier[read_data] = identifier[re] . identifier[search] ( literal[string] , identifier[input_script] ) keyword[if] identifier[read_data] : identifier[data_filename] = identifier[read_data] . identifier[group] ( literal[int] ). identifier[split] ()[ literal[int] ] keyword[if] identifier[isinstance] ( identifier[data] , identifier[LammpsData] ): identifier[data] . identifier[write_file] ( identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , identifier[data_filename] ),** identifier[kwargs] ) keyword[elif] identifier[isinstance] ( identifier[data] , identifier[str] ) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[data] ): identifier[shutil] . identifier[copyfile] ( identifier[data] , identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , identifier[data_filename] )) keyword[else] : identifier[warnings] . identifier[warn] ( literal[string] % identifier[data_filename] )
def write_lammps_inputs(output_dir, script_template, settings=None, data=None, script_filename='in.lammps', make_dir_if_not_present=True, **kwargs): """ Writes input files for a LAMMPS run. Input script is constructed from a str template with placeholders to be filled by custom settings. Data file is either written from a LammpsData instance or copied from an existing file if read_data cmd is inspected in the input script. Other supporting files are not handled at the moment. Args: output_dir (str): Directory to output the input files. script_template (str): String template for input script with placeholders. The format for placeholders has to be '$variable_name', e.g., '$temperature' settings (dict): Contains values to be written to the placeholders, e.g., {'temperature': 1}. Default to None. data (LammpsData or str): Data file as a LammpsData instance or path to an existing data file. Default to None, i.e., no data file supplied. Useful only when read_data cmd is in the script. script_filename (str): Filename for the input script. make_dir_if_not_present (bool): Set to True if you want the directory (and the whole path) to be created if it is not present. **kwargs: kwargs supported by LammpsData.write_file. Examples: >>> eam_template = '''units metal ... atom_style atomic ... ... lattice fcc 3.615 ... region box block 0 20 0 20 0 20 ... create_box 1 box ... create_atoms 1 box ... ... pair_style eam ... pair_coeff 1 1 Cu_u3.eam ... ... velocity all create $temperature 376847 loop geom ... ... neighbor 1.0 bin ... neigh_modify delay 5 every 1 ... ... fix 1 all nvt temp $temperature $temperature 0.1 ... ... timestep 0.005 ... ... run $nsteps''' >>> write_lammps_inputs('.', eam_template, settings={'temperature': 1600.0, 'nsteps': 100}) >>> with open('in.lammps') as f: ... script = f.read() ... >>> print(script) units metal atom_style atomic lattice fcc 3.615 region box block 0 20 0 20 0 20 create_box 1 box create_atoms 1 box pair_style eam pair_coeff 1 1 Cu_u3.eam velocity all create 1600.0 376847 loop geom neighbor 1.0 bin neigh_modify delay 5 every 1 fix 1 all nvt temp 1600.0 1600.0 0.1 timestep 0.005 run 100 """ variables = {} if settings is None else settings template = Template(script_template) input_script = template.safe_substitute(**variables) if make_dir_if_not_present and (not os.path.exists(output_dir)): os.makedirs(output_dir) # depends on [control=['if'], data=[]] with open(os.path.join(output_dir, script_filename), 'w') as f: f.write(input_script) # depends on [control=['with'], data=['f']] read_data = re.search('read_data\\s+(.*)\\n', input_script) if read_data: data_filename = read_data.group(1).split()[0] if isinstance(data, LammpsData): data.write_file(os.path.join(output_dir, data_filename), **kwargs) # depends on [control=['if'], data=[]] elif isinstance(data, str) and os.path.exists(data): shutil.copyfile(data, os.path.join(output_dir, data_filename)) # depends on [control=['if'], data=[]] else: warnings.warn('No data file supplied. Skip writing %s.' % data_filename) # depends on [control=['if'], data=[]]
async def connect(self): """ Connect to target. """ self.tls_context = None if self.tls: self.tls_context = self.create_tls_context() (self.reader, self.writer) = await asyncio.open_connection( host=self.hostname, port=self.port, local_addr=self.source_address, ssl=self.tls_context, loop=self.eventloop )
<ast.AsyncFunctionDef object at 0x7da207f00be0>
keyword[async] keyword[def] identifier[connect] ( identifier[self] ): literal[string] identifier[self] . identifier[tls_context] = keyword[None] keyword[if] identifier[self] . identifier[tls] : identifier[self] . identifier[tls_context] = identifier[self] . identifier[create_tls_context] () ( identifier[self] . identifier[reader] , identifier[self] . identifier[writer] )= keyword[await] identifier[asyncio] . identifier[open_connection] ( identifier[host] = identifier[self] . identifier[hostname] , identifier[port] = identifier[self] . identifier[port] , identifier[local_addr] = identifier[self] . identifier[source_address] , identifier[ssl] = identifier[self] . identifier[tls_context] , identifier[loop] = identifier[self] . identifier[eventloop] )
async def connect(self): """ Connect to target. """ self.tls_context = None if self.tls: self.tls_context = self.create_tls_context() # depends on [control=['if'], data=[]] (self.reader, self.writer) = await asyncio.open_connection(host=self.hostname, port=self.port, local_addr=self.source_address, ssl=self.tls_context, loop=self.eventloop)
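A hedged driver sketch for the coroutine above; Connection is a hypothetical class exposing the attributes connect() reads (tls, hostname, port, source_address, eventloop) plus create_tls_context(). Note that the loop= keyword the record passes to asyncio.open_connection was deprecated in Python 3.8 and removed in 3.10, so on modern interpreters it would have to be dropped:

import asyncio

async def main():
    # Connection is assumed, not from the source.
    conn = Connection(hostname='irc.example.org', port=6697, tls=True)
    await conn.connect()               # sets conn.reader / conn.writer
    conn.writer.write(b'PING :hello\r\n')
    await conn.writer.drain()          # StreamWriter from open_connection
    conn.writer.close()

asyncio.run(main())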
def _gi_build_stub(parent): """ Inspect the passed module recursively and build stubs for functions, classes, etc. """ classes = {} functions = {} constants = {} methods = {} for name in dir(parent): if name.startswith("__"): continue # Check if this is a valid name in python if not re.match(_identifier_re, name): continue try: obj = getattr(parent, name) except: continue if inspect.isclass(obj): classes[name] = obj elif inspect.isfunction(obj) or inspect.isbuiltin(obj): functions[name] = obj elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj): methods[name] = obj elif ( str(obj).startswith("<flags") or str(obj).startswith("<enum ") or str(obj).startswith("<GType ") or inspect.isdatadescriptor(obj) ): constants[name] = 0 elif isinstance(obj, (int, str)): constants[name] = obj elif callable(obj): # Fall back to a function for anything callable functions[name] = obj else: # Assume everything else is some manner of constant constants[name] = 0 ret = "" if constants: ret += "# %s constants\n\n" % parent.__name__ for name in sorted(constants): if name[0].isdigit(): # GDK has some busted constant names like # Gdk.EventType.2BUTTON_PRESS continue val = constants[name] strval = str(val) if isinstance(val, str): strval = '"%s"' % str(val).replace("\\", "\\\\") ret += "%s = %s\n" % (name, strval) if ret: ret += "\n\n" if functions: ret += "# %s functions\n\n" % parent.__name__ for name in sorted(functions): ret += "def %s(*args, **kwargs):\n" % name ret += " pass\n" if ret: ret += "\n\n" if methods: ret += "# %s methods\n\n" % parent.__name__ for name in sorted(methods): ret += "def %s(self, *args, **kwargs):\n" % name ret += " pass\n" if ret: ret += "\n\n" if classes: ret += "# %s classes\n\n" % parent.__name__ for name, obj in sorted(classes.items()): base = "object" if issubclass(obj, Exception): base = "Exception" ret += "class %s(%s):\n" % (name, base) classret = _gi_build_stub(obj) if not classret: classret = "pass\n" for line in classret.splitlines(): ret += " " + line + "\n" ret += "\n" return ret
def function[_gi_build_stub, parameter[parent]]: constant[ Inspect the passed module recursively and build stubs for functions, classes, etc. ] variable[classes] assign[=] dictionary[[], []] variable[functions] assign[=] dictionary[[], []] variable[constants] assign[=] dictionary[[], []] variable[methods] assign[=] dictionary[[], []] for taget[name[name]] in starred[call[name[dir], parameter[name[parent]]]] begin[:] if call[name[name].startswith, parameter[constant[__]]] begin[:] continue if <ast.UnaryOp object at 0x7da1b1e58e80> begin[:] continue <ast.Try object at 0x7da1b1e58b80> if call[name[inspect].isclass, parameter[name[obj]]] begin[:] call[name[classes]][name[name]] assign[=] name[obj] variable[ret] assign[=] constant[] if name[constants] begin[:] <ast.AugAssign object at 0x7da1b1edb9a0> for taget[name[name]] in starred[call[name[sorted], parameter[name[constants]]]] begin[:] if call[call[name[name]][constant[0]].isdigit, parameter[]] begin[:] continue variable[val] assign[=] call[name[constants]][name[name]] variable[strval] assign[=] call[name[str], parameter[name[val]]] if call[name[isinstance], parameter[name[val], name[str]]] begin[:] variable[strval] assign[=] binary_operation[constant["%s"] <ast.Mod object at 0x7da2590d6920> call[call[name[str], parameter[name[val]]].replace, parameter[constant[\], constant[\\]]]] <ast.AugAssign object at 0x7da1b1e586d0> if name[ret] begin[:] <ast.AugAssign object at 0x7da1b1e5bbb0> if name[functions] begin[:] <ast.AugAssign object at 0x7da1b1e59240> for taget[name[name]] in starred[call[name[sorted], parameter[name[functions]]]] begin[:] <ast.AugAssign object at 0x7da1b1e58220> <ast.AugAssign object at 0x7da1b1e5a020> if name[ret] begin[:] <ast.AugAssign object at 0x7da1b1e59750> if name[methods] begin[:] <ast.AugAssign object at 0x7da1b1e597b0> for taget[name[name]] in starred[call[name[sorted], parameter[name[methods]]]] begin[:] <ast.AugAssign object at 0x7da1b1e5bdc0> <ast.AugAssign object at 0x7da1b1e5b100> if name[ret] begin[:] <ast.AugAssign object at 0x7da1b1e59900> if name[classes] begin[:] <ast.AugAssign object at 0x7da1b1e59ea0> for taget[tuple[[<ast.Name object at 0x7da1b1e643a0>, <ast.Name object at 0x7da1b1e64610>]]] in starred[call[name[sorted], parameter[call[name[classes].items, parameter[]]]]] begin[:] variable[base] assign[=] constant[object] if call[name[issubclass], parameter[name[obj], name[Exception]]] begin[:] variable[base] assign[=] constant[Exception] <ast.AugAssign object at 0x7da1b1e64d60> variable[classret] assign[=] call[name[_gi_build_stub], parameter[name[obj]]] if <ast.UnaryOp object at 0x7da1b1e66ef0> begin[:] variable[classret] assign[=] constant[pass ] for taget[name[line]] in starred[call[name[classret].splitlines, parameter[]]] begin[:] <ast.AugAssign object at 0x7da1b1e670a0> <ast.AugAssign object at 0x7da1b1e65e40> return[name[ret]]
keyword[def] identifier[_gi_build_stub] ( identifier[parent] ): literal[string] identifier[classes] ={} identifier[functions] ={} identifier[constants] ={} identifier[methods] ={} keyword[for] identifier[name] keyword[in] identifier[dir] ( identifier[parent] ): keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): keyword[continue] keyword[if] keyword[not] identifier[re] . identifier[match] ( identifier[_identifier_re] , identifier[name] ): keyword[continue] keyword[try] : identifier[obj] = identifier[getattr] ( identifier[parent] , identifier[name] ) keyword[except] : keyword[continue] keyword[if] identifier[inspect] . identifier[isclass] ( identifier[obj] ): identifier[classes] [ identifier[name] ]= identifier[obj] keyword[elif] identifier[inspect] . identifier[isfunction] ( identifier[obj] ) keyword[or] identifier[inspect] . identifier[isbuiltin] ( identifier[obj] ): identifier[functions] [ identifier[name] ]= identifier[obj] keyword[elif] identifier[inspect] . identifier[ismethod] ( identifier[obj] ) keyword[or] identifier[inspect] . identifier[ismethoddescriptor] ( identifier[obj] ): identifier[methods] [ identifier[name] ]= identifier[obj] keyword[elif] ( identifier[str] ( identifier[obj] ). identifier[startswith] ( literal[string] ) keyword[or] identifier[str] ( identifier[obj] ). identifier[startswith] ( literal[string] ) keyword[or] identifier[str] ( identifier[obj] ). identifier[startswith] ( literal[string] ) keyword[or] identifier[inspect] . identifier[isdatadescriptor] ( identifier[obj] ) ): identifier[constants] [ identifier[name] ]= literal[int] keyword[elif] identifier[isinstance] ( identifier[obj] ,( identifier[int] , identifier[str] )): identifier[constants] [ identifier[name] ]= identifier[obj] keyword[elif] identifier[callable] ( identifier[obj] ): identifier[functions] [ identifier[name] ]= identifier[obj] keyword[else] : identifier[constants] [ identifier[name] ]= literal[int] identifier[ret] = literal[string] keyword[if] identifier[constants] : identifier[ret] += literal[string] % identifier[parent] . identifier[__name__] keyword[for] identifier[name] keyword[in] identifier[sorted] ( identifier[constants] ): keyword[if] identifier[name] [ literal[int] ]. identifier[isdigit] (): keyword[continue] identifier[val] = identifier[constants] [ identifier[name] ] identifier[strval] = identifier[str] ( identifier[val] ) keyword[if] identifier[isinstance] ( identifier[val] , identifier[str] ): identifier[strval] = literal[string] % identifier[str] ( identifier[val] ). identifier[replace] ( literal[string] , literal[string] ) identifier[ret] += literal[string] %( identifier[name] , identifier[strval] ) keyword[if] identifier[ret] : identifier[ret] += literal[string] keyword[if] identifier[functions] : identifier[ret] += literal[string] % identifier[parent] . identifier[__name__] keyword[for] identifier[name] keyword[in] identifier[sorted] ( identifier[functions] ): identifier[ret] += literal[string] % identifier[name] identifier[ret] += literal[string] keyword[if] identifier[ret] : identifier[ret] += literal[string] keyword[if] identifier[methods] : identifier[ret] += literal[string] % identifier[parent] . identifier[__name__] keyword[for] identifier[name] keyword[in] identifier[sorted] ( identifier[methods] ): identifier[ret] += literal[string] % identifier[name] identifier[ret] += literal[string] keyword[if] identifier[ret] : identifier[ret] += literal[string] keyword[if] identifier[classes] : identifier[ret] += literal[string] % identifier[parent] . identifier[__name__] keyword[for] identifier[name] , identifier[obj] keyword[in] identifier[sorted] ( identifier[classes] . identifier[items] ()): identifier[base] = literal[string] keyword[if] identifier[issubclass] ( identifier[obj] , identifier[Exception] ): identifier[base] = literal[string] identifier[ret] += literal[string] %( identifier[name] , identifier[base] ) identifier[classret] = identifier[_gi_build_stub] ( identifier[obj] ) keyword[if] keyword[not] identifier[classret] : identifier[classret] = literal[string] keyword[for] identifier[line] keyword[in] identifier[classret] . identifier[splitlines] (): identifier[ret] += literal[string] + identifier[line] + literal[string] identifier[ret] += literal[string] keyword[return] identifier[ret]
def _gi_build_stub(parent): """ Inspect the passed module recursively and build stubs for functions, classes, etc. """ classes = {} functions = {} constants = {} methods = {} for name in dir(parent): if name.startswith('__'): continue # depends on [control=['if'], data=[]] # Check if this is a valid name in python if not re.match(_identifier_re, name): continue # depends on [control=['if'], data=[]] try: obj = getattr(parent, name) # depends on [control=['try'], data=[]] except: continue # depends on [control=['except'], data=[]] if inspect.isclass(obj): classes[name] = obj # depends on [control=['if'], data=[]] elif inspect.isfunction(obj) or inspect.isbuiltin(obj): functions[name] = obj # depends on [control=['if'], data=[]] elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj): methods[name] = obj # depends on [control=['if'], data=[]] elif str(obj).startswith('<flags') or str(obj).startswith('<enum ') or str(obj).startswith('<GType ') or inspect.isdatadescriptor(obj): constants[name] = 0 # depends on [control=['if'], data=[]] elif isinstance(obj, (int, str)): constants[name] = obj # depends on [control=['if'], data=[]] elif callable(obj): # Fall back to a function for anything callable functions[name] = obj # depends on [control=['if'], data=[]] else: # Assume everything else is some manner of constant constants[name] = 0 # depends on [control=['for'], data=['name']] ret = '' if constants: ret += '# %s constants\n\n' % parent.__name__ # depends on [control=['if'], data=[]] for name in sorted(constants): if name[0].isdigit(): # GDK has some busted constant names like # Gdk.EventType.2BUTTON_PRESS continue # depends on [control=['if'], data=[]] val = constants[name] strval = str(val) if isinstance(val, str): strval = '"%s"' % str(val).replace('\\', '\\\\') # depends on [control=['if'], data=[]] ret += '%s = %s\n' % (name, strval) # depends on [control=['for'], data=['name']] if ret: ret += '\n\n' # depends on [control=['if'], data=[]] if functions: ret += '# %s functions\n\n' % parent.__name__ # depends on [control=['if'], data=[]] for name in sorted(functions): ret += 'def %s(*args, **kwargs):\n' % name ret += ' pass\n' # depends on [control=['for'], data=['name']] if ret: ret += '\n\n' # depends on [control=['if'], data=[]] if methods: ret += '# %s methods\n\n' % parent.__name__ # depends on [control=['if'], data=[]] for name in sorted(methods): ret += 'def %s(self, *args, **kwargs):\n' % name ret += ' pass\n' # depends on [control=['for'], data=['name']] if ret: ret += '\n\n' # depends on [control=['if'], data=[]] if classes: ret += '# %s classes\n\n' % parent.__name__ # depends on [control=['if'], data=[]] for (name, obj) in sorted(classes.items()): base = 'object' if issubclass(obj, Exception): base = 'Exception' # depends on [control=['if'], data=[]] ret += 'class %s(%s):\n' % (name, base) classret = _gi_build_stub(obj) if not classret: classret = 'pass\n' # depends on [control=['if'], data=[]] for line in classret.splitlines(): ret += ' ' + line + '\n' # depends on [control=['for'], data=['line']] ret += '\n' # depends on [control=['for'], data=[]] return ret
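A plausible way to exercise `_gi_build_stub` against an ordinary module, assuming `_identifier_re` is a plain-identifier pattern (its real definition lives elsewhere in the module this function was taken from):

import inspect
import re

_identifier_re = r'^[A-Za-z_][A-Za-z0-9_]*$'  # assumed definition

import math
stub = _gi_build_stub(math)
print(stub.splitlines()[0])   # e.g. "# math constants"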
def validate(config):
    '''
    Validate the beacon configuration
    '''
    VALID_ITEMS = [
        'type', 'bytes_sent', 'bytes_recv', 'packets_sent',
        'packets_recv', 'errin', 'errout', 'dropin',
        'dropout'
    ]

    # Configuration for network_info beacon should be a list of dicts
    if not isinstance(config, list):
        return False, ('Configuration for network_info beacon must be a list.')
    else:

        _config = {}
        list(map(_config.update, config))

        for item in _config.get('interfaces', {}):
            if not isinstance(_config['interfaces'][item], dict):
                return False, ('Configuration for network_info beacon must '
                               'be a list of dictionaries.')
            else:
                if not any(j in VALID_ITEMS for j in _config['interfaces'][item]):
                    return False, ('Invalid configuration item in '
                                   'Beacon configuration.')

    return True, 'Valid beacon configuration'
def function[validate, parameter[config]]: constant[ Validate the beacon configuration ] variable[VALID_ITEMS] assign[=] list[[<ast.Constant object at 0x7da18bc70460>, <ast.Constant object at 0x7da18bc716c0>, <ast.Constant object at 0x7da18bc708e0>, <ast.Constant object at 0x7da18bc73df0>, <ast.Constant object at 0x7da18bc73b50>, <ast.Constant object at 0x7da18bc70310>, <ast.Constant object at 0x7da18bc71f90>, <ast.Constant object at 0x7da18bc73eb0>, <ast.Constant object at 0x7da18bc70700>]] if <ast.UnaryOp object at 0x7da18bc72830> begin[:] return[tuple[[<ast.Constant object at 0x7da18bc73d60>, <ast.Constant object at 0x7da18bc73850>]]] return[tuple[[<ast.Constant object at 0x7da18bc736d0>, <ast.Constant object at 0x7da18bc70e80>]]]
keyword[def] identifier[validate] ( identifier[config] ): literal[string] identifier[VALID_ITEMS] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[config] , identifier[list] ): keyword[return] keyword[False] ,( literal[string] ) keyword[else] : identifier[_config] ={} identifier[list] ( identifier[map] ( identifier[_config] . identifier[update] , identifier[config] )) keyword[for] identifier[item] keyword[in] identifier[_config] . identifier[get] ( literal[string] ,{}): keyword[if] keyword[not] identifier[isinstance] ( identifier[_config] [ literal[string] ][ identifier[item] ], identifier[dict] ): keyword[return] keyword[False] ,( literal[string] literal[string] ) keyword[else] : keyword[if] keyword[not] identifier[any] ( identifier[j] keyword[in] identifier[VALID_ITEMS] keyword[for] identifier[j] keyword[in] identifier[_config] [ literal[string] ][ identifier[item] ]): keyword[return] keyword[False] ,( literal[string] literal[string] ) keyword[return] keyword[True] , literal[string]
def validate(config):
    """
    Validate the beacon configuration
    """
    VALID_ITEMS = ['type', 'bytes_sent', 'bytes_recv', 'packets_sent', 'packets_recv', 'errin', 'errout', 'dropin', 'dropout']
    # Configuration for network_info beacon should be a list of dicts
    if not isinstance(config, list):
        return (False, 'Configuration for network_info beacon must be a list.') # depends on [control=['if'], data=[]]
    else:
        _config = {}
        list(map(_config.update, config))
        for item in _config.get('interfaces', {}):
            if not isinstance(_config['interfaces'][item], dict):
                return (False, 'Configuration for network_info beacon must be a list of dictionaries.') # depends on [control=['if'], data=[]]
            elif not any((j in VALID_ITEMS for j in _config['interfaces'][item])):
                return (False, 'Invalid configuration item in Beacon configuration.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
    return (True, 'Valid beacon configuration')
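A quick sanity check of the validator, assuming it is imported alongside this beacon module:

good = [{'interfaces': {'eth0': {'type': 'equal', 'bytes_recv': 100000}}}]
ok, msg = validate(good)
assert ok and msg == 'Valid beacon configuration'

ok, msg = validate({'interfaces': {}})   # a dict, not a list, so rejected
assert not ok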
def _listitemify(self, item): '''Creates an xbmcswift2.ListItem if the provided value for item is a dict. If item is already a valid xbmcswift2.ListItem, the item is returned unmodified. ''' info_type = self.info_type if hasattr(self, 'info_type') else 'video' # Create ListItems for anything that is not already an instance of # ListItem if not hasattr(item, 'as_tuple'): if 'info_type' not in item.keys(): item['info_type'] = info_type item = xbmcswift2.ListItem.from_dict(**item) return item
def function[_listitemify, parameter[self, item]]: constant[Creates an xbmcswift2.ListItem if the provided value for item is a dict. If item is already a valid xbmcswift2.ListItem, the item is returned unmodified. ] variable[info_type] assign[=] <ast.IfExp object at 0x7da18dc04e50> if <ast.UnaryOp object at 0x7da1b1beeb60> begin[:] if compare[constant[info_type] <ast.NotIn object at 0x7da2590d7190> call[name[item].keys, parameter[]]] begin[:] call[name[item]][constant[info_type]] assign[=] name[info_type] variable[item] assign[=] call[name[xbmcswift2].ListItem.from_dict, parameter[]] return[name[item]]
keyword[def] identifier[_listitemify] ( identifier[self] , identifier[item] ): literal[string] identifier[info_type] = identifier[self] . identifier[info_type] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[else] literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[item] , literal[string] ): keyword[if] literal[string] keyword[not] keyword[in] identifier[item] . identifier[keys] (): identifier[item] [ literal[string] ]= identifier[info_type] identifier[item] = identifier[xbmcswift2] . identifier[ListItem] . identifier[from_dict] (** identifier[item] ) keyword[return] identifier[item]
def _listitemify(self, item): """Creates an xbmcswift2.ListItem if the provided value for item is a dict. If item is already a valid xbmcswift2.ListItem, the item is returned unmodified. """ info_type = self.info_type if hasattr(self, 'info_type') else 'video' # Create ListItems for anything that is not already an instance of # ListItem if not hasattr(item, 'as_tuple'): if 'info_type' not in item.keys(): item['info_type'] = info_type # depends on [control=['if'], data=[]] item = xbmcswift2.ListItem.from_dict(**item) # depends on [control=['if'], data=[]] return item
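In use, this method is typically fed plain dicts from a plugin's item list. A hypothetical call, assuming `plugin` is an instance of the class this method belongs to:

item = plugin._listitemify({'label': 'Episode 1',
                            'path': 'plugin://my.addon/play/1'})
# `item` is now an xbmcswift2.ListItem; 'info_type' was defaulted to
# 'video' because the input dict did not supply one.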
def isloaded(self, name): """Checks if given hook module has been loaded Args: name (str): The name of the module to check Returns: bool. The return code:: True -- Loaded False -- Not Loaded """ if name is None: return True if isinstance(name, str): return (name in [x.__module__ for x in self]) if isinstance(name, Iterable): return set(name).issubset([x.__module__ for x in self]) return False
def function[isloaded, parameter[self, name]]: constant[Checks if given hook module has been loaded Args: name (str): The name of the module to check Returns: bool. The return code:: True -- Loaded False -- Not Loaded ] if compare[name[name] is constant[None]] begin[:] return[constant[True]] if call[name[isinstance], parameter[name[name], name[str]]] begin[:] return[compare[name[name] in <ast.ListComp object at 0x7da18f58f580>]] if call[name[isinstance], parameter[name[name], name[Iterable]]] begin[:] return[call[call[name[set], parameter[name[name]]].issubset, parameter[<ast.ListComp object at 0x7da18f58d330>]]] return[constant[False]]
keyword[def] identifier[isloaded] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[is] keyword[None] : keyword[return] keyword[True] keyword[if] identifier[isinstance] ( identifier[name] , identifier[str] ): keyword[return] ( identifier[name] keyword[in] [ identifier[x] . identifier[__module__] keyword[for] identifier[x] keyword[in] identifier[self] ]) keyword[if] identifier[isinstance] ( identifier[name] , identifier[Iterable] ): keyword[return] identifier[set] ( identifier[name] ). identifier[issubset] ([ identifier[x] . identifier[__module__] keyword[for] identifier[x] keyword[in] identifier[self] ]) keyword[return] keyword[False]
def isloaded(self, name): """Checks if given hook module has been loaded Args: name (str): The name of the module to check Returns: bool. The return code:: True -- Loaded False -- Not Loaded """ if name is None: return True # depends on [control=['if'], data=[]] if isinstance(name, str): return name in [x.__module__ for x in self] # depends on [control=['if'], data=[]] if isinstance(name, Iterable): return set(name).issubset([x.__module__ for x in self]) # depends on [control=['if'], data=[]] return False
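Given some container `hooks` of loaded hook modules (an assumption; the owning class is not shown here), the accepted argument shapes behave as follows:

hooks.isloaded(None)                    # always True
hooks.isloaded('my.hooks.auth')         # True iff that module is loaded
hooks.isloaded(['my.hooks.auth',
                'my.hooks.audit'])      # True iff every name is loaded
hooks.isloaded(42)                      # neither str nor Iterable: False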
def _round(self): """ Subclasses may override this method. """ for contour in self.contours: contour.round() for component in self.components: component.round() for anchor in self.anchors: anchor.round() for guideline in self.guidelines: guideline.round() self.width = normalizers.normalizeRounding(self.width) self.height = normalizers.normalizeRounding(self.height)
def function[_round, parameter[self]]: constant[ Subclasses may override this method. ] for taget[name[contour]] in starred[name[self].contours] begin[:] call[name[contour].round, parameter[]] for taget[name[component]] in starred[name[self].components] begin[:] call[name[component].round, parameter[]] for taget[name[anchor]] in starred[name[self].anchors] begin[:] call[name[anchor].round, parameter[]] for taget[name[guideline]] in starred[name[self].guidelines] begin[:] call[name[guideline].round, parameter[]] name[self].width assign[=] call[name[normalizers].normalizeRounding, parameter[name[self].width]] name[self].height assign[=] call[name[normalizers].normalizeRounding, parameter[name[self].height]]
keyword[def] identifier[_round] ( identifier[self] ): literal[string] keyword[for] identifier[contour] keyword[in] identifier[self] . identifier[contours] : identifier[contour] . identifier[round] () keyword[for] identifier[component] keyword[in] identifier[self] . identifier[components] : identifier[component] . identifier[round] () keyword[for] identifier[anchor] keyword[in] identifier[self] . identifier[anchors] : identifier[anchor] . identifier[round] () keyword[for] identifier[guideline] keyword[in] identifier[self] . identifier[guidelines] : identifier[guideline] . identifier[round] () identifier[self] . identifier[width] = identifier[normalizers] . identifier[normalizeRounding] ( identifier[self] . identifier[width] ) identifier[self] . identifier[height] = identifier[normalizers] . identifier[normalizeRounding] ( identifier[self] . identifier[height] )
def _round(self): """ Subclasses may override this method. """ for contour in self.contours: contour.round() # depends on [control=['for'], data=['contour']] for component in self.components: component.round() # depends on [control=['for'], data=['component']] for anchor in self.anchors: anchor.round() # depends on [control=['for'], data=['anchor']] for guideline in self.guidelines: guideline.round() # depends on [control=['for'], data=['guideline']] self.width = normalizers.normalizeRounding(self.width) self.height = normalizers.normalizeRounding(self.height)
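A sketch of the observable effect, assuming `glyph` is a fontParts glyph whose public `round()` ultimately dispatches to this `_round()`:

glyph.width = 250.6
glyph.round()
assert glyph.width == 251   # dimensions are snapped to integers, along
                            # with contours, components, anchors and
                            # guidelines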
def translate_sbml_reaction(entry, new_id, compartment_map, compound_map): """Translate SBML reaction entry.""" new_entry = DictReactionEntry(entry, id=new_id) # Convert compound IDs in reaction equation if new_entry.equation is not None: compounds = [] for compound, value in new_entry.equation.compounds: # Translate compartment to new ID, if available. compartment = compartment_map.get( compound.compartment, compound.compartment) new_compound = compound.translate( lambda name: compound_map.get(name, name)).in_compartment( compartment) compounds.append((new_compound, value)) new_entry.equation = Reaction( new_entry.equation.direction, compounds) # Get XHTML notes properties for key, value in iteritems(parse_xhtml_reaction_notes(entry)): if key not in new_entry.properties: new_entry.properties[key] = value return new_entry
def function[translate_sbml_reaction, parameter[entry, new_id, compartment_map, compound_map]]: constant[Translate SBML reaction entry.] variable[new_entry] assign[=] call[name[DictReactionEntry], parameter[name[entry]]] if compare[name[new_entry].equation is_not constant[None]] begin[:] variable[compounds] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da204622e90>, <ast.Name object at 0x7da204622b00>]]] in starred[name[new_entry].equation.compounds] begin[:] variable[compartment] assign[=] call[name[compartment_map].get, parameter[name[compound].compartment, name[compound].compartment]] variable[new_compound] assign[=] call[call[name[compound].translate, parameter[<ast.Lambda object at 0x7da204620eb0>]].in_compartment, parameter[name[compartment]]] call[name[compounds].append, parameter[tuple[[<ast.Name object at 0x7da204622fe0>, <ast.Name object at 0x7da204623820>]]]] name[new_entry].equation assign[=] call[name[Reaction], parameter[name[new_entry].equation.direction, name[compounds]]] for taget[tuple[[<ast.Name object at 0x7da204620640>, <ast.Name object at 0x7da204623610>]]] in starred[call[name[iteritems], parameter[call[name[parse_xhtml_reaction_notes], parameter[name[entry]]]]]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[new_entry].properties] begin[:] call[name[new_entry].properties][name[key]] assign[=] name[value] return[name[new_entry]]
keyword[def] identifier[translate_sbml_reaction] ( identifier[entry] , identifier[new_id] , identifier[compartment_map] , identifier[compound_map] ): literal[string] identifier[new_entry] = identifier[DictReactionEntry] ( identifier[entry] , identifier[id] = identifier[new_id] ) keyword[if] identifier[new_entry] . identifier[equation] keyword[is] keyword[not] keyword[None] : identifier[compounds] =[] keyword[for] identifier[compound] , identifier[value] keyword[in] identifier[new_entry] . identifier[equation] . identifier[compounds] : identifier[compartment] = identifier[compartment_map] . identifier[get] ( identifier[compound] . identifier[compartment] , identifier[compound] . identifier[compartment] ) identifier[new_compound] = identifier[compound] . identifier[translate] ( keyword[lambda] identifier[name] : identifier[compound_map] . identifier[get] ( identifier[name] , identifier[name] )). identifier[in_compartment] ( identifier[compartment] ) identifier[compounds] . identifier[append] (( identifier[new_compound] , identifier[value] )) identifier[new_entry] . identifier[equation] = identifier[Reaction] ( identifier[new_entry] . identifier[equation] . identifier[direction] , identifier[compounds] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[parse_xhtml_reaction_notes] ( identifier[entry] )): keyword[if] identifier[key] keyword[not] keyword[in] identifier[new_entry] . identifier[properties] : identifier[new_entry] . identifier[properties] [ identifier[key] ]= identifier[value] keyword[return] identifier[new_entry]
def translate_sbml_reaction(entry, new_id, compartment_map, compound_map): """Translate SBML reaction entry.""" new_entry = DictReactionEntry(entry, id=new_id) # Convert compound IDs in reaction equation if new_entry.equation is not None: compounds = [] for (compound, value) in new_entry.equation.compounds: # Translate compartment to new ID, if available. compartment = compartment_map.get(compound.compartment, compound.compartment) new_compound = compound.translate(lambda name: compound_map.get(name, name)).in_compartment(compartment) compounds.append((new_compound, value)) # depends on [control=['for'], data=[]] new_entry.equation = Reaction(new_entry.equation.direction, compounds) # depends on [control=['if'], data=[]] # Get XHTML notes properties for (key, value) in iteritems(parse_xhtml_reaction_notes(entry)): if key not in new_entry.properties: new_entry.properties[key] = value # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] return new_entry
def launch_cambridge_hphi( jobname: str, cmd: str, memory_mb: int, qos: str, email: str, duration: timedelta, cpus_per_task: int, project: str = "hphi", tasks_per_node: int = 1, partition: str = "wbic-cs", # 2018-02: was "wbic", now "wbic-cs" modules: List[str] = None, directory: str = os.getcwd(), encoding: str = "ascii") -> None: """ Specialization of :func:`launch_slurm` (q.v.) with defaults for the University of Cambridge WBIC HPHI. """ if modules is None: modules = ["default-wbic"] launch_slurm( cmd=cmd, cpus_per_task=cpus_per_task, directory=directory, duration=duration, email=email, encoding=encoding, jobname=jobname, memory_mb=memory_mb, modules=modules, partition=partition, project=project, qos=qos, tasks_per_node=tasks_per_node, )
def function[launch_cambridge_hphi, parameter[jobname, cmd, memory_mb, qos, email, duration, cpus_per_task, project, tasks_per_node, partition, modules, directory, encoding]]: constant[ Specialization of :func:`launch_slurm` (q.v.) with defaults for the University of Cambridge WBIC HPHI. ] if compare[name[modules] is constant[None]] begin[:] variable[modules] assign[=] list[[<ast.Constant object at 0x7da1b184b340>]] call[name[launch_slurm], parameter[]]
keyword[def] identifier[launch_cambridge_hphi] ( identifier[jobname] : identifier[str] , identifier[cmd] : identifier[str] , identifier[memory_mb] : identifier[int] , identifier[qos] : identifier[str] , identifier[email] : identifier[str] , identifier[duration] : identifier[timedelta] , identifier[cpus_per_task] : identifier[int] , identifier[project] : identifier[str] = literal[string] , identifier[tasks_per_node] : identifier[int] = literal[int] , identifier[partition] : identifier[str] = literal[string] , identifier[modules] : identifier[List] [ identifier[str] ]= keyword[None] , identifier[directory] : identifier[str] = identifier[os] . identifier[getcwd] (), identifier[encoding] : identifier[str] = literal[string] )-> keyword[None] : literal[string] keyword[if] identifier[modules] keyword[is] keyword[None] : identifier[modules] =[ literal[string] ] identifier[launch_slurm] ( identifier[cmd] = identifier[cmd] , identifier[cpus_per_task] = identifier[cpus_per_task] , identifier[directory] = identifier[directory] , identifier[duration] = identifier[duration] , identifier[email] = identifier[email] , identifier[encoding] = identifier[encoding] , identifier[jobname] = identifier[jobname] , identifier[memory_mb] = identifier[memory_mb] , identifier[modules] = identifier[modules] , identifier[partition] = identifier[partition] , identifier[project] = identifier[project] , identifier[qos] = identifier[qos] , identifier[tasks_per_node] = identifier[tasks_per_node] , )
def launch_cambridge_hphi(jobname: str, cmd: str, memory_mb: int, qos: str, email: str, duration: timedelta, cpus_per_task: int, project: str='hphi', tasks_per_node: int=1, partition: str='wbic-cs', modules: List[str]=None, directory: str=os.getcwd(), encoding: str='ascii') -> None: # 2018-02: was "wbic", now "wbic-cs" '\n Specialization of :func:`launch_slurm` (q.v.) with defaults for the\n University of Cambridge WBIC HPHI.\n ' if modules is None: modules = ['default-wbic'] # depends on [control=['if'], data=['modules']] launch_slurm(cmd=cmd, cpus_per_task=cpus_per_task, directory=directory, duration=duration, email=email, encoding=encoding, jobname=jobname, memory_mb=memory_mb, modules=modules, partition=partition, project=project, qos=qos, tasks_per_node=tasks_per_node)
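An illustrative call; the job name, command, QoS string and e-mail address below are placeholders, not values the cluster necessarily accepts:

from datetime import timedelta

launch_cambridge_hphi(
    jobname='demo_job',
    cmd='python analyse.py --subject 01',
    memory_mb=16000,
    qos='normal',                 # site-specific QoS string; an assumption
    email='[email protected]',
    duration=timedelta(hours=12),
    cpus_per_task=4,
)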
def as_objective(obj): """Convert obj into Objective class. Strings of the form "layer:n" become the Objective channel(layer, n). Objectives are returned unchanged. Args: obj: string or Objective. Returns: Objective """ if isinstance(obj, Objective): return obj elif callable(obj): return obj elif isinstance(obj, str): layer, n = obj.split(":") layer, n = layer.strip(), int(n) return channel(layer, n)
def function[as_objective, parameter[obj]]: constant[Convert obj into Objective class. Strings of the form "layer:n" become the Objective channel(layer, n). Objectives are returned unchanged. Args: obj: string or Objective. Returns: Objective ] if call[name[isinstance], parameter[name[obj], name[Objective]]] begin[:] return[name[obj]]
keyword[def] identifier[as_objective] ( identifier[obj] ): literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Objective] ): keyword[return] identifier[obj] keyword[elif] identifier[callable] ( identifier[obj] ): keyword[return] identifier[obj] keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[str] ): identifier[layer] , identifier[n] = identifier[obj] . identifier[split] ( literal[string] ) identifier[layer] , identifier[n] = identifier[layer] . identifier[strip] (), identifier[int] ( identifier[n] ) keyword[return] identifier[channel] ( identifier[layer] , identifier[n] )
def as_objective(obj): """Convert obj into Objective class. Strings of the form "layer:n" become the Objective channel(layer, n). Objectives are returned unchanged. Args: obj: string or Objective. Returns: Objective """ if isinstance(obj, Objective): return obj # depends on [control=['if'], data=[]] elif callable(obj): return obj # depends on [control=['if'], data=[]] elif isinstance(obj, str): (layer, n) = obj.split(':') (layer, n) = (layer.strip(), int(n)) return channel(layer, n) # depends on [control=['if'], data=[]]
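Both accepted input shapes in one place; 'mixed4a' is just an illustrative layer name and depends on the model being visualized:

obj = as_objective('mixed4a:42')    # becomes channel('mixed4a', 42)

def custom(T):                      # callables pass through untouched
    return T('mixed4a')[..., 0]

assert as_objective(custom) is custom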
def _ip_route_linux(): ''' Return ip routing information for Linux distros (netstat is deprecated and may not be available) ''' # table main closest to old netstat inet output ret = [] cmd = 'ip -4 route show table main' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() # need to fake similar output to that provided by netstat # to maintain output format if comps[0] == "unreachable": continue if comps[0] == "default": ip_interface = '' if comps[3] == "dev": ip_interface = comps[4] ret.append({ 'addr_family': 'inet', 'destination': '0.0.0.0', 'gateway': comps[2], 'netmask': '0.0.0.0', 'flags': 'UG', 'interface': ip_interface}) else: address_mask = convert_cidr(comps[0]) ip_interface = '' if comps[1] == "dev": ip_interface = comps[2] ret.append({ 'addr_family': 'inet', 'destination': address_mask['network'], 'gateway': '0.0.0.0', 'netmask': address_mask['netmask'], 'flags': 'U', 'interface': ip_interface}) # table all closest to old netstat inet6 output cmd = 'ip -6 route show table all' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() # need to fake similar output to that provided by netstat # to maintain output format if comps[0] == "unreachable": continue if comps[0] == "default": ip_interface = '' if comps[3] == "dev": ip_interface = comps[4] ret.append({ 'addr_family': 'inet6', 'destination': '::/0', 'gateway': comps[2], 'netmask': '', 'flags': 'UG', 'interface': ip_interface}) elif comps[0] == "local": ip_interface = '' if comps[2] == "dev": ip_interface = comps[3] local_address = comps[1] + "/128" ret.append({ 'addr_family': 'inet6', 'destination': local_address, 'gateway': '::', 'netmask': '', 'flags': 'U', 'interface': ip_interface}) else: address_mask = convert_cidr(comps[0]) ip_interface = '' if comps[1] == "dev": ip_interface = comps[2] ret.append({ 'addr_family': 'inet6', 'destination': comps[0], 'gateway': '::', 'netmask': '', 'flags': 'U', 'interface': ip_interface}) return ret
def function[_ip_route_linux, parameter[]]: constant[ Return ip routing information for Linux distros (netstat is deprecated and may not be available) ] variable[ret] assign[=] list[[]] variable[cmd] assign[=] constant[ip -4 route show table main] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]] for taget[name[line]] in starred[call[name[out].splitlines, parameter[]]] begin[:] variable[comps] assign[=] call[name[line].split, parameter[]] if compare[call[name[comps]][constant[0]] equal[==] constant[unreachable]] begin[:] continue if compare[call[name[comps]][constant[0]] equal[==] constant[default]] begin[:] variable[ip_interface] assign[=] constant[] if compare[call[name[comps]][constant[3]] equal[==] constant[dev]] begin[:] variable[ip_interface] assign[=] call[name[comps]][constant[4]] call[name[ret].append, parameter[dictionary[[<ast.Constant object at 0x7da1b20447f0>, <ast.Constant object at 0x7da1b2046290>, <ast.Constant object at 0x7da1b2047640>, <ast.Constant object at 0x7da1b20446d0>, <ast.Constant object at 0x7da1b2047b20>, <ast.Constant object at 0x7da1b2045fc0>], [<ast.Constant object at 0x7da1b2046620>, <ast.Constant object at 0x7da1b20474c0>, <ast.Subscript object at 0x7da1b2044370>, <ast.Constant object at 0x7da1b2047430>, <ast.Constant object at 0x7da1b20467d0>, <ast.Name object at 0x7da1b2047d00>]]]] variable[cmd] assign[=] constant[ip -6 route show table all] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]] for taget[name[line]] in starred[call[name[out].splitlines, parameter[]]] begin[:] variable[comps] assign[=] call[name[line].split, parameter[]] if compare[call[name[comps]][constant[0]] equal[==] constant[unreachable]] begin[:] continue if compare[call[name[comps]][constant[0]] equal[==] constant[default]] begin[:] variable[ip_interface] assign[=] constant[] if compare[call[name[comps]][constant[3]] equal[==] constant[dev]] begin[:] variable[ip_interface] assign[=] call[name[comps]][constant[4]] call[name[ret].append, parameter[dictionary[[<ast.Constant object at 0x7da1b20443a0>, <ast.Constant object at 0x7da1b2045810>, <ast.Constant object at 0x7da1b2047ee0>, <ast.Constant object at 0x7da1b2046830>, <ast.Constant object at 0x7da1b20452a0>, <ast.Constant object at 0x7da1b2045060>], [<ast.Constant object at 0x7da1b2047fd0>, <ast.Constant object at 0x7da1b2046fb0>, <ast.Subscript object at 0x7da1b2045210>, <ast.Constant object at 0x7da1b2045270>, <ast.Constant object at 0x7da1b2045570>, <ast.Name object at 0x7da1b20472b0>]]]] return[name[ret]]
keyword[def] identifier[_ip_route_linux] (): literal[string] identifier[ret] =[] identifier[cmd] = literal[string] identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[True] ) keyword[for] identifier[line] keyword[in] identifier[out] . identifier[splitlines] (): identifier[comps] = identifier[line] . identifier[split] () keyword[if] identifier[comps] [ literal[int] ]== literal[string] : keyword[continue] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = literal[string] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = identifier[comps] [ literal[int] ] identifier[ret] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[comps] [ literal[int] ], literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[ip_interface] }) keyword[else] : identifier[address_mask] = identifier[convert_cidr] ( identifier[comps] [ literal[int] ]) identifier[ip_interface] = literal[string] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = identifier[comps] [ literal[int] ] identifier[ret] . identifier[append] ({ literal[string] : literal[string] , literal[string] : identifier[address_mask] [ literal[string] ], literal[string] : literal[string] , literal[string] : identifier[address_mask] [ literal[string] ], literal[string] : literal[string] , literal[string] : identifier[ip_interface] }) identifier[cmd] = literal[string] identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[True] ) keyword[for] identifier[line] keyword[in] identifier[out] . identifier[splitlines] (): identifier[comps] = identifier[line] . identifier[split] () keyword[if] identifier[comps] [ literal[int] ]== literal[string] : keyword[continue] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = literal[string] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = identifier[comps] [ literal[int] ] identifier[ret] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[comps] [ literal[int] ], literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[ip_interface] }) keyword[elif] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = literal[string] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = identifier[comps] [ literal[int] ] identifier[local_address] = identifier[comps] [ literal[int] ]+ literal[string] identifier[ret] . identifier[append] ({ literal[string] : literal[string] , literal[string] : identifier[local_address] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[ip_interface] }) keyword[else] : identifier[address_mask] = identifier[convert_cidr] ( identifier[comps] [ literal[int] ]) identifier[ip_interface] = literal[string] keyword[if] identifier[comps] [ literal[int] ]== literal[string] : identifier[ip_interface] = identifier[comps] [ literal[int] ] identifier[ret] . identifier[append] ({ literal[string] : literal[string] , literal[string] : identifier[comps] [ literal[int] ], literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[ip_interface] }) keyword[return] identifier[ret]
def _ip_route_linux(): """ Return ip routing information for Linux distros (netstat is deprecated and may not be available) """ # table main closest to old netstat inet output ret = [] cmd = 'ip -4 route show table main' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() # need to fake similar output to that provided by netstat # to maintain output format if comps[0] == 'unreachable': continue # depends on [control=['if'], data=[]] if comps[0] == 'default': ip_interface = '' if comps[3] == 'dev': ip_interface = comps[4] # depends on [control=['if'], data=[]] ret.append({'addr_family': 'inet', 'destination': '0.0.0.0', 'gateway': comps[2], 'netmask': '0.0.0.0', 'flags': 'UG', 'interface': ip_interface}) # depends on [control=['if'], data=[]] else: address_mask = convert_cidr(comps[0]) ip_interface = '' if comps[1] == 'dev': ip_interface = comps[2] # depends on [control=['if'], data=[]] ret.append({'addr_family': 'inet', 'destination': address_mask['network'], 'gateway': '0.0.0.0', 'netmask': address_mask['netmask'], 'flags': 'U', 'interface': ip_interface}) # depends on [control=['for'], data=['line']] # table all closest to old netstat inet6 output cmd = 'ip -6 route show table all' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() # need to fake similar output to that provided by netstat # to maintain output format if comps[0] == 'unreachable': continue # depends on [control=['if'], data=[]] if comps[0] == 'default': ip_interface = '' if comps[3] == 'dev': ip_interface = comps[4] # depends on [control=['if'], data=[]] ret.append({'addr_family': 'inet6', 'destination': '::/0', 'gateway': comps[2], 'netmask': '', 'flags': 'UG', 'interface': ip_interface}) # depends on [control=['if'], data=[]] elif comps[0] == 'local': ip_interface = '' if comps[2] == 'dev': ip_interface = comps[3] # depends on [control=['if'], data=[]] local_address = comps[1] + '/128' ret.append({'addr_family': 'inet6', 'destination': local_address, 'gateway': '::', 'netmask': '', 'flags': 'U', 'interface': ip_interface}) # depends on [control=['if'], data=[]] else: address_mask = convert_cidr(comps[0]) ip_interface = '' if comps[1] == 'dev': ip_interface = comps[2] # depends on [control=['if'], data=[]] ret.append({'addr_family': 'inet6', 'destination': comps[0], 'gateway': '::', 'netmask': '', 'flags': 'U', 'interface': ip_interface}) # depends on [control=['for'], data=['line']] return ret
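For reference, one record of the returned list, as produced for a default IPv4 route (addresses and interface name are illustrative):

{'addr_family': 'inet',
 'destination': '0.0.0.0',
 'gateway': '192.168.1.1',
 'netmask': '0.0.0.0',
 'flags': 'UG',
 'interface': 'eth0'}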
def UnitToLNode(u: Unit, node: Optional[LNode]=None, toL: Optional[dict]=None, optimizations=[]) -> LNode: """ Build LNode instance from Unit instance :attention: unit has to be synthesized """ if toL is None: toL = {} if node is None: root = LNode(name=u._name, originObj=u, node2lnode=toL) else: root = node stmPorts = {} # {RtlSignal: NetCtx} netCtx = NetCtxs(root) # create subunits for su in u._units: n = root.addNode(name=su._name, originObj=su) UnitToLNode(su, n, toL, optimizations) # create subunits from statements for stm in u._ctx.statements: n = addStmAsLNode(root, stm, stmPorts, netCtx) # create ports for this unit for intf in u._interfaces: addPort(root, intf) # render content of statements for stm in u._ctx.statements: n = toL.get(stm, None) if n is not None: if isinstance(n, VirtualLNode): # statement is not in wrap and does not need any port context p = None else: # statement is in wrap and needs a port context # to resolve port connections to wrap p = stmPorts[n] r = StatementRenderer(n, toL, p, netCtx) r.renderContent() # connect nets inside this unit for s in u._ctx.signals: if not s.hidden: net, _ = netCtx.getDefault(s) for e in s.endpoints: if isinstance(e, PortItem): net.addEndpoint(toL[e]) for d in s.drivers: if isinstance(d, PortItem): net.addDriver(toL[d]) netCtx.applyConnections(root) for opt in optimizations: opt(root) isRootOfWholeGraph = root.parent is None if not isRootOfWholeGraph: for intf in u._interfaces: # connect my external port to port on my container on parent # also override toL to use this new port ext_p = toL[originObjOfPort(intf)].parentNode nodePort = addPortToLNode(root, intf) # connect this node which represents port to port of this node if intf._direction == INTF_DIRECTION.SLAVE: src = nodePort dst = ext_p.addPort("", PortType.INPUT, PortSide.WEST) else: src = ext_p.addPort("", PortType.OUTPUT, PortSide.EAST) dst = nodePort root.addEdge(src, dst, name=repr(intf), originObj=intf) return root
def function[UnitToLNode, parameter[u, node, toL, optimizations]]: constant[ Build LNode instance from Unit instance :attention: unit has to be synthesized ] if compare[name[toL] is constant[None]] begin[:] variable[toL] assign[=] dictionary[[], []] if compare[name[node] is constant[None]] begin[:] variable[root] assign[=] call[name[LNode], parameter[]] variable[stmPorts] assign[=] dictionary[[], []] variable[netCtx] assign[=] call[name[NetCtxs], parameter[name[root]]] for taget[name[su]] in starred[name[u]._units] begin[:] variable[n] assign[=] call[name[root].addNode, parameter[]] call[name[UnitToLNode], parameter[name[su], name[n], name[toL], name[optimizations]]] for taget[name[stm]] in starred[name[u]._ctx.statements] begin[:] variable[n] assign[=] call[name[addStmAsLNode], parameter[name[root], name[stm], name[stmPorts], name[netCtx]]] for taget[name[intf]] in starred[name[u]._interfaces] begin[:] call[name[addPort], parameter[name[root], name[intf]]] for taget[name[stm]] in starred[name[u]._ctx.statements] begin[:] variable[n] assign[=] call[name[toL].get, parameter[name[stm], constant[None]]] if compare[name[n] is_not constant[None]] begin[:] if call[name[isinstance], parameter[name[n], name[VirtualLNode]]] begin[:] variable[p] assign[=] constant[None] variable[r] assign[=] call[name[StatementRenderer], parameter[name[n], name[toL], name[p], name[netCtx]]] call[name[r].renderContent, parameter[]] for taget[name[s]] in starred[name[u]._ctx.signals] begin[:] if <ast.UnaryOp object at 0x7da18f09c8e0> begin[:] <ast.Tuple object at 0x7da18f09c550> assign[=] call[name[netCtx].getDefault, parameter[name[s]]] for taget[name[e]] in starred[name[s].endpoints] begin[:] if call[name[isinstance], parameter[name[e], name[PortItem]]] begin[:] call[name[net].addEndpoint, parameter[call[name[toL]][name[e]]]] for taget[name[d]] in starred[name[s].drivers] begin[:] if call[name[isinstance], parameter[name[d], name[PortItem]]] begin[:] call[name[net].addDriver, parameter[call[name[toL]][name[d]]]] call[name[netCtx].applyConnections, parameter[name[root]]] for taget[name[opt]] in starred[name[optimizations]] begin[:] call[name[opt], parameter[name[root]]] variable[isRootOfWholeGraph] assign[=] compare[name[root].parent is constant[None]] if <ast.UnaryOp object at 0x7da18f09fac0> begin[:] for taget[name[intf]] in starred[name[u]._interfaces] begin[:] variable[ext_p] assign[=] call[name[toL]][call[name[originObjOfPort], parameter[name[intf]]]].parentNode variable[nodePort] assign[=] call[name[addPortToLNode], parameter[name[root], name[intf]]] if compare[name[intf]._direction equal[==] name[INTF_DIRECTION].SLAVE] begin[:] variable[src] assign[=] name[nodePort] variable[dst] assign[=] call[name[ext_p].addPort, parameter[constant[], name[PortType].INPUT, name[PortSide].WEST]] call[name[root].addEdge, parameter[name[src], name[dst]]] return[name[root]]
keyword[def] identifier[UnitToLNode] ( identifier[u] : identifier[Unit] , identifier[node] : identifier[Optional] [ identifier[LNode] ]= keyword[None] , identifier[toL] : identifier[Optional] [ identifier[dict] ]= keyword[None] , identifier[optimizations] =[])-> identifier[LNode] : literal[string] keyword[if] identifier[toL] keyword[is] keyword[None] : identifier[toL] ={} keyword[if] identifier[node] keyword[is] keyword[None] : identifier[root] = identifier[LNode] ( identifier[name] = identifier[u] . identifier[_name] , identifier[originObj] = identifier[u] , identifier[node2lnode] = identifier[toL] ) keyword[else] : identifier[root] = identifier[node] identifier[stmPorts] ={} identifier[netCtx] = identifier[NetCtxs] ( identifier[root] ) keyword[for] identifier[su] keyword[in] identifier[u] . identifier[_units] : identifier[n] = identifier[root] . identifier[addNode] ( identifier[name] = identifier[su] . identifier[_name] , identifier[originObj] = identifier[su] ) identifier[UnitToLNode] ( identifier[su] , identifier[n] , identifier[toL] , identifier[optimizations] ) keyword[for] identifier[stm] keyword[in] identifier[u] . identifier[_ctx] . identifier[statements] : identifier[n] = identifier[addStmAsLNode] ( identifier[root] , identifier[stm] , identifier[stmPorts] , identifier[netCtx] ) keyword[for] identifier[intf] keyword[in] identifier[u] . identifier[_interfaces] : identifier[addPort] ( identifier[root] , identifier[intf] ) keyword[for] identifier[stm] keyword[in] identifier[u] . identifier[_ctx] . identifier[statements] : identifier[n] = identifier[toL] . identifier[get] ( identifier[stm] , keyword[None] ) keyword[if] identifier[n] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[n] , identifier[VirtualLNode] ): identifier[p] = keyword[None] keyword[else] : identifier[p] = identifier[stmPorts] [ identifier[n] ] identifier[r] = identifier[StatementRenderer] ( identifier[n] , identifier[toL] , identifier[p] , identifier[netCtx] ) identifier[r] . identifier[renderContent] () keyword[for] identifier[s] keyword[in] identifier[u] . identifier[_ctx] . identifier[signals] : keyword[if] keyword[not] identifier[s] . identifier[hidden] : identifier[net] , identifier[_] = identifier[netCtx] . identifier[getDefault] ( identifier[s] ) keyword[for] identifier[e] keyword[in] identifier[s] . identifier[endpoints] : keyword[if] identifier[isinstance] ( identifier[e] , identifier[PortItem] ): identifier[net] . identifier[addEndpoint] ( identifier[toL] [ identifier[e] ]) keyword[for] identifier[d] keyword[in] identifier[s] . identifier[drivers] : keyword[if] identifier[isinstance] ( identifier[d] , identifier[PortItem] ): identifier[net] . identifier[addDriver] ( identifier[toL] [ identifier[d] ]) identifier[netCtx] . identifier[applyConnections] ( identifier[root] ) keyword[for] identifier[opt] keyword[in] identifier[optimizations] : identifier[opt] ( identifier[root] ) identifier[isRootOfWholeGraph] = identifier[root] . identifier[parent] keyword[is] keyword[None] keyword[if] keyword[not] identifier[isRootOfWholeGraph] : keyword[for] identifier[intf] keyword[in] identifier[u] . identifier[_interfaces] : identifier[ext_p] = identifier[toL] [ identifier[originObjOfPort] ( identifier[intf] )]. identifier[parentNode] identifier[nodePort] = identifier[addPortToLNode] ( identifier[root] , identifier[intf] ) keyword[if] identifier[intf] . identifier[_direction] == identifier[INTF_DIRECTION] . identifier[SLAVE] : identifier[src] = identifier[nodePort] identifier[dst] = identifier[ext_p] . identifier[addPort] ( literal[string] , identifier[PortType] . identifier[INPUT] , identifier[PortSide] . identifier[WEST] ) keyword[else] : identifier[src] = identifier[ext_p] . identifier[addPort] ( literal[string] , identifier[PortType] . identifier[OUTPUT] , identifier[PortSide] . identifier[EAST] ) identifier[dst] = identifier[nodePort] identifier[root] . identifier[addEdge] ( identifier[src] , identifier[dst] , identifier[name] = identifier[repr] ( identifier[intf] ), identifier[originObj] = identifier[intf] ) keyword[return] identifier[root]
def UnitToLNode(u: Unit, node: Optional[LNode]=None, toL: Optional[dict]=None, optimizations=[]) -> LNode: """ Build LNode instance from Unit instance :attention: unit has to be synthesized """ if toL is None: toL = {} # depends on [control=['if'], data=['toL']] if node is None: root = LNode(name=u._name, originObj=u, node2lnode=toL) # depends on [control=['if'], data=[]] else: root = node stmPorts = {} # {RtlSignal: NetCtx} netCtx = NetCtxs(root) # create subunits for su in u._units: n = root.addNode(name=su._name, originObj=su) UnitToLNode(su, n, toL, optimizations) # depends on [control=['for'], data=['su']] # create subunits from statements for stm in u._ctx.statements: n = addStmAsLNode(root, stm, stmPorts, netCtx) # depends on [control=['for'], data=['stm']] # create ports for this unit for intf in u._interfaces: addPort(root, intf) # depends on [control=['for'], data=['intf']] # render content of statements for stm in u._ctx.statements: n = toL.get(stm, None) if n is not None: if isinstance(n, VirtualLNode): # statement is not in wrap and does not need any port context p = None # depends on [control=['if'], data=[]] else: # statement is in wrap and needs a port context # to resolve port connections to wrap p = stmPorts[n] r = StatementRenderer(n, toL, p, netCtx) r.renderContent() # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=['stm']] # connect nets inside this unit for s in u._ctx.signals: if not s.hidden: (net, _) = netCtx.getDefault(s) for e in s.endpoints: if isinstance(e, PortItem): net.addEndpoint(toL[e]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']] for d in s.drivers: if isinstance(d, PortItem): net.addDriver(toL[d]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] netCtx.applyConnections(root) for opt in optimizations: opt(root) # depends on [control=['for'], data=['opt']] isRootOfWholeGraph = root.parent is None if not isRootOfWholeGraph: for intf in u._interfaces: # connect my external port to port on my container on parent # also override toL to use this new port ext_p = toL[originObjOfPort(intf)].parentNode nodePort = addPortToLNode(root, intf) # connect this node which represents port to port of this node if intf._direction == INTF_DIRECTION.SLAVE: src = nodePort dst = ext_p.addPort('', PortType.INPUT, PortSide.WEST) # depends on [control=['if'], data=[]] else: src = ext_p.addPort('', PortType.OUTPUT, PortSide.EAST) dst = nodePort root.addEdge(src, dst, name=repr(intf), originObj=intf) # depends on [control=['for'], data=['intf']] # depends on [control=['if'], data=[]] return root
def user_remove(name, user=None, password=None, host=None, port=None, database='admin', authdb=None):
    '''
    Remove a MongoDB user

    CLI Example:

    .. code-block:: bash

        salt '*' mongodb.user_remove <name> <user> <password> <host> <port> <database>
    '''
    conn = _connect(user, password, host, port)
    if not conn:
        return 'Failed to connect to mongo database'

    try:
        log.info('Removing user %s', name)
        mdb = pymongo.database.Database(conn, database)
        mdb.remove_user(name)
    except pymongo.errors.PyMongoError as err:
        log.error('Removing user %s failed with error: %s', name, err)
        return six.text_type(err)

    return True
def function[user_remove, parameter[name, user, password, host, port, database, authdb]]: constant[ Remove a MongoDB user CLI Example: .. code-block:: bash salt '*' mongodb.user_remove <name> <user> <password> <host> <port> <database> ] variable[conn] assign[=] call[name[_connect], parameter[name[user], name[password], name[host], name[port]]] if <ast.UnaryOp object at 0x7da18bc73fd0> begin[:] return[constant[Failed to connect to mongo database]] <ast.Try object at 0x7da18bc70be0> return[constant[True]]
keyword[def] identifier[user_remove] ( identifier[name] , identifier[user] = keyword[None] , identifier[password] = keyword[None] , identifier[host] = keyword[None] , identifier[port] = keyword[None] , identifier[database] = literal[string] , identifier[authdb] = keyword[None] ): literal[string] identifier[conn] = identifier[_connect] ( identifier[user] , identifier[password] , identifier[host] , identifier[port] ) keyword[if] keyword[not] identifier[conn] : keyword[return] literal[string] keyword[try] : identifier[log] . identifier[info] ( literal[string] , identifier[name] ) identifier[mdb] = identifier[pymongo] . identifier[database] . identifier[Database] ( identifier[conn] , identifier[database] ) identifier[mdb] . identifier[remove_user] ( identifier[name] ) keyword[except] identifier[pymongo] . identifier[errors] . identifier[PyMongoError] keyword[as] identifier[err] : identifier[log] . identifier[error] ( literal[string] , identifier[name] , identifier[err] ) keyword[return] identifier[six] . identifier[text_type] ( identifier[err] ) keyword[return] keyword[True]
def user_remove(name, user=None, password=None, host=None, port=None, database='admin', authdb=None):
    """
    Remove a MongoDB user

    CLI Example:

    .. code-block:: bash

        salt '*' mongodb.user_remove <name> <user> <password> <host> <port> <database>
    """
    conn = _connect(user, password, host, port)
    if not conn:
        return 'Failed to connect to mongo database' # depends on [control=['if'], data=[]]
    try:
        log.info('Removing user %s', name)
        mdb = pymongo.database.Database(conn, database)
        mdb.remove_user(name) # depends on [control=['try'], data=[]]
    except pymongo.errors.PyMongoError as err:
        log.error('Removing user %s failed with error: %s', name, err)
        return six.text_type(err) # depends on [control=['except'], data=['err']]
    return True
def create_pod_security_policy(self, body, **kwargs): """ create a PodSecurityPolicy This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_pod_security_policy(body, async_req=True) >>> result = thread.get() :param async_req bool :param ExtensionsV1beta1PodSecurityPolicy body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: ExtensionsV1beta1PodSecurityPolicy If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_pod_security_policy_with_http_info(body, **kwargs) else: (data) = self.create_pod_security_policy_with_http_info(body, **kwargs) return data
def function[create_pod_security_policy, parameter[self, body]]: constant[ create a PodSecurityPolicy This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_pod_security_policy(body, async_req=True) >>> result = thread.get() :param async_req bool :param ExtensionsV1beta1PodSecurityPolicy body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: ExtensionsV1beta1PodSecurityPolicy If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].create_pod_security_policy_with_http_info, parameter[name[body]]]]
keyword[def] identifier[create_pod_security_policy] ( identifier[self] , identifier[body] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[create_pod_security_policy_with_http_info] ( identifier[body] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[create_pod_security_policy_with_http_info] ( identifier[body] ,** identifier[kwargs] ) keyword[return] identifier[data]
def create_pod_security_policy(self, body, **kwargs): """ create a PodSecurityPolicy This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_pod_security_policy(body, async_req=True) >>> result = thread.get() :param async_req bool :param ExtensionsV1beta1PodSecurityPolicy body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: ExtensionsV1beta1PodSecurityPolicy If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_pod_security_policy_with_http_info(body, **kwargs) # depends on [control=['if'], data=[]] else: data = self.create_pod_security_policy_with_http_info(body, **kwargs) return data
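A hedged usage sketch of the synchronous and `async_req` call patterns. The model classes (`ExtensionsV1beta1PodSecurityPolicy` and its strategy-option types) are assumed from the generated Kubernetes Python client, and the policy body is illustrative, not a production spec:

```python
from kubernetes import client, config  # assumed standard client package

config.load_kube_config()
api = client.ExtensionsV1beta1Api()

# Minimal illustrative policy body; field names follow the generated
# ExtensionsV1beta1 models.
body = client.ExtensionsV1beta1PodSecurityPolicy(
    metadata=client.V1ObjectMeta(name='example-psp'),
    spec=client.ExtensionsV1beta1PodSecurityPolicySpec(
        privileged=False,
        se_linux=client.ExtensionsV1beta1SELinuxStrategyOptions(rule='RunAsAny'),
        run_as_user=client.ExtensionsV1beta1RunAsUserStrategyOptions(rule='RunAsAny'),
        supplemental_groups=client.ExtensionsV1beta1SupplementalGroupsStrategyOptions(rule='RunAsAny'),
        fs_group=client.ExtensionsV1beta1FSGroupStrategyOptions(rule='RunAsAny'),
    ),
)
policy = api.create_pod_security_policy(body)

# Asynchronous variant, mirroring the docstring:
thread = api.create_pod_security_policy(body, async_req=True)
policy = thread.get()
```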
def validate(self):
    """ Validate this data source defaults. Currently validates
    :rtype: None """
    extents_valid = (0 <= self.lower_extent <= self.upper_extent <= self.global_size)

    if not extents_valid:
        raise ValueError("Dimension '{d}' fails 0 <= {el} <= {eu} <= {gs}"
                         .format(d=self.name, gs=self.global_size,
                                 el=self.lower_extent, eu=self.upper_extent))
def function[validate, parameter[self]]: constant[ Validate the contents of a dimension data dictionary ] variable[extents_valid] assign[=] compare[constant[0] less_or_equal[<=] name[self].lower_extent] if <ast.UnaryOp object at 0x7da1b2525270> begin[:] <ast.Raise object at 0x7da1b25250f0>
keyword[def] identifier[validate] ( identifier[self] ): literal[string] identifier[extents_valid] =( literal[int] <= identifier[self] . identifier[lower_extent] <= identifier[self] . identifier[upper_extent] <= identifier[self] . identifier[global_size] ) keyword[if] keyword[not] identifier[extents_valid] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[d] = identifier[self] . identifier[name] , identifier[gs] = identifier[self] . identifier[global_size] , identifier[el] = identifier[self] . identifier[lower_extent] , identifier[eu] = identifier[self] . identifier[upper_extent] ))
def validate(self):
    """ Validate the contents of a dimension data dictionary """
    extents_valid = 0 <= self.lower_extent <= self.upper_extent <= self.global_size
    if not extents_valid:
        raise ValueError("Dimension '{d}' fails 0 <= {el} <= {eu} <= {gs}".format(d=self.name, gs=self.global_size, el=self.lower_extent, eu=self.upper_extent))  # depends on [control=['if'], data=[]]
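A self-contained sketch of the failure path, reusing the `validate` function above as a method on a stand-in class:

```python
# A stand-in object exercising the check; upper_extent exceeds global_size,
# so validation fails with the formatted message.
class Dim:
    name = 'ntime'
    global_size = 100
    lower_extent = 10
    upper_extent = 120
    validate = validate  # reuse the module-level function above as a method

try:
    Dim().validate()
except ValueError as err:
    print(err)  # Dimension 'ntime' fails 0 <= 10 <= 120 <= 100
```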
def detect_link_tag_time(self, tag):
    """
    Detect link, name and time for specified tag.

    :param dict tag: Tag data.

    :rtype: str, str, datetime
    :return: Link, name and time of the tag.
    """

    # if tag is None - set current time
    newer_tag_time = self.get_time_of_tag(tag) if tag \
        else datetime.datetime.now()

    # if it's future release tag - set this value
    if tag["name"] == self.options.unreleased_label \
            and self.options.future_release:
        newer_tag_name = self.options.future_release
        newer_tag_link = self.options.future_release
    elif tag["name"] != self.options.unreleased_label:
        newer_tag_name = tag["name"]
        newer_tag_link = newer_tag_name
    else:
        # put unreleased label if there is no name for the tag
        newer_tag_name = self.options.unreleased_label
        newer_tag_link = "HEAD"
    return [newer_tag_link, newer_tag_name, newer_tag_time]
def function[detect_link_tag_time, parameter[self, tag]]: constant[ Detect link, name and time for specified tag. :param dict tag: Tag data. :rtype: str, str, datetime :return: Link, name and time of the tag. ] variable[newer_tag_time] assign[=] <ast.IfExp object at 0x7da1b00faa40> if <ast.BoolOp object at 0x7da1b00f92a0> begin[:] variable[newer_tag_name] assign[=] name[self].options.future_release variable[newer_tag_link] assign[=] name[self].options.future_release return[list[[<ast.Name object at 0x7da1b00faf80>, <ast.Name object at 0x7da1b00f9600>, <ast.Name object at 0x7da1b00f9660>]]]
keyword[def] identifier[detect_link_tag_time] ( identifier[self] , identifier[tag] ): literal[string] identifier[newer_tag_time] = identifier[self] . identifier[get_time_of_tag] ( identifier[tag] ) keyword[if] identifier[tag] keyword[else] identifier[datetime] . identifier[datetime] . identifier[now] () keyword[if] identifier[tag] [ literal[string] ]== identifier[self] . identifier[options] . identifier[unreleased_label] keyword[and] identifier[self] . identifier[options] . identifier[future_release] : identifier[newer_tag_name] = identifier[self] . identifier[options] . identifier[future_release] identifier[newer_tag_link] = identifier[self] . identifier[options] . identifier[future_release] keyword[elif] identifier[tag] [ literal[string] ] keyword[is] keyword[not] identifier[self] . identifier[options] . identifier[unreleased_label] : identifier[newer_tag_name] = identifier[tag] [ literal[string] ] identifier[newer_tag_link] = identifier[newer_tag_name] keyword[else] : identifier[newer_tag_name] = identifier[self] . identifier[options] . identifier[unreleased_label] identifier[newer_tag_link] = literal[string] keyword[return] [ identifier[newer_tag_link] , identifier[newer_tag_name] , identifier[newer_tag_time] ]
def detect_link_tag_time(self, tag):
    """
    Detect link, name and time for specified tag.

    :param dict tag: Tag data.

    :rtype: str, str, datetime
    :return: Link, name and time of the tag.
    """
    # if tag is None - set current time
    newer_tag_time = self.get_time_of_tag(tag) if tag else datetime.datetime.now()
    # if it's future release tag - set this value
    if tag['name'] == self.options.unreleased_label and self.options.future_release:
        newer_tag_name = self.options.future_release
        newer_tag_link = self.options.future_release  # depends on [control=['if'], data=[]]
    elif tag['name'] != self.options.unreleased_label:
        newer_tag_name = tag['name']
        newer_tag_link = newer_tag_name  # depends on [control=['if'], data=[]]
    else:
        # put unreleased label if there is no name for the tag
        newer_tag_name = self.options.unreleased_label
        newer_tag_link = 'HEAD'
    return [newer_tag_link, newer_tag_name, newer_tag_time]
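A sketch of the tagged-release branch, with `SimpleNamespace` stand-ins for the generator's `options` and `get_time_of_tag` collaborators:

```python
import datetime
from types import SimpleNamespace

# Stand-ins for the changelog generator's collaborators; values are hypothetical.
gen = SimpleNamespace(
    options=SimpleNamespace(unreleased_label='Unreleased', future_release=''),
    get_time_of_tag=lambda tag: datetime.datetime(2019, 1, 1),
)
link, name, when = detect_link_tag_time(gen, {'name': 'v1.2.0'})
print(link, name, when)  # v1.2.0 v1.2.0 2019-01-01 00:00:00
```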
def _sam_to_soft_clipped(self, sam):
    '''Returns tuple of whether or not the left and right end of the mapped read in the sam record is soft-clipped'''
    if sam.is_unmapped:
        raise Error('Cannot get soft clip info from an unmapped read')

    if sam.cigar is None or len(sam.cigar) == 0:
        return False, False
    return (sam.cigar[0][0] == 4, sam.cigar[-1][0] == 4)
def function[_sam_to_soft_clipped, parameter[self, sam]]: constant[Returns tuple of whether or not the left and right end of the mapped read in the sam record is soft-clipped] if name[sam].is_unmapped begin[:] <ast.Raise object at 0x7da2044c0970> if <ast.BoolOp object at 0x7da2044c1f30> begin[:] return[tuple[[<ast.Constant object at 0x7da2044c1780>, <ast.Constant object at 0x7da18fe93610>]]] return[tuple[[<ast.Compare object at 0x7da18f00e8f0>, <ast.Compare object at 0x7da18f00ed40>]]]
keyword[def] identifier[_sam_to_soft_clipped] ( identifier[self] , identifier[sam] ): literal[string] keyword[if] identifier[sam] . identifier[is_unmapped] : keyword[raise] identifier[Error] ( literal[string] ) keyword[if] identifier[sam] . identifier[cigar] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[sam] . identifier[cigar] )== literal[int] : keyword[return] keyword[False] , keyword[False] keyword[return] ( identifier[sam] . identifier[cigar] [ literal[int] ][ literal[int] ]== literal[int] , identifier[sam] . identifier[cigar] [- literal[int] ][ literal[int] ]== literal[int] )
def _sam_to_soft_clipped(self, sam):
    """Returns tuple of whether or not the left and right end of the mapped read in the sam record is soft-clipped"""
    if sam.is_unmapped:
        raise Error('Cannot get soft clip info from an unmapped read')  # depends on [control=['if'], data=[]]
    if sam.cigar is None or len(sam.cigar) == 0:
        return (False, False)  # depends on [control=['if'], data=[]]
    return (sam.cigar[0][0] == 4, sam.cigar[-1][0] == 4)
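A quick check with a stand-in for a pysam-style record (CIGAR op code 4 is a soft clip):

```python
from types import SimpleNamespace

# Stand-in record with CIGAR 5S90M3S, so both ends report True.
# None works for self here because the method never touches it.
read = SimpleNamespace(is_unmapped=False, cigar=[(4, 5), (0, 90), (4, 3)])
print(_sam_to_soft_clipped(None, read))  # (True, True)
```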
def save(self, file, *attributes, **options): """ Saves the selected field *attributes* for each :class:`Field` *nested* in the `Container` to an ``.ini`` *file*. :param str file: name and location of the ``.ini`` *file*. :param str attributes: selected :class:`Field` attributes. Fallback is the field :attr:`~Field.value`. :keyword str section: section in the ``.ini`` file to look for the :class:`Field` values of the `Container`. If no *section* is specified the class name of the instance is used. :keyword bool nested: if ``True`` all :class:`Pointer` fields in the `Container` saves their referenced :attr:`~Pointer.data` object field attributes as well (chained method call). Example: >>> class Foo(Structure): ... def __init__(self): ... super().__init__() ... self.stream = Stream() ... self.float = Float() ... self.structure = Structure() ... self.structure.decimal = Decimal(8) ... self.array = Array(Byte, 3) ... self.pointer = Pointer() >>> foo = Foo() >>> foo.to_list(nested=True) [('Foo.stream', ''), ('Foo.float', 0.0), ('Foo.structure.decimal', 0), ('Foo.array[0]', '0x0'), ('Foo.array[1]', '0x0'), ('Foo.array[2]', '0x0'), ('Foo.pointer', '0x0')] >>> foo.to_json(nested=True) '{"stream": "", "float": 0.0, "structure": {"decimal": 0}, "array": ["0x0", "0x0", "0x0"], "pointer": {"value": "0x0", "data": null}}' >>> foo.save('foo.ini') File `foo.ini`: .. code-block:: ini [Foo] stream = float = 0.0 structure.decimal = 0 array[0] = 0x0 array[1] = 0x0 array[2] = 0x0 pointer = 0x0 """ options['save'] = True parser = ConfigParser() parser.read_dict(self.to_dict(*attributes, **options)) with open(file, 'w') as file_handle: parser.write(file_handle) file_handle.close()
def function[save, parameter[self, file]]: constant[ Saves the selected field *attributes* for each :class:`Field` *nested* in the `Container` to an ``.ini`` *file*. :param str file: name and location of the ``.ini`` *file*. :param str attributes: selected :class:`Field` attributes. Fallback is the field :attr:`~Field.value`. :keyword str section: section in the ``.ini`` file to look for the :class:`Field` values of the `Container`. If no *section* is specified the class name of the instance is used. :keyword bool nested: if ``True`` all :class:`Pointer` fields in the `Container` saves their referenced :attr:`~Pointer.data` object field attributes as well (chained method call). Example: >>> class Foo(Structure): ... def __init__(self): ... super().__init__() ... self.stream = Stream() ... self.float = Float() ... self.structure = Structure() ... self.structure.decimal = Decimal(8) ... self.array = Array(Byte, 3) ... self.pointer = Pointer() >>> foo = Foo() >>> foo.to_list(nested=True) [('Foo.stream', ''), ('Foo.float', 0.0), ('Foo.structure.decimal', 0), ('Foo.array[0]', '0x0'), ('Foo.array[1]', '0x0'), ('Foo.array[2]', '0x0'), ('Foo.pointer', '0x0')] >>> foo.to_json(nested=True) '{"stream": "", "float": 0.0, "structure": {"decimal": 0}, "array": ["0x0", "0x0", "0x0"], "pointer": {"value": "0x0", "data": null}}' >>> foo.save('foo.ini') File `foo.ini`: .. code-block:: ini [Foo] stream = float = 0.0 structure.decimal = 0 array[0] = 0x0 array[1] = 0x0 array[2] = 0x0 pointer = 0x0 ] call[name[options]][constant[save]] assign[=] constant[True] variable[parser] assign[=] call[name[ConfigParser], parameter[]] call[name[parser].read_dict, parameter[call[name[self].to_dict, parameter[<ast.Starred object at 0x7da2054a78e0>]]]] with call[name[open], parameter[name[file], constant[w]]] begin[:] call[name[parser].write, parameter[name[file_handle]]] call[name[file_handle].close, parameter[]]
keyword[def] identifier[save] ( identifier[self] , identifier[file] ,* identifier[attributes] ,** identifier[options] ): literal[string] identifier[options] [ literal[string] ]= keyword[True] identifier[parser] = identifier[ConfigParser] () identifier[parser] . identifier[read_dict] ( identifier[self] . identifier[to_dict] (* identifier[attributes] ,** identifier[options] )) keyword[with] identifier[open] ( identifier[file] , literal[string] ) keyword[as] identifier[file_handle] : identifier[parser] . identifier[write] ( identifier[file_handle] ) identifier[file_handle] . identifier[close] ()
def save(self, file, *attributes, **options): """ Saves the selected field *attributes* for each :class:`Field` *nested* in the `Container` to an ``.ini`` *file*. :param str file: name and location of the ``.ini`` *file*. :param str attributes: selected :class:`Field` attributes. Fallback is the field :attr:`~Field.value`. :keyword str section: section in the ``.ini`` file to look for the :class:`Field` values of the `Container`. If no *section* is specified the class name of the instance is used. :keyword bool nested: if ``True`` all :class:`Pointer` fields in the `Container` saves their referenced :attr:`~Pointer.data` object field attributes as well (chained method call). Example: >>> class Foo(Structure): ... def __init__(self): ... super().__init__() ... self.stream = Stream() ... self.float = Float() ... self.structure = Structure() ... self.structure.decimal = Decimal(8) ... self.array = Array(Byte, 3) ... self.pointer = Pointer() >>> foo = Foo() >>> foo.to_list(nested=True) [('Foo.stream', ''), ('Foo.float', 0.0), ('Foo.structure.decimal', 0), ('Foo.array[0]', '0x0'), ('Foo.array[1]', '0x0'), ('Foo.array[2]', '0x0'), ('Foo.pointer', '0x0')] >>> foo.to_json(nested=True) '{"stream": "", "float": 0.0, "structure": {"decimal": 0}, "array": ["0x0", "0x0", "0x0"], "pointer": {"value": "0x0", "data": null}}' >>> foo.save('foo.ini') File `foo.ini`: .. code-block:: ini [Foo] stream = float = 0.0 structure.decimal = 0 array[0] = 0x0 array[1] = 0x0 array[2] = 0x0 pointer = 0x0 """ options['save'] = True parser = ConfigParser() parser.read_dict(self.to_dict(*attributes, **options)) with open(file, 'w') as file_handle: parser.write(file_handle) # depends on [control=['with'], data=['file_handle']] file_handle.close()
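A round-trip sketch: the `.ini` written by `save` can be read back with the standard-library `ConfigParser`. This assumes the `foo.ini` produced in the docstring example:

```python
from configparser import ConfigParser

parser = ConfigParser()
parser.read('foo.ini')
# section name defaults to the container's class name
print(parser['Foo']['float'])              # '0.0'
print(parser['Foo']['structure.decimal'])  # '0'
```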
def split_once(self, horizontal: bool, position: int) -> None:
    """Split this partition into 2 sub-partitions.

    Args:
        horizontal (bool):
        position (int):
    """
    cdata = self._as_cdata()
    lib.TCOD_bsp_split_once(cdata, horizontal, position)
    self._unpack_bsp_tree(cdata)
def function[split_once, parameter[self, horizontal, position]]: constant[Split this partition into 2 sub-partitions. Args: horizontal (bool): position (int): ] variable[cdata] assign[=] call[name[self]._as_cdata, parameter[]] call[name[lib].TCOD_bsp_split_once, parameter[name[cdata], name[horizontal], name[position]]] call[name[self]._unpack_bsp_tree, parameter[name[cdata]]]
keyword[def] identifier[split_once] ( identifier[self] , identifier[horizontal] : identifier[bool] , identifier[position] : identifier[int] )-> keyword[None] : literal[string] identifier[cdata] = identifier[self] . identifier[_as_cdata] () identifier[lib] . identifier[TCOD_bsp_split_once] ( identifier[cdata] , identifier[horizontal] , identifier[position] ) identifier[self] . identifier[_unpack_bsp_tree] ( identifier[cdata] )
def split_once(self, horizontal: bool, position: int) -> None:
    """Split this partition into 2 sub-partitions.

    Args:
        horizontal (bool):
        position (int):
    """
    cdata = self._as_cdata()
    lib.TCOD_bsp_split_once(cdata, horizontal, position)
    self._unpack_bsp_tree(cdata)
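A usage sketch assuming the python-tcod package, splitting an 80x50 node vertically at x=40:

```python
import tcod.bsp  # assumed python-tcod package

bsp = tcod.bsp.BSP(x=0, y=0, width=80, height=50)
bsp.split_once(False, 40)  # horizontal=False, so a vertical split at x=40
left, right = bsp.children
print(left.width, right.width)  # 40 40
```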
def acknowledge_streamer(self, index, ack, force):
    """Acknowledge a streamer value as received from the remote side."""

    if index >= len(self.graph.streamers):
        return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)

    old_ack = self.streamer_acks.get(index, 0)
    if ack != 0:
        if ack <= old_ack and not force:
            return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)

        self.streamer_acks[index] = ack

    current_ack = self.streamer_acks.get(index, 0)
    return self._seek_streamer(index, current_ack)
def function[acknowledge_streamer, parameter[self, index, ack, force]]: constant[Acknowledge a streamer value as received from the remote side.] if compare[name[index] greater_or_equal[>=] call[name[len], parameter[name[self].graph.streamers]]] begin[:] return[call[name[_pack_sgerror], parameter[name[SensorGraphError].STREAMER_NOT_ALLOCATED]]] variable[old_ack] assign[=] call[name[self].streamer_acks.get, parameter[name[index], constant[0]]] if compare[name[ack] not_equal[!=] constant[0]] begin[:] if <ast.BoolOp object at 0x7da207f9bb80> begin[:] return[call[name[_pack_sgerror], parameter[name[SensorGraphError].OLD_ACKNOWLEDGE_UPDATE]]] call[name[self].streamer_acks][name[index]] assign[=] name[ack] variable[current_ack] assign[=] call[name[self].streamer_acks.get, parameter[name[index], constant[0]]] return[call[name[self]._seek_streamer, parameter[name[index], name[current_ack]]]]
keyword[def] identifier[acknowledge_streamer] ( identifier[self] , identifier[index] , identifier[ack] , identifier[force] ): literal[string] keyword[if] identifier[index] >= identifier[len] ( identifier[self] . identifier[graph] . identifier[streamers] ): keyword[return] identifier[_pack_sgerror] ( identifier[SensorGraphError] . identifier[STREAMER_NOT_ALLOCATED] ) identifier[old_ack] = identifier[self] . identifier[streamer_acks] . identifier[get] ( identifier[index] , literal[int] ) keyword[if] identifier[ack] != literal[int] : keyword[if] identifier[ack] <= identifier[old_ack] keyword[and] keyword[not] identifier[force] : keyword[return] identifier[_pack_sgerror] ( identifier[SensorGraphError] . identifier[OLD_ACKNOWLEDGE_UPDATE] ) identifier[self] . identifier[streamer_acks] [ identifier[index] ]= identifier[ack] identifier[current_ack] = identifier[self] . identifier[streamer_acks] . identifier[get] ( identifier[index] , literal[int] ) keyword[return] identifier[self] . identifier[_seek_streamer] ( identifier[index] , identifier[current_ack] )
def acknowledge_streamer(self, index, ack, force):
    """Acknowledge a streamer value as received from the remote side."""
    if index >= len(self.graph.streamers):
        return _pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED)  # depends on [control=['if'], data=[]]
    old_ack = self.streamer_acks.get(index, 0)
    if ack != 0:
        if ack <= old_ack and (not force):
            return _pack_sgerror(SensorGraphError.OLD_ACKNOWLEDGE_UPDATE)  # depends on [control=['if'], data=[]]
        self.streamer_acks[index] = ack  # depends on [control=['if'], data=['ack']]
    current_ack = self.streamer_acks.get(index, 0)
    return self._seek_streamer(index, current_ack)
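A sketch of the happy path with a `SimpleNamespace` stand-in; `_pack_sgerror` and `SensorGraphError` are module-level helpers in the real code and are only reached on error paths here:

```python
from types import SimpleNamespace

# Stand-in for the sensor-graph subsystem; values are hypothetical.
ctrl = SimpleNamespace(
    graph=SimpleNamespace(streamers=[object()]),  # streamer 0 is allocated
    streamer_acks={0: 5},
    _seek_streamer=lambda index, ack: 0,          # pretend the seek succeeds
)
print(acknowledge_streamer(ctrl, 0, 10, False))   # 0: ack advanced 5 -> 10
print(ctrl.streamer_acks)                         # {0: 10}
```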
def add_arguments(parser):
    """ adds arguments for the deploy command """
    parser.add_argument('-e', '--environment', help='Environment name', required=True)
    parser.add_argument('-w', '--dont-wait', help='Skip waiting for the init to finish', action='store_true')
    parser.add_argument('-l', '--version-label', help='Version label', required=False)
def function[add_arguments, parameter[parser]]: constant[ adds arguments for the deploy command ] call[name[parser].add_argument, parameter[constant[-e], constant[--environment]]] call[name[parser].add_argument, parameter[constant[-w], constant[--dont-wait]]] call[name[parser].add_argument, parameter[constant[-l], constant[--version-label]]]
keyword[def] identifier[add_arguments] ( identifier[parser] ): literal[string] identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[True] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[action] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[required] = keyword[False] )
def add_arguments(parser):
    """ adds arguments for the deploy command """
    parser.add_argument('-e', '--environment', help='Environment name', required=True)
    parser.add_argument('-w', '--dont-wait', help='Skip waiting for the init to finish', action='store_true')
    parser.add_argument('-l', '--version-label', help='Version label', required=False)
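A wiring sketch showing the parsed namespace; note that argparse maps `--dont-wait` to `args.dont_wait`:

```python
import argparse

parser = argparse.ArgumentParser(prog='deploy')
add_arguments(parser)
args = parser.parse_args(['-e', 'staging', '-w'])
print(args.environment, args.dont_wait, args.version_label)  # staging True None
```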
def cellbrowser( adata, data_dir, data_name, embedding_keys = None, annot_keys = ["louvain", "percent_mito", "n_genes", "n_counts"], cluster_field = "louvain", nb_marker = 50, skip_matrix = False, html_dir = None, port = None, do_debug = False ): """ Export adata to a UCSC Cell Browser project directory. If `html_dir` is set, subsequently build the html files from the project directory into `html_dir`. If `port` is set, start an HTTP server in the background and serve `html_dir` on `port`. By default, export all gene expression data from `adata.raw`, the annotations `louvain`, `percent_mito`, `n_genes` and `n_counts` and the top `nb_marker` cluster markers. All existing files in data_dir are overwritten, except cellbrowser.conf. See `UCSC Cellbrowser <https://github.com/maximilianh/cellBrowser>`__ for details. Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix data_dir : `str` Path to directory for exported Cell Browser files. Usually these are the files `exprMatrix.tsv.gz`, `meta.tsv`, coordinate files like `tsne.coords.tsv`, and cluster marker gene lists like `markers.tsv`. A file `cellbrowser.conf` is also created with pointers to these files. As a result, each adata object should have its own project_dir. data_name : `str` Name of dataset in Cell Browser, a string without special characters. This is written to `data_dir`/cellbrowser.conf. Ideally this is a short unique name for the dataset, like "pbmc3k" or "tabulamuris". embedding_keys: `list` of `str` or `dict` of `key (str)`->`display label (str)` 2-D embeddings in `adata.obsm` to export. The prefix "`X_`" or "`X_draw_graph_`" is not necessary. Coordinates missing from `adata` are skipped. By default, these keys are tried: ["tsne", "umap", "pagaFa", "pagaFr", "pagaUmap", "phate", "fa", "fr", "kk", "drl", "rt"]. For these, default display labels are automatically used. For other values, you can specify a dictionary instead of a list, the values of the dictionary are then the display labels for the coordinates, e.g. `{'tsne' : "t-SNE by Scanpy"}` annot_keys: `list` of `str` or `dict` of `key (str)`->`display label (str)` Annotations in `adata.obsm` to export. Can be a dictionary with key -> display label. skip_matrix: `boolean` Do not export the matrix. If you had previously exported this adata into the same `data_dir`, then there is no need to export the whole matrix again. This option will make the export a lot faster, e.g. when only coordinates or meta data were changed. html_dir: `str` If this variable is set, the export will build html files from `data_dir` to `html_dir`, creating html/js/json files. Usually there is one global html output directory for all datasets. Often, `html_dir` is located under a webserver's (like Apache) htdocs directory or is copied to one. A directory `html_dir`/`project_name` will be created and an index.html will be created under `html_dir` for all subdirectories. Existing files will be overwritten. If do not to use html_dir, you can use the command line tool `cbBuild` to build the html directory. port: `int` If this variable and `html_dir` are set, Python's built-in web server will be spawned as a daemon in the background and serve the files under `html_dir`. To kill the process, call `cellbrowser.cellbrowser.stop()`. do_debug: `boolean` Activate debugging output Examples -------- See this `tutorial <https://github.com/theislab/scanpy_usage/tree/master/181126_Cellbrowser_exports>`__. 
""" try: import cellbrowser.cellbrowser as cb except ImportError: print("The package cellbrowser is not installed. Install with 'pip " "install cellbrowser' and retry.") cb.setDebug(do_debug) cb.scanpyToCellbrowser(adata, data_dir, data_name, coordFields=embedding_keys, metaFields=annot_keys, clusterField=cluster_field, nb_marker=nb_marker, skipMatrix=skip_matrix, doDebug = None ) if html_dir: cb.build(data_dir, html_dir, doDebug=None) if port: cb.serve(html_dir, port)
def function[cellbrowser, parameter[adata, data_dir, data_name, embedding_keys, annot_keys, cluster_field, nb_marker, skip_matrix, html_dir, port, do_debug]]: constant[ Export adata to a UCSC Cell Browser project directory. If `html_dir` is set, subsequently build the html files from the project directory into `html_dir`. If `port` is set, start an HTTP server in the background and serve `html_dir` on `port`. By default, export all gene expression data from `adata.raw`, the annotations `louvain`, `percent_mito`, `n_genes` and `n_counts` and the top `nb_marker` cluster markers. All existing files in data_dir are overwritten, except cellbrowser.conf. See `UCSC Cellbrowser <https://github.com/maximilianh/cellBrowser>`__ for details. Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix data_dir : `str` Path to directory for exported Cell Browser files. Usually these are the files `exprMatrix.tsv.gz`, `meta.tsv`, coordinate files like `tsne.coords.tsv`, and cluster marker gene lists like `markers.tsv`. A file `cellbrowser.conf` is also created with pointers to these files. As a result, each adata object should have its own project_dir. data_name : `str` Name of dataset in Cell Browser, a string without special characters. This is written to `data_dir`/cellbrowser.conf. Ideally this is a short unique name for the dataset, like "pbmc3k" or "tabulamuris". embedding_keys: `list` of `str` or `dict` of `key (str)`->`display label (str)` 2-D embeddings in `adata.obsm` to export. The prefix "`X_`" or "`X_draw_graph_`" is not necessary. Coordinates missing from `adata` are skipped. By default, these keys are tried: ["tsne", "umap", "pagaFa", "pagaFr", "pagaUmap", "phate", "fa", "fr", "kk", "drl", "rt"]. For these, default display labels are automatically used. For other values, you can specify a dictionary instead of a list, the values of the dictionary are then the display labels for the coordinates, e.g. `{'tsne' : "t-SNE by Scanpy"}` annot_keys: `list` of `str` or `dict` of `key (str)`->`display label (str)` Annotations in `adata.obsm` to export. Can be a dictionary with key -> display label. skip_matrix: `boolean` Do not export the matrix. If you had previously exported this adata into the same `data_dir`, then there is no need to export the whole matrix again. This option will make the export a lot faster, e.g. when only coordinates or meta data were changed. html_dir: `str` If this variable is set, the export will build html files from `data_dir` to `html_dir`, creating html/js/json files. Usually there is one global html output directory for all datasets. Often, `html_dir` is located under a webserver's (like Apache) htdocs directory or is copied to one. A directory `html_dir`/`project_name` will be created and an index.html will be created under `html_dir` for all subdirectories. Existing files will be overwritten. If do not to use html_dir, you can use the command line tool `cbBuild` to build the html directory. port: `int` If this variable and `html_dir` are set, Python's built-in web server will be spawned as a daemon in the background and serve the files under `html_dir`. To kill the process, call `cellbrowser.cellbrowser.stop()`. do_debug: `boolean` Activate debugging output Examples -------- See this `tutorial <https://github.com/theislab/scanpy_usage/tree/master/181126_Cellbrowser_exports>`__. 
] <ast.Try object at 0x7da18f58c2b0> call[name[cb].setDebug, parameter[name[do_debug]]] call[name[cb].scanpyToCellbrowser, parameter[name[adata], name[data_dir], name[data_name]]] if name[html_dir] begin[:] call[name[cb].build, parameter[name[data_dir], name[html_dir]]] if name[port] begin[:] call[name[cb].serve, parameter[name[html_dir], name[port]]]
keyword[def] identifier[cellbrowser] ( identifier[adata] , identifier[data_dir] , identifier[data_name] , identifier[embedding_keys] = keyword[None] , identifier[annot_keys] =[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[cluster_field] = literal[string] , identifier[nb_marker] = literal[int] , identifier[skip_matrix] = keyword[False] , identifier[html_dir] = keyword[None] , identifier[port] = keyword[None] , identifier[do_debug] = keyword[False] ): literal[string] keyword[try] : keyword[import] identifier[cellbrowser] . identifier[cellbrowser] keyword[as] identifier[cb] keyword[except] identifier[ImportError] : identifier[print] ( literal[string] literal[string] ) identifier[cb] . identifier[setDebug] ( identifier[do_debug] ) identifier[cb] . identifier[scanpyToCellbrowser] ( identifier[adata] , identifier[data_dir] , identifier[data_name] , identifier[coordFields] = identifier[embedding_keys] , identifier[metaFields] = identifier[annot_keys] , identifier[clusterField] = identifier[cluster_field] , identifier[nb_marker] = identifier[nb_marker] , identifier[skipMatrix] = identifier[skip_matrix] , identifier[doDebug] = keyword[None] ) keyword[if] identifier[html_dir] : identifier[cb] . identifier[build] ( identifier[data_dir] , identifier[html_dir] , identifier[doDebug] = keyword[None] ) keyword[if] identifier[port] : identifier[cb] . identifier[serve] ( identifier[html_dir] , identifier[port] )
def cellbrowser(adata, data_dir, data_name, embedding_keys=None, annot_keys=['louvain', 'percent_mito', 'n_genes', 'n_counts'], cluster_field='louvain', nb_marker=50, skip_matrix=False, html_dir=None, port=None, do_debug=False): """ Export adata to a UCSC Cell Browser project directory. If `html_dir` is set, subsequently build the html files from the project directory into `html_dir`. If `port` is set, start an HTTP server in the background and serve `html_dir` on `port`. By default, export all gene expression data from `adata.raw`, the annotations `louvain`, `percent_mito`, `n_genes` and `n_counts` and the top `nb_marker` cluster markers. All existing files in data_dir are overwritten, except cellbrowser.conf. See `UCSC Cellbrowser <https://github.com/maximilianh/cellBrowser>`__ for details. Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix data_dir : `str` Path to directory for exported Cell Browser files. Usually these are the files `exprMatrix.tsv.gz`, `meta.tsv`, coordinate files like `tsne.coords.tsv`, and cluster marker gene lists like `markers.tsv`. A file `cellbrowser.conf` is also created with pointers to these files. As a result, each adata object should have its own project_dir. data_name : `str` Name of dataset in Cell Browser, a string without special characters. This is written to `data_dir`/cellbrowser.conf. Ideally this is a short unique name for the dataset, like "pbmc3k" or "tabulamuris". embedding_keys: `list` of `str` or `dict` of `key (str)`->`display label (str)` 2-D embeddings in `adata.obsm` to export. The prefix "`X_`" or "`X_draw_graph_`" is not necessary. Coordinates missing from `adata` are skipped. By default, these keys are tried: ["tsne", "umap", "pagaFa", "pagaFr", "pagaUmap", "phate", "fa", "fr", "kk", "drl", "rt"]. For these, default display labels are automatically used. For other values, you can specify a dictionary instead of a list, the values of the dictionary are then the display labels for the coordinates, e.g. `{'tsne' : "t-SNE by Scanpy"}` annot_keys: `list` of `str` or `dict` of `key (str)`->`display label (str)` Annotations in `adata.obsm` to export. Can be a dictionary with key -> display label. skip_matrix: `boolean` Do not export the matrix. If you had previously exported this adata into the same `data_dir`, then there is no need to export the whole matrix again. This option will make the export a lot faster, e.g. when only coordinates or meta data were changed. html_dir: `str` If this variable is set, the export will build html files from `data_dir` to `html_dir`, creating html/js/json files. Usually there is one global html output directory for all datasets. Often, `html_dir` is located under a webserver's (like Apache) htdocs directory or is copied to one. A directory `html_dir`/`project_name` will be created and an index.html will be created under `html_dir` for all subdirectories. Existing files will be overwritten. If do not to use html_dir, you can use the command line tool `cbBuild` to build the html directory. port: `int` If this variable and `html_dir` are set, Python's built-in web server will be spawned as a daemon in the background and serve the files under `html_dir`. To kill the process, call `cellbrowser.cellbrowser.stop()`. do_debug: `boolean` Activate debugging output Examples -------- See this `tutorial <https://github.com/theislab/scanpy_usage/tree/master/181126_Cellbrowser_exports>`__. 
""" try: import cellbrowser.cellbrowser as cb # depends on [control=['try'], data=[]] except ImportError: print("The package cellbrowser is not installed. Install with 'pip install cellbrowser' and retry.") # depends on [control=['except'], data=[]] cb.setDebug(do_debug) cb.scanpyToCellbrowser(adata, data_dir, data_name, coordFields=embedding_keys, metaFields=annot_keys, clusterField=cluster_field, nb_marker=nb_marker, skipMatrix=skip_matrix, doDebug=None) if html_dir: cb.build(data_dir, html_dir, doDebug=None) # depends on [control=['if'], data=[]] if port: cb.serve(html_dir, port) # depends on [control=['if'], data=[]]
def sync_headers(cloud_obj, headers=None, header_patterns=HEADER_PATTERNS):
    """
    Overwrites the given cloud_obj's headers with the ones given as ``headers``
    and adds additional headers as defined in the HEADERS setting depending on
    the cloud_obj's file name.
    """
    if headers is None:
        headers = {}

    # don't set headers on directories
    content_type = getattr(cloud_obj, "content_type", None)
    if content_type == "application/directory":
        return

    matched_headers = {}
    for pattern, pattern_headers in header_patterns:
        if pattern.match(cloud_obj.name):
            matched_headers.update(pattern_headers.copy())
    # preserve headers already set
    matched_headers.update(cloud_obj.headers)
    # explicitly set headers overwrite matches and already set headers
    matched_headers.update(headers)
    if matched_headers != cloud_obj.headers:
        cloud_obj.headers = matched_headers
        cloud_obj.sync_metadata()
def function[sync_headers, parameter[cloud_obj, headers, header_patterns]]: constant[ Overwrites the given cloud_obj's headers with the ones given as ``headers` and adds additional headers as defined in the HEADERS setting depending on the cloud_obj's file name. ] if compare[name[headers] is constant[None]] begin[:] variable[headers] assign[=] dictionary[[], []] variable[content_type] assign[=] call[name[getattr], parameter[name[cloud_obj], constant[content_type], constant[None]]] if compare[name[content_type] equal[==] constant[application/directory]] begin[:] return[None] variable[matched_headers] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b19cac50>, <ast.Name object at 0x7da1b19cb160>]]] in starred[name[header_patterns]] begin[:] if call[name[pattern].match, parameter[name[cloud_obj].name]] begin[:] call[name[matched_headers].update, parameter[call[name[pattern_headers].copy, parameter[]]]] call[name[matched_headers].update, parameter[name[cloud_obj].headers]] call[name[matched_headers].update, parameter[name[headers]]] if compare[name[matched_headers] not_equal[!=] name[cloud_obj].headers] begin[:] name[cloud_obj].headers assign[=] name[matched_headers] call[name[cloud_obj].sync_metadata, parameter[]]
keyword[def] identifier[sync_headers] ( identifier[cloud_obj] , identifier[headers] = keyword[None] , identifier[header_patterns] = identifier[HEADER_PATTERNS] ): literal[string] keyword[if] identifier[headers] keyword[is] keyword[None] : identifier[headers] ={} identifier[content_type] = identifier[getattr] ( identifier[cloud_obj] , literal[string] , keyword[None] ) keyword[if] identifier[content_type] == literal[string] : keyword[return] identifier[matched_headers] ={} keyword[for] identifier[pattern] , identifier[pattern_headers] keyword[in] identifier[header_patterns] : keyword[if] identifier[pattern] . identifier[match] ( identifier[cloud_obj] . identifier[name] ): identifier[matched_headers] . identifier[update] ( identifier[pattern_headers] . identifier[copy] ()) identifier[matched_headers] . identifier[update] ( identifier[cloud_obj] . identifier[headers] ) identifier[matched_headers] . identifier[update] ( identifier[headers] ) keyword[if] identifier[matched_headers] != identifier[cloud_obj] . identifier[headers] : identifier[cloud_obj] . identifier[headers] = identifier[matched_headers] identifier[cloud_obj] . identifier[sync_metadata] ()
def sync_headers(cloud_obj, headers=None, header_patterns=HEADER_PATTERNS):
    """
    Overwrites the given cloud_obj's headers with the ones given as ``headers``
    and adds additional headers as defined in the HEADERS setting depending on
    the cloud_obj's file name.
    """
    if headers is None:
        headers = {}  # depends on [control=['if'], data=['headers']]
    # don't set headers on directories
    content_type = getattr(cloud_obj, 'content_type', None)
    if content_type == 'application/directory':
        return  # depends on [control=['if'], data=[]]
    matched_headers = {}
    for (pattern, pattern_headers) in header_patterns:
        if pattern.match(cloud_obj.name):
            matched_headers.update(pattern_headers.copy())  # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=[]]
    # preserve headers already set
    matched_headers.update(cloud_obj.headers)
    # explicitly set headers overwrite matches and already set headers
    matched_headers.update(headers)
    if matched_headers != cloud_obj.headers:
        cloud_obj.headers = matched_headers
        cloud_obj.sync_metadata()  # depends on [control=['if'], data=['matched_headers']]
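A sketch with a stand-in cloud object; explicitly passed headers win over pattern matches and anything already set:

```python
from types import SimpleNamespace

# Hypothetical cloud-storage object exposing the attributes the function uses.
obj = SimpleNamespace(
    name='static/css/site.css',
    headers={'X-Existing': '1'},
    content_type='text/css',
    sync_metadata=lambda: print('metadata synced'),
)
sync_headers(obj, headers={'Cache-Control': 'max-age=86400'})
print(obj.headers)  # existing headers preserved, Cache-Control added
```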
def validate(self, metadata, path, value):
    """
    Validate this requirement.

    """
    if isinstance(value, Requirement):
        # if the RHS is still a Requirement object, it was not set
        if metadata.testing and self.mock_value is not None:
            value = self.mock_value
        elif self.default_value is not None:
            value = self.default_value
        elif not value.required:
            return None
        else:
            raise ValidationError(f"Missing required configuration for: {'.'.join(path)}")

    try:
        return self.type(value)
    except ValueError:
        raise ValidationError(f"Missing required configuration for: {'.'.join(path)}: {value}")
def function[validate, parameter[self, metadata, path, value]]: constant[ Validate this requirement. ] if call[name[isinstance], parameter[name[value], name[Requirement]]] begin[:] if <ast.BoolOp object at 0x7da1b0c40640> begin[:] variable[value] assign[=] name[self].mock_value <ast.Try object at 0x7da1b0c917b0>
keyword[def] identifier[validate] ( identifier[self] , identifier[metadata] , identifier[path] , identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[Requirement] ): keyword[if] identifier[metadata] . identifier[testing] keyword[and] identifier[self] . identifier[mock_value] keyword[is] keyword[not] keyword[None] : identifier[value] = identifier[self] . identifier[mock_value] keyword[elif] identifier[self] . identifier[default_value] keyword[is] keyword[not] keyword[None] : identifier[value] = identifier[self] . identifier[default_value] keyword[elif] keyword[not] identifier[value] . identifier[required] : keyword[return] keyword[None] keyword[else] : keyword[raise] identifier[ValidationError] ( literal[string] ) keyword[try] : keyword[return] identifier[self] . identifier[type] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValidationError] ( literal[string] )
def validate(self, metadata, path, value):
    """
    Validate this requirement.

    """
    if isinstance(value, Requirement):
        # if the RHS is still a Requirement object, it was not set
        if metadata.testing and self.mock_value is not None:
            value = self.mock_value  # depends on [control=['if'], data=[]]
        elif self.default_value is not None:
            value = self.default_value  # depends on [control=['if'], data=[]]
        elif not value.required:
            return None  # depends on [control=['if'], data=[]]
        else:
            raise ValidationError(f"Missing required configuration for: {'.'.join(path)}")
        # depends on [control=['if'], data=[]]
    try:
        return self.type(value)  # depends on [control=['try'], data=[]]
    except ValueError:
        raise ValidationError(f"Missing required configuration for: {'.'.join(path)}: {value}")  # depends on [control=['except'], data=[]]
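A sketch of the coercion path with `SimpleNamespace` stand-ins; the plain string value skips the `Requirement` branch and is coerced by `self.type`:

```python
from types import SimpleNamespace

# Hypothetical concrete requirement that coerces to int, with no mock/default.
req = SimpleNamespace(type=int, mock_value=None, default_value=None)
meta = SimpleNamespace(testing=False)
print(validate(req, meta, ['db', 'port'], '5432'))  # 5432
# A non-numeric value, e.g. 'abc', would raise ValidationError instead.
```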
def commit_sell(self, account_id, sell_id, **params):
    """https://developers.coinbase.com/api/v2#commit-a-sell"""
    response = self._post(
        'v2', 'accounts', account_id, 'sells', sell_id, 'commit',
        data=params)
    return self._make_api_object(response, Sell)
def function[commit_sell, parameter[self, account_id, sell_id]]: constant[https://developers.coinbase.com/api/v2#commit-a-sell] variable[response] assign[=] call[name[self]._post, parameter[constant[v2], constant[accounts], name[account_id], constant[sells], name[sell_id], constant[commit]]] return[call[name[self]._make_api_object, parameter[name[response], name[Sell]]]]
keyword[def] identifier[commit_sell] ( identifier[self] , identifier[account_id] , identifier[sell_id] ,** identifier[params] ): literal[string] identifier[response] = identifier[self] . identifier[_post] ( literal[string] , literal[string] , identifier[account_id] , literal[string] , identifier[sell_id] , literal[string] , identifier[data] = identifier[params] ) keyword[return] identifier[self] . identifier[_make_api_object] ( identifier[response] , identifier[Sell] )
def commit_sell(self, account_id, sell_id, **params):
    """https://developers.coinbase.com/api/v2#commit-a-sell"""
    response = self._post('v2', 'accounts', account_id, 'sells', sell_id, 'commit', data=params)
    return self._make_api_object(response, Sell)
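A hedged usage sketch assuming the official coinbase client package layout; the ids and credentials are placeholders:

```python
from coinbase.wallet.client import Client  # assumed package layout

client = Client('<api_key>', '<api_secret>')
# commit a previously created (uncommitted) sell on an account
sell = client.commit_sell('<account_id>', '<sell_id>')
print(sell.status)
```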
async def container(self, container=None, container_type=None, params=None):
    """
    Loads/dumps container
    :return:
    """
    if hasattr(container_type, 'blob_serialize'):
        container = container_type() if container is None else container
        return await container.blob_serialize(self, elem=container, elem_type=container_type, params=params)

    # Container entry version + container
    if self.writing:
        return await self.container_dump(container, container_type, params)
    else:
        return await self.container_load(container_type, params=params, container=container)
<ast.AsyncFunctionDef object at 0x7da1b24071f0>
keyword[async] keyword[def] identifier[container] ( identifier[self] , identifier[container] = keyword[None] , identifier[container_type] = keyword[None] , identifier[params] = keyword[None] ): literal[string] keyword[if] identifier[hasattr] ( identifier[container_type] , literal[string] ): identifier[container] = identifier[container_type] () keyword[if] identifier[container] keyword[is] keyword[None] keyword[else] identifier[container] keyword[return] keyword[await] identifier[container] . identifier[blob_serialize] ( identifier[self] , identifier[elem] = identifier[container] , identifier[elem_type] = identifier[container_type] , identifier[params] = identifier[params] ) keyword[if] identifier[self] . identifier[writing] : keyword[return] keyword[await] identifier[self] . identifier[container_dump] ( identifier[container] , identifier[container_type] , identifier[params] ) keyword[else] : keyword[return] keyword[await] identifier[self] . identifier[container_load] ( identifier[container_type] , identifier[params] = identifier[params] , identifier[container] = identifier[container] )
async def container(self, container=None, container_type=None, params=None):
    """
    Loads/dumps container
    :return:
    """
    if hasattr(container_type, 'blob_serialize'):
        container = container_type() if container is None else container
        return await container.blob_serialize(self, elem=container, elem_type=container_type, params=params)  # depends on [control=['if'], data=[]]
    # Container entry version + container
    if self.writing:
        return await self.container_dump(container, container_type, params)  # depends on [control=['if'], data=[]]
    else:
        return await self.container_load(container_type, params=params, container=container)
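A hedged round-trip sketch; `writer`/`reader` stand for archives constructed with `writing=True`/`False`, and `list_type` for a serializer container type, all assumed from the surrounding code:

```python
async def roundtrip(writer, reader, list_type):
    # dump on a writing archive, then load the same data back on a reading one
    await writer.container([1, 2, 3], container_type=list_type)
    return await reader.container(container_type=list_type)
```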
def lens_model_plot(ax, lensModel, kwargs_lens, numPix=500, deltaPix=0.01, sourcePos_x=0, sourcePos_y=0, point_source=False, with_caustics=False): """ plots a lens model (convergence) and the critical curves and caustics :param ax: :param kwargs_lens: :param numPix: :param deltaPix: :return: """ kwargs_data = sim_util.data_configure_simple(numPix, deltaPix) data = Data(kwargs_data) _frame_size = numPix * deltaPix _coords = data._coords x_grid, y_grid = data.coordinates lensModelExt = LensModelExtensions(lensModel) #ra_crit_list, dec_crit_list, ra_caustic_list, dec_caustic_list = lensModelExt.critical_curve_caustics( # kwargs_lens, compute_window=_frame_size, grid_scale=deltaPix/2.) x_grid1d = util.image2array(x_grid) y_grid1d = util.image2array(y_grid) kappa_result = lensModel.kappa(x_grid1d, y_grid1d, kwargs_lens) kappa_result = util.array2image(kappa_result) im = ax.matshow(np.log10(kappa_result), origin='lower', extent=[0, _frame_size, 0, _frame_size], cmap='Greys', vmin=-1, vmax=1) #, cmap=self._cmap, vmin=v_min, vmax=v_max) if with_caustics is True: ra_crit_list, dec_crit_list = lensModelExt.critical_curve_tiling(kwargs_lens, compute_window=_frame_size, start_scale=deltaPix, max_order=10) ra_caustic_list, dec_caustic_list = lensModel.ray_shooting(ra_crit_list, dec_crit_list, kwargs_lens) plot_line_set(ax, _coords, ra_caustic_list, dec_caustic_list, color='g') plot_line_set(ax, _coords, ra_crit_list, dec_crit_list, color='r') if point_source: from lenstronomy.LensModel.Solver.lens_equation_solver import LensEquationSolver solver = LensEquationSolver(lensModel) theta_x, theta_y = solver.image_position_from_source(sourcePos_x, sourcePos_y, kwargs_lens, min_distance=deltaPix, search_window=deltaPix*numPix) mag_images = lensModel.magnification(theta_x, theta_y, kwargs_lens) x_image, y_image = _coords.map_coord2pix(theta_x, theta_y) abc_list = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'] for i in range(len(x_image)): x_ = (x_image[i] + 0.5) * deltaPix y_ = (y_image[i] + 0.5) * deltaPix ax.plot(x_, y_, 'dk', markersize=4*(1 + np.log(np.abs(mag_images[i]))), alpha=0.5) ax.text(x_, y_, abc_list[i], fontsize=20, color='k') x_source, y_source = _coords.map_coord2pix(sourcePos_x, sourcePos_y) ax.plot((x_source + 0.5) * deltaPix, (y_source + 0.5) * deltaPix, '*k', markersize=10) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.autoscale(False) #image_position_plot(ax, _coords, self._kwargs_else) #source_position_plot(ax, self._coords, self._kwargs_source) return ax
def function[lens_model_plot, parameter[ax, lensModel, kwargs_lens, numPix, deltaPix, sourcePos_x, sourcePos_y, point_source, with_caustics]]: constant[ plots a lens model (convergence) and the critical curves and caustics :param ax: :param kwargs_lens: :param numPix: :param deltaPix: :return: ] variable[kwargs_data] assign[=] call[name[sim_util].data_configure_simple, parameter[name[numPix], name[deltaPix]]] variable[data] assign[=] call[name[Data], parameter[name[kwargs_data]]] variable[_frame_size] assign[=] binary_operation[name[numPix] * name[deltaPix]] variable[_coords] assign[=] name[data]._coords <ast.Tuple object at 0x7da18dc9a980> assign[=] name[data].coordinates variable[lensModelExt] assign[=] call[name[LensModelExtensions], parameter[name[lensModel]]] variable[x_grid1d] assign[=] call[name[util].image2array, parameter[name[x_grid]]] variable[y_grid1d] assign[=] call[name[util].image2array, parameter[name[y_grid]]] variable[kappa_result] assign[=] call[name[lensModel].kappa, parameter[name[x_grid1d], name[y_grid1d], name[kwargs_lens]]] variable[kappa_result] assign[=] call[name[util].array2image, parameter[name[kappa_result]]] variable[im] assign[=] call[name[ax].matshow, parameter[call[name[np].log10, parameter[name[kappa_result]]]]] if compare[name[with_caustics] is constant[True]] begin[:] <ast.Tuple object at 0x7da18dc98fa0> assign[=] call[name[lensModelExt].critical_curve_tiling, parameter[name[kwargs_lens]]] <ast.Tuple object at 0x7da18dc98a90> assign[=] call[name[lensModel].ray_shooting, parameter[name[ra_crit_list], name[dec_crit_list], name[kwargs_lens]]] call[name[plot_line_set], parameter[name[ax], name[_coords], name[ra_caustic_list], name[dec_caustic_list]]] call[name[plot_line_set], parameter[name[ax], name[_coords], name[ra_crit_list], name[dec_crit_list]]] if name[point_source] begin[:] from relative_module[lenstronomy.LensModel.Solver.lens_equation_solver] import module[LensEquationSolver] variable[solver] assign[=] call[name[LensEquationSolver], parameter[name[lensModel]]] <ast.Tuple object at 0x7da18dc9a3b0> assign[=] call[name[solver].image_position_from_source, parameter[name[sourcePos_x], name[sourcePos_y], name[kwargs_lens]]] variable[mag_images] assign[=] call[name[lensModel].magnification, parameter[name[theta_x], name[theta_y], name[kwargs_lens]]] <ast.Tuple object at 0x7da18dc9b4c0> assign[=] call[name[_coords].map_coord2pix, parameter[name[theta_x], name[theta_y]]] variable[abc_list] assign[=] list[[<ast.Constant object at 0x7da18dc982b0>, <ast.Constant object at 0x7da18dc98d30>, <ast.Constant object at 0x7da18dc99600>, <ast.Constant object at 0x7da18dc9a4d0>, <ast.Constant object at 0x7da18dc98f40>, <ast.Constant object at 0x7da18dc98b80>, <ast.Constant object at 0x7da18dc99990>, <ast.Constant object at 0x7da18dc99d80>, <ast.Constant object at 0x7da18dc9a080>, <ast.Constant object at 0x7da18dc99a50>, <ast.Constant object at 0x7da18dc9b8e0>]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[x_image]]]]]] begin[:] variable[x_] assign[=] binary_operation[binary_operation[call[name[x_image]][name[i]] + constant[0.5]] * name[deltaPix]] variable[y_] assign[=] binary_operation[binary_operation[call[name[y_image]][name[i]] + constant[0.5]] * name[deltaPix]] call[name[ax].plot, parameter[name[x_], name[y_], constant[dk]]] call[name[ax].text, parameter[name[x_], name[y_], call[name[abc_list]][name[i]]]] <ast.Tuple object at 0x7da20c6e6050> assign[=] call[name[_coords].map_coord2pix, parameter[name[sourcePos_x], 
name[sourcePos_y]]] call[name[ax].plot, parameter[binary_operation[binary_operation[name[x_source] + constant[0.5]] * name[deltaPix]], binary_operation[binary_operation[name[y_source] + constant[0.5]] * name[deltaPix]], constant[*k]]] call[call[name[ax].get_xaxis, parameter[]].set_visible, parameter[constant[False]]] call[call[name[ax].get_yaxis, parameter[]].set_visible, parameter[constant[False]]] call[name[ax].autoscale, parameter[constant[False]]] return[name[ax]]
keyword[def] identifier[lens_model_plot] ( identifier[ax] , identifier[lensModel] , identifier[kwargs_lens] , identifier[numPix] = literal[int] , identifier[deltaPix] = literal[int] , identifier[sourcePos_x] = literal[int] , identifier[sourcePos_y] = literal[int] , identifier[point_source] = keyword[False] , identifier[with_caustics] = keyword[False] ): literal[string] identifier[kwargs_data] = identifier[sim_util] . identifier[data_configure_simple] ( identifier[numPix] , identifier[deltaPix] ) identifier[data] = identifier[Data] ( identifier[kwargs_data] ) identifier[_frame_size] = identifier[numPix] * identifier[deltaPix] identifier[_coords] = identifier[data] . identifier[_coords] identifier[x_grid] , identifier[y_grid] = identifier[data] . identifier[coordinates] identifier[lensModelExt] = identifier[LensModelExtensions] ( identifier[lensModel] ) identifier[x_grid1d] = identifier[util] . identifier[image2array] ( identifier[x_grid] ) identifier[y_grid1d] = identifier[util] . identifier[image2array] ( identifier[y_grid] ) identifier[kappa_result] = identifier[lensModel] . identifier[kappa] ( identifier[x_grid1d] , identifier[y_grid1d] , identifier[kwargs_lens] ) identifier[kappa_result] = identifier[util] . identifier[array2image] ( identifier[kappa_result] ) identifier[im] = identifier[ax] . identifier[matshow] ( identifier[np] . identifier[log10] ( identifier[kappa_result] ), identifier[origin] = literal[string] , identifier[extent] =[ literal[int] , identifier[_frame_size] , literal[int] , identifier[_frame_size] ], identifier[cmap] = literal[string] , identifier[vmin] =- literal[int] , identifier[vmax] = literal[int] ) keyword[if] identifier[with_caustics] keyword[is] keyword[True] : identifier[ra_crit_list] , identifier[dec_crit_list] = identifier[lensModelExt] . identifier[critical_curve_tiling] ( identifier[kwargs_lens] , identifier[compute_window] = identifier[_frame_size] , identifier[start_scale] = identifier[deltaPix] , identifier[max_order] = literal[int] ) identifier[ra_caustic_list] , identifier[dec_caustic_list] = identifier[lensModel] . identifier[ray_shooting] ( identifier[ra_crit_list] , identifier[dec_crit_list] , identifier[kwargs_lens] ) identifier[plot_line_set] ( identifier[ax] , identifier[_coords] , identifier[ra_caustic_list] , identifier[dec_caustic_list] , identifier[color] = literal[string] ) identifier[plot_line_set] ( identifier[ax] , identifier[_coords] , identifier[ra_crit_list] , identifier[dec_crit_list] , identifier[color] = literal[string] ) keyword[if] identifier[point_source] : keyword[from] identifier[lenstronomy] . identifier[LensModel] . identifier[Solver] . identifier[lens_equation_solver] keyword[import] identifier[LensEquationSolver] identifier[solver] = identifier[LensEquationSolver] ( identifier[lensModel] ) identifier[theta_x] , identifier[theta_y] = identifier[solver] . identifier[image_position_from_source] ( identifier[sourcePos_x] , identifier[sourcePos_y] , identifier[kwargs_lens] , identifier[min_distance] = identifier[deltaPix] , identifier[search_window] = identifier[deltaPix] * identifier[numPix] ) identifier[mag_images] = identifier[lensModel] . identifier[magnification] ( identifier[theta_x] , identifier[theta_y] , identifier[kwargs_lens] ) identifier[x_image] , identifier[y_image] = identifier[_coords] . 
identifier[map_coord2pix] ( identifier[theta_x] , identifier[theta_y] ) identifier[abc_list] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[x_image] )): identifier[x_] =( identifier[x_image] [ identifier[i] ]+ literal[int] )* identifier[deltaPix] identifier[y_] =( identifier[y_image] [ identifier[i] ]+ literal[int] )* identifier[deltaPix] identifier[ax] . identifier[plot] ( identifier[x_] , identifier[y_] , literal[string] , identifier[markersize] = literal[int] *( literal[int] + identifier[np] . identifier[log] ( identifier[np] . identifier[abs] ( identifier[mag_images] [ identifier[i] ]))), identifier[alpha] = literal[int] ) identifier[ax] . identifier[text] ( identifier[x_] , identifier[y_] , identifier[abc_list] [ identifier[i] ], identifier[fontsize] = literal[int] , identifier[color] = literal[string] ) identifier[x_source] , identifier[y_source] = identifier[_coords] . identifier[map_coord2pix] ( identifier[sourcePos_x] , identifier[sourcePos_y] ) identifier[ax] . identifier[plot] (( identifier[x_source] + literal[int] )* identifier[deltaPix] ,( identifier[y_source] + literal[int] )* identifier[deltaPix] , literal[string] , identifier[markersize] = literal[int] ) identifier[ax] . identifier[get_xaxis] (). identifier[set_visible] ( keyword[False] ) identifier[ax] . identifier[get_yaxis] (). identifier[set_visible] ( keyword[False] ) identifier[ax] . identifier[autoscale] ( keyword[False] ) keyword[return] identifier[ax]
def lens_model_plot(ax, lensModel, kwargs_lens, numPix=500, deltaPix=0.01, sourcePos_x=0, sourcePos_y=0, point_source=False, with_caustics=False): """ plots a lens model (convergence) and the critical curves and caustics :param ax: :param kwargs_lens: :param numPix: :param deltaPix: :return: """ kwargs_data = sim_util.data_configure_simple(numPix, deltaPix) data = Data(kwargs_data) _frame_size = numPix * deltaPix _coords = data._coords (x_grid, y_grid) = data.coordinates lensModelExt = LensModelExtensions(lensModel) #ra_crit_list, dec_crit_list, ra_caustic_list, dec_caustic_list = lensModelExt.critical_curve_caustics( # kwargs_lens, compute_window=_frame_size, grid_scale=deltaPix/2.) x_grid1d = util.image2array(x_grid) y_grid1d = util.image2array(y_grid) kappa_result = lensModel.kappa(x_grid1d, y_grid1d, kwargs_lens) kappa_result = util.array2image(kappa_result) im = ax.matshow(np.log10(kappa_result), origin='lower', extent=[0, _frame_size, 0, _frame_size], cmap='Greys', vmin=-1, vmax=1) #, cmap=self._cmap, vmin=v_min, vmax=v_max) if with_caustics is True: (ra_crit_list, dec_crit_list) = lensModelExt.critical_curve_tiling(kwargs_lens, compute_window=_frame_size, start_scale=deltaPix, max_order=10) (ra_caustic_list, dec_caustic_list) = lensModel.ray_shooting(ra_crit_list, dec_crit_list, kwargs_lens) plot_line_set(ax, _coords, ra_caustic_list, dec_caustic_list, color='g') plot_line_set(ax, _coords, ra_crit_list, dec_crit_list, color='r') # depends on [control=['if'], data=[]] if point_source: from lenstronomy.LensModel.Solver.lens_equation_solver import LensEquationSolver solver = LensEquationSolver(lensModel) (theta_x, theta_y) = solver.image_position_from_source(sourcePos_x, sourcePos_y, kwargs_lens, min_distance=deltaPix, search_window=deltaPix * numPix) mag_images = lensModel.magnification(theta_x, theta_y, kwargs_lens) (x_image, y_image) = _coords.map_coord2pix(theta_x, theta_y) abc_list = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'] for i in range(len(x_image)): x_ = (x_image[i] + 0.5) * deltaPix y_ = (y_image[i] + 0.5) * deltaPix ax.plot(x_, y_, 'dk', markersize=4 * (1 + np.log(np.abs(mag_images[i]))), alpha=0.5) ax.text(x_, y_, abc_list[i], fontsize=20, color='k') # depends on [control=['for'], data=['i']] (x_source, y_source) = _coords.map_coord2pix(sourcePos_x, sourcePos_y) ax.plot((x_source + 0.5) * deltaPix, (y_source + 0.5) * deltaPix, '*k', markersize=10) # depends on [control=['if'], data=[]] ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.autoscale(False) #image_position_plot(ax, _coords, self._kwargs_else) #source_position_plot(ax, self._coords, self._kwargs_source) return ax
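A minimal usage sketch for lens_model_plot, assuming it is importable together with the lenstronomy and matplotlib dependencies it relies on; the 'SIE' model choice and every numeric value below are illustrative assumptions, not taken from the source.

import matplotlib.pyplot as plt
from lenstronomy.LensModel.lens_model import LensModel

# Hypothetical lens: a singular isothermal ellipsoid ('SIE') with a
# 1-arcsec Einstein radius; the parameter keys follow lenstronomy conventions.
lens_model = LensModel(lens_model_list=['SIE'])
kwargs_lens = [{'theta_E': 1.0, 'e1': 0.1, 'e2': 0.0,
                'center_x': 0.0, 'center_y': 0.0}]

fig, ax = plt.subplots(figsize=(5, 5))
# Convergence map plus critical curves, caustics and the point-source images.
lens_model_plot(ax, lens_model, kwargs_lens, numPix=200, deltaPix=0.05,
                sourcePos_x=0.05, sourcePos_y=0.0,
                point_source=True, with_caustics=True)
plt.show()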
def run_processor(
        processorClass,
        ocrd_tool=None,
        mets_url=None,
        resolver=None,
        workspace=None,
        page_id=None,
        log_level=None,
        input_file_grp=None,
        output_file_grp=None,
        parameter=None,
        working_dir=None,
): # pylint: disable=too-many-locals
    """
    Create a workspace for mets_url and run the processor through it.

    Args:
        parameter (string): local path or URL to the parameter JSON file
    """
    workspace = _get_workspace(
        workspace,
        resolver,
        mets_url,
        working_dir
    )
    if parameter is not None:
        if '://' not in parameter:
            fname = os.path.abspath(parameter)
        else:
            fname = workspace.download_url(parameter)
        with open(fname, 'r') as param_json_file:
            parameter = json.load(param_json_file)
    else:
        parameter = {}

    log.debug("Running processor %s", processorClass)
    processor = processorClass(
        workspace,
        ocrd_tool=ocrd_tool,
        page_id=page_id,
        input_file_grp=input_file_grp,
        output_file_grp=output_file_grp,
        parameter=parameter
    )
    ocrd_tool = processor.ocrd_tool
    name = '%s v%s' % (ocrd_tool['executable'], processor.version)
    otherrole = ocrd_tool['steps'][0]
    log.debug("Processor instance %s (%s doing %s)", processor, name, otherrole)
    processor.process()
    workspace.mets.add_agent(
        name=name,
        _type='OTHER',
        othertype='SOFTWARE',
        role='OTHER',
        otherrole=otherrole
    )
    workspace.save_mets()
    return processor
def function[run_processor, parameter[processorClass, ocrd_tool, mets_url, resolver, workspace, page_id, log_level, input_file_grp, output_file_grp, parameter, working_dir]]: constant[ Create a workspace for mets_url and run processor through it Args: parameter (string): URL to the parameter ] variable[workspace] assign[=] call[name[_get_workspace], parameter[name[workspace], name[resolver], name[mets_url], name[working_dir]]] if compare[name[parameter] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da20e9b1300> begin[:] variable[fname] assign[=] call[name[os].path.abspath, parameter[name[parameter]]] with call[name[open], parameter[name[fname], constant[r]]] begin[:] variable[parameter] assign[=] call[name[json].load, parameter[name[param_json_file]]] call[name[log].debug, parameter[constant[Running processor %s], name[processorClass]]] variable[processor] assign[=] call[name[processorClass], parameter[name[workspace]]] variable[ocrd_tool] assign[=] name[processor].ocrd_tool variable[name] assign[=] binary_operation[constant[%s v%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20e9b17e0>, <ast.Attribute object at 0x7da20e9b1ba0>]]] variable[otherrole] assign[=] call[call[name[ocrd_tool]][constant[steps]]][constant[0]] call[name[log].debug, parameter[constant[Processor instance %s (%s doing %s)], name[processor], name[name], name[otherrole]]] call[name[processor].process, parameter[]] call[name[workspace].mets.add_agent, parameter[]] call[name[workspace].save_mets, parameter[]] return[name[processor]]
keyword[def] identifier[run_processor] ( identifier[processorClass] , identifier[ocrd_tool] = keyword[None] , identifier[mets_url] = keyword[None] , identifier[resolver] = keyword[None] , identifier[workspace] = keyword[None] , identifier[page_id] = keyword[None] , identifier[log_level] = keyword[None] , identifier[input_file_grp] = keyword[None] , identifier[output_file_grp] = keyword[None] , identifier[parameter] = keyword[None] , identifier[working_dir] = keyword[None] , ): literal[string] identifier[workspace] = identifier[_get_workspace] ( identifier[workspace] , identifier[resolver] , identifier[mets_url] , identifier[working_dir] ) keyword[if] identifier[parameter] keyword[is] keyword[not] keyword[None] : keyword[if] literal[string] keyword[not] keyword[in] identifier[parameter] : identifier[fname] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[parameter] ) keyword[else] : identifier[fname] = identifier[workspace] . identifier[download_url] ( identifier[parameter] ) keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[param_json_file] : identifier[parameter] = identifier[json] . identifier[load] ( identifier[param_json_file] ) keyword[else] : identifier[parameter] ={} identifier[log] . identifier[debug] ( literal[string] , identifier[processorClass] ) identifier[processor] = identifier[processorClass] ( identifier[workspace] , identifier[ocrd_tool] = identifier[ocrd_tool] , identifier[page_id] = identifier[page_id] , identifier[input_file_grp] = identifier[input_file_grp] , identifier[output_file_grp] = identifier[output_file_grp] , identifier[parameter] = identifier[parameter] ) identifier[ocrd_tool] = identifier[processor] . identifier[ocrd_tool] identifier[name] = literal[string] %( identifier[ocrd_tool] [ literal[string] ], identifier[processor] . identifier[version] ) identifier[otherrole] = identifier[ocrd_tool] [ literal[string] ][ literal[int] ] identifier[log] . identifier[debug] ( literal[string] , identifier[processor] , identifier[name] , identifier[otherrole] ) identifier[processor] . identifier[process] () identifier[workspace] . identifier[mets] . identifier[add_agent] ( identifier[name] = identifier[name] , identifier[_type] = literal[string] , identifier[othertype] = literal[string] , identifier[role] = literal[string] , identifier[otherrole] = identifier[otherrole] ) identifier[workspace] . identifier[save_mets] () keyword[return] identifier[processor]
def run_processor(processorClass, ocrd_tool=None, mets_url=None, resolver=None, workspace=None, page_id=None, log_level=None, input_file_grp=None, output_file_grp=None, parameter=None, working_dir=None): # pylint: disable=too-many-locals
    '\n Create a workspace for mets_url and run the processor through it.\n\n Args:\n parameter (string): local path or URL to the parameter JSON file\n '
    workspace = _get_workspace(workspace, resolver, mets_url, working_dir)
    if parameter is not None:
        if '://' not in parameter:
            fname = os.path.abspath(parameter) # depends on [control=['if'], data=[]]
        else:
            fname = workspace.download_url(parameter)
        with open(fname, 'r') as param_json_file:
            parameter = json.load(param_json_file) # depends on [control=['with'], data=['param_json_file']] # depends on [control=['if'], data=['parameter']]
    else:
        parameter = {}
    log.debug('Running processor %s', processorClass)
    processor = processorClass(workspace, ocrd_tool=ocrd_tool, page_id=page_id, input_file_grp=input_file_grp, output_file_grp=output_file_grp, parameter=parameter)
    ocrd_tool = processor.ocrd_tool
    name = '%s v%s' % (ocrd_tool['executable'], processor.version)
    otherrole = ocrd_tool['steps'][0]
    log.debug('Processor instance %s (%s doing %s)', processor, name, otherrole)
    processor.process()
    workspace.mets.add_agent(name=name, _type='OTHER', othertype='SOFTWARE', role='OTHER', otherrole=otherrole)
    workspace.save_mets()
    return processor
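A hedged invocation sketch for run_processor; DummyProcessor, mets.xml and params.json are hypothetical placeholders. The ocrd_tool keys shown are the ones the function actually reads back from processor.ocrd_tool ('executable' and 'steps'[0]) when recording the METS agent, and the processor class must also expose a version attribute.

# DummyProcessor is a hypothetical Processor subclass implementing process().
ocrd_tool = {
    'executable': 'ocrd-dummy',        # becomes part of the agent name
    'steps': ['preprocessing/dummy'],  # steps[0] becomes the agent otherrole
}

processor = run_processor(
    DummyProcessor,
    ocrd_tool=ocrd_tool,
    mets_url='mets.xml',          # resolved into a workspace by _get_workspace
    input_file_grp='OCR-D-IMG',
    output_file_grp='OCR-D-OUT',
    parameter='params.json',      # local path or URL; loaded into a dict
)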
async def _throttled_request(self, request): '''Process a single request, respecting the concurrency limit.''' disconnect = False try: timeout = self.processing_timeout async with timeout_after(timeout): async with self._incoming_concurrency: if self.is_closing(): return if self._cost_fraction: await sleep(self._cost_fraction * self.cost_sleep) result = await self.handle_request(request) except (ProtocolError, RPCError) as e: result = e except TaskTimeout: self.logger.info(f'incoming request {request} timed out after {timeout} secs') result = RPCError(JSONRPC.SERVER_BUSY, 'server busy - request timed out') except ReplyAndDisconnect as e: result = e.result disconnect = True except ExcessiveSessionCostError: result = RPCError(JSONRPC.EXCESSIVE_RESOURCE_USAGE, 'excessive resource usage') disconnect = True except CancelledError: raise except Exception: self.logger.exception(f'exception handling {request}') result = RPCError(JSONRPC.INTERNAL_ERROR, 'internal server error') if isinstance(request, Request): message = request.send_result(result) if message: await self._send_message(message) if isinstance(result, Exception): self._bump_errors(result) if disconnect: await self.close()
<ast.AsyncFunctionDef object at 0x7da18dc075b0>
keyword[async] keyword[def] identifier[_throttled_request] ( identifier[self] , identifier[request] ): literal[string] identifier[disconnect] = keyword[False] keyword[try] : identifier[timeout] = identifier[self] . identifier[processing_timeout] keyword[async] keyword[with] identifier[timeout_after] ( identifier[timeout] ): keyword[async] keyword[with] identifier[self] . identifier[_incoming_concurrency] : keyword[if] identifier[self] . identifier[is_closing] (): keyword[return] keyword[if] identifier[self] . identifier[_cost_fraction] : keyword[await] identifier[sleep] ( identifier[self] . identifier[_cost_fraction] * identifier[self] . identifier[cost_sleep] ) identifier[result] = keyword[await] identifier[self] . identifier[handle_request] ( identifier[request] ) keyword[except] ( identifier[ProtocolError] , identifier[RPCError] ) keyword[as] identifier[e] : identifier[result] = identifier[e] keyword[except] identifier[TaskTimeout] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) identifier[result] = identifier[RPCError] ( identifier[JSONRPC] . identifier[SERVER_BUSY] , literal[string] ) keyword[except] identifier[ReplyAndDisconnect] keyword[as] identifier[e] : identifier[result] = identifier[e] . identifier[result] identifier[disconnect] = keyword[True] keyword[except] identifier[ExcessiveSessionCostError] : identifier[result] = identifier[RPCError] ( identifier[JSONRPC] . identifier[EXCESSIVE_RESOURCE_USAGE] , literal[string] ) identifier[disconnect] = keyword[True] keyword[except] identifier[CancelledError] : keyword[raise] keyword[except] identifier[Exception] : identifier[self] . identifier[logger] . identifier[exception] ( literal[string] ) identifier[result] = identifier[RPCError] ( identifier[JSONRPC] . identifier[INTERNAL_ERROR] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[request] , identifier[Request] ): identifier[message] = identifier[request] . identifier[send_result] ( identifier[result] ) keyword[if] identifier[message] : keyword[await] identifier[self] . identifier[_send_message] ( identifier[message] ) keyword[if] identifier[isinstance] ( identifier[result] , identifier[Exception] ): identifier[self] . identifier[_bump_errors] ( identifier[result] ) keyword[if] identifier[disconnect] : keyword[await] identifier[self] . identifier[close] ()
async def _throttled_request(self, request): """Process a single request, respecting the concurrency limit.""" disconnect = False try: timeout = self.processing_timeout async with timeout_after(timeout): async with self._incoming_concurrency: if self.is_closing(): return # depends on [control=['if'], data=[]] if self._cost_fraction: await sleep(self._cost_fraction * self.cost_sleep) # depends on [control=['if'], data=[]] result = await self.handle_request(request) # depends on [control=['try'], data=[]] except (ProtocolError, RPCError) as e: result = e # depends on [control=['except'], data=['e']] except TaskTimeout: self.logger.info(f'incoming request {request} timed out after {timeout} secs') result = RPCError(JSONRPC.SERVER_BUSY, 'server busy - request timed out') # depends on [control=['except'], data=[]] except ReplyAndDisconnect as e: result = e.result disconnect = True # depends on [control=['except'], data=['e']] except ExcessiveSessionCostError: result = RPCError(JSONRPC.EXCESSIVE_RESOURCE_USAGE, 'excessive resource usage') disconnect = True # depends on [control=['except'], data=[]] except CancelledError: raise # depends on [control=['except'], data=[]] except Exception: self.logger.exception(f'exception handling {request}') result = RPCError(JSONRPC.INTERNAL_ERROR, 'internal server error') # depends on [control=['except'], data=[]] if isinstance(request, Request): message = request.send_result(result) if message: await self._send_message(message) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if isinstance(result, Exception): self._bump_errors(result) # depends on [control=['if'], data=[]] if disconnect: await self.close() # depends on [control=['if'], data=[]]
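The pattern in _throttled_request (a concurrency-bounding semaphore nested inside a wall-clock deadline) also transfers to stdlib asyncio. Below is a standalone sketch under that assumption, with asyncio.wait_for standing in for the curio-style timeout_after; it is illustrative only, not part of the source library.

import asyncio

class RequestThrottler:
    """Bound both the number of in-flight requests and the time each may take."""

    def __init__(self, max_concurrent: int = 10, timeout: float = 30.0):
        self._semaphore = asyncio.Semaphore(max_concurrent)
        self._timeout = timeout

    async def run(self, handler, request):
        async def _acquire_and_handle():
            # The semaphore sits inside the deadline, mirroring how
            # _incoming_concurrency is nested inside timeout_after above:
            # time spent waiting for a slot counts against the timeout.
            async with self._semaphore:
                return await handler(request)
        return await asyncio.wait_for(_acquire_and_handle(), self._timeout)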