code (str, 75 to 104k) | code_sememe (str, 47 to 309k) | token_type (str, 215 to 214k) | code_dependency (str, 75 to 155k)
---|---|---|---|
def all_tokens(self, delimiter=' '):
"""
        Return a set of all distinct tokens occurring in the label-list.
Args:
delimiter (str): The delimiter used to split labels into tokens
(see :meth:`audiomate.annotations.Label.tokenized`).
Returns:
:class:`set`: A set of distinct tokens.
"""
tokens = set()
for label in self:
tokens = tokens.union(set(label.tokenized(delimiter=delimiter)))
return tokens | def function[all_tokens, parameter[self, delimiter]]:
constant[
    Return a set of all distinct tokens occurring in the label-list.
Args:
delimiter (str): The delimiter used to split labels into tokens
(see :meth:`audiomate.annotations.Label.tokenized`).
Returns:
:class:`set`: A set of distinct tokens.
]
variable[tokens] assign[=] call[name[set], parameter[]]
for taget[name[label]] in starred[name[self]] begin[:]
variable[tokens] assign[=] call[name[tokens].union, parameter[call[name[set], parameter[call[name[label].tokenized, parameter[]]]]]]
return[name[tokens]] | keyword[def] identifier[all_tokens] ( identifier[self] , identifier[delimiter] = literal[string] ):
literal[string]
identifier[tokens] = identifier[set] ()
keyword[for] identifier[label] keyword[in] identifier[self] :
identifier[tokens] = identifier[tokens] . identifier[union] ( identifier[set] ( identifier[label] . identifier[tokenized] ( identifier[delimiter] = identifier[delimiter] )))
keyword[return] identifier[tokens] | def all_tokens(self, delimiter=' '):
"""
    Return a set of all distinct tokens occurring in the label-list.
Args:
delimiter (str): The delimiter used to split labels into tokens
(see :meth:`audiomate.annotations.Label.tokenized`).
Returns:
:class:`set`: A set of distinct tokens.
"""
tokens = set()
for label in self:
tokens = tokens.union(set(label.tokenized(delimiter=delimiter))) # depends on [control=['for'], data=['label']]
return tokens |
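A minimal sketch of how `all_tokens` behaves, using hypothetical stand-ins for audiomate's `Label` and `LabelList` (the real classes carry more state; only `tokenized` and iteration matter here):

```python
class Label:
    """Hypothetical stand-in: a label whose value can be tokenized."""
    def __init__(self, value):
        self.value = value

    def tokenized(self, delimiter=' '):
        return self.value.split(delimiter)

class LabelList(list):
    """Hypothetical stand-in carrying the all_tokens method from above."""
    def all_tokens(self, delimiter=' '):
        tokens = set()
        for label in self:
            tokens = tokens.union(set(label.tokenized(delimiter=delimiter)))
        return tokens

labels = LabelList([Label('hello world'), Label('hello again')])
print(labels.all_tokens())  # {'hello', 'world', 'again'} (set order varies)
```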
def domagicmag(file, Recs):
"""
converts a magic record back into the SIO mag format
"""
for rec in Recs:
type = ".0"
meths = []
tmp = rec["magic_method_codes"].split(':')
for meth in tmp:
meths.append(meth.strip())
if 'LT-T-I' in meths:
type = ".1"
if 'LT-PTRM-I' in meths:
type = ".2"
if 'LT-PTRM-MD' in meths:
type = ".3"
treatment = float(rec["treatment_temp"]) - 273
tr = '%i' % (treatment) + type
inten = '%8.7e ' % (float(rec["measurement_magn_moment"]) * 1e3)
outstring = rec["er_specimen_name"] + " " + tr + " " + rec["measurement_csd"] + \
" " + inten + " " + rec["measurement_dec"] + \
" " + rec["measurement_inc"] + "\n"
file.write(outstring) | def function[domagicmag, parameter[file, Recs]]:
constant[
converts a magic record back into the SIO mag format
]
for taget[name[rec]] in starred[name[Recs]] begin[:]
variable[type] assign[=] constant[.0]
variable[meths] assign[=] list[[]]
variable[tmp] assign[=] call[call[name[rec]][constant[magic_method_codes]].split, parameter[constant[:]]]
for taget[name[meth]] in starred[name[tmp]] begin[:]
call[name[meths].append, parameter[call[name[meth].strip, parameter[]]]]
if compare[constant[LT-T-I] in name[meths]] begin[:]
variable[type] assign[=] constant[.1]
if compare[constant[LT-PTRM-I] in name[meths]] begin[:]
variable[type] assign[=] constant[.2]
if compare[constant[LT-PTRM-MD] in name[meths]] begin[:]
variable[type] assign[=] constant[.3]
variable[treatment] assign[=] binary_operation[call[name[float], parameter[call[name[rec]][constant[treatment_temp]]]] - constant[273]]
variable[tr] assign[=] binary_operation[binary_operation[constant[%i] <ast.Mod object at 0x7da2590d6920> name[treatment]] + name[type]]
variable[inten] assign[=] binary_operation[constant[%8.7e ] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]] * constant[1000.0]]]
variable[outstring] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[rec]][constant[er_specimen_name]] + constant[ ]] + name[tr]] + constant[ ]] + call[name[rec]][constant[measurement_csd]]] + constant[ ]] + name[inten]] + constant[ ]] + call[name[rec]][constant[measurement_dec]]] + constant[ ]] + call[name[rec]][constant[measurement_inc]]] + constant[
]]
call[name[file].write, parameter[name[outstring]]] | keyword[def] identifier[domagicmag] ( identifier[file] , identifier[Recs] ):
literal[string]
keyword[for] identifier[rec] keyword[in] identifier[Recs] :
identifier[type] = literal[string]
identifier[meths] =[]
identifier[tmp] = identifier[rec] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[meth] keyword[in] identifier[tmp] :
identifier[meths] . identifier[append] ( identifier[meth] . identifier[strip] ())
keyword[if] literal[string] keyword[in] identifier[meths] :
identifier[type] = literal[string]
keyword[if] literal[string] keyword[in] identifier[meths] :
identifier[type] = literal[string]
keyword[if] literal[string] keyword[in] identifier[meths] :
identifier[type] = literal[string]
identifier[treatment] = identifier[float] ( identifier[rec] [ literal[string] ])- literal[int]
identifier[tr] = literal[string] %( identifier[treatment] )+ identifier[type]
identifier[inten] = literal[string] %( identifier[float] ( identifier[rec] [ literal[string] ])* literal[int] )
identifier[outstring] = identifier[rec] [ literal[string] ]+ literal[string] + identifier[tr] + literal[string] + identifier[rec] [ literal[string] ]+ literal[string] + identifier[inten] + literal[string] + identifier[rec] [ literal[string] ]+ literal[string] + identifier[rec] [ literal[string] ]+ literal[string]
identifier[file] . identifier[write] ( identifier[outstring] ) | def domagicmag(file, Recs):
"""
converts a magic record back into the SIO mag format
"""
for rec in Recs:
type = '.0'
meths = []
tmp = rec['magic_method_codes'].split(':')
for meth in tmp:
meths.append(meth.strip()) # depends on [control=['for'], data=['meth']]
if 'LT-T-I' in meths:
type = '.1' # depends on [control=['if'], data=[]]
if 'LT-PTRM-I' in meths:
type = '.2' # depends on [control=['if'], data=[]]
if 'LT-PTRM-MD' in meths:
type = '.3' # depends on [control=['if'], data=[]]
treatment = float(rec['treatment_temp']) - 273
tr = '%i' % treatment + type
inten = '%8.7e ' % (float(rec['measurement_magn_moment']) * 1000.0)
outstring = rec['er_specimen_name'] + ' ' + tr + ' ' + rec['measurement_csd'] + ' ' + inten + ' ' + rec['measurement_dec'] + ' ' + rec['measurement_inc'] + '\n'
file.write(outstring) # depends on [control=['for'], data=['rec']] |
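A usage sketch for `domagicmag`, assuming the function above is in scope; the record below is fabricated, with only the keys the function reads:

```python
import io

# Fabricated MagIC-style record; values are illustrative only.
rec = {
    "magic_method_codes": "LP-PI-TRM : LT-T-I",
    "treatment_temp": "573",              # Kelvin -> printed as 300.1
    "measurement_magn_moment": "2.5e-9",  # scaled by 1e3 on output
    "er_specimen_name": "sample01a",
    "measurement_csd": "0.5",
    "measurement_dec": "12.5",
    "measurement_inc": "-45.0",
}

buf = io.StringIO()
domagicmag(buf, [rec])
print(buf.getvalue())
# sample01a 300.1 0.5 2.5000000e-06  12.5 -45.0
```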
def setter(self, name: str, rename: Optional[str] = None) -> 'ProxyAttr':
"""
        Set up a proxy setter for the class
"""
if rename is None:
rename = name
if self.reuse_handle(name, rename, self._setter):
return self
def proxy_set(this: Any, val: Any) -> None:
"""
            Proxied setattr
"""
proxy_target_ = getattr(this, self._target)
setattr(proxy_target_, name, val)
if name in self._getter:
func = self._func_map[name]
func = func.setter(proxy_set)
else:
func = property(None, proxy_set)
self._func_map[name] = func
setattr(self._proto, rename, func)
self._setter[name] = {rename}
return self | def function[setter, parameter[self, name, rename]]:
constant[
    Set up a proxy setter for the class
]
if compare[name[rename] is constant[None]] begin[:]
variable[rename] assign[=] name[name]
if call[name[self].reuse_handle, parameter[name[name], name[rename], name[self]._setter]] begin[:]
return[name[self]]
def function[proxy_set, parameter[this, val]]:
constant[
        Proxied setattr
]
variable[proxy_target_] assign[=] call[name[getattr], parameter[name[this], name[self]._target]]
call[name[setattr], parameter[name[proxy_target_], name[name], name[val]]]
if compare[name[name] in name[self]._getter] begin[:]
variable[func] assign[=] call[name[self]._func_map][name[name]]
variable[func] assign[=] call[name[func].setter, parameter[name[proxy_set]]]
call[name[self]._func_map][name[name]] assign[=] name[func]
call[name[setattr], parameter[name[self]._proto, name[rename], name[func]]]
call[name[self]._setter][name[name]] assign[=] <ast.Set object at 0x7da18dc98850>
return[name[self]] | keyword[def] identifier[setter] ( identifier[self] , identifier[name] : identifier[str] , identifier[rename] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> literal[string] :
literal[string]
keyword[if] identifier[rename] keyword[is] keyword[None] :
identifier[rename] = identifier[name]
keyword[if] identifier[self] . identifier[reuse_handle] ( identifier[name] , identifier[rename] , identifier[self] . identifier[_setter] ):
keyword[return] identifier[self]
keyword[def] identifier[proxy_set] ( identifier[this] : identifier[Any] , identifier[val] : identifier[Any] )-> keyword[None] :
literal[string]
identifier[proxy_target_] = identifier[getattr] ( identifier[this] , identifier[self] . identifier[_target] )
identifier[setattr] ( identifier[proxy_target_] , identifier[name] , identifier[val] )
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_getter] :
identifier[func] = identifier[self] . identifier[_func_map] [ identifier[name] ]
identifier[func] = identifier[func] . identifier[setter] ( identifier[proxy_set] )
keyword[else] :
identifier[func] = identifier[property] ( keyword[None] , identifier[proxy_set] )
identifier[self] . identifier[_func_map] [ identifier[name] ]= identifier[func]
identifier[setattr] ( identifier[self] . identifier[_proto] , identifier[rename] , identifier[func] )
identifier[self] . identifier[_setter] [ identifier[name] ]={ identifier[rename] }
keyword[return] identifier[self] | def setter(self, name: str, rename: Optional[str]=None) -> 'ProxyAttr':
"""
    Set up a proxy setter for the class
"""
if rename is None:
rename = name # depends on [control=['if'], data=['rename']]
if self.reuse_handle(name, rename, self._setter):
return self # depends on [control=['if'], data=[]]
def proxy_set(this: Any, val: Any) -> None:
"""
        Proxied setattr
"""
proxy_target_ = getattr(this, self._target)
setattr(proxy_target_, name, val)
if name in self._getter:
func = self._func_map[name]
func = func.setter(proxy_set) # depends on [control=['if'], data=['name']]
else:
func = property(None, proxy_set)
self._func_map[name] = func
setattr(self._proto, rename, func)
self._setter[name] = {rename}
return self |
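The heart of `setter` is installing a write-only `property` on the proxied class; a stripped-down standalone sketch of that trick (all names here are invented):

```python
class Target:
    def __init__(self):
        self.volume = 0

class Remote:
    """Proxy that forwards attribute writes to self._target."""
    def __init__(self, target):
        self._target = target

def make_proxy_setter(name):
    def proxy_set(this, val):
        # Forward the write to the wrapped object, like proxy_set above.
        setattr(this._target, name, val)
    return proxy_set

# property(None, fset) yields a write-only descriptor, as in the else-branch above.
setattr(Remote, 'volume', property(None, make_proxy_setter('volume')))

tv = Target()
remote = Remote(tv)
remote.volume = 11
print(tv.volume)  # 11; reading remote.volume would raise AttributeError
```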
def dlogpdf_link_dvar(self, link_f, y, Y_metadata=None):
"""
Gradient of the log-likelihood function at y given link(f), w.r.t variance parameter (noise_variance)
.. math::
\\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\sigma^{2}} = -\\frac{N}{2\\sigma^{2}} + \\frac{(y_{i} - \\lambda(f_{i}))^{2}}{2\\sigma^{4}}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata not used in gaussian
:returns: derivative of log likelihood evaluated at points link(f) w.r.t variance parameter
:rtype: float
"""
e = y - link_f
s_4 = 1.0/(self.variance**2)
dlik_dsigma = -0.5/self.variance + 0.5*s_4*np.square(e)
return dlik_dsigma | def function[dlogpdf_link_dvar, parameter[self, link_f, y, Y_metadata]]:
constant[
Gradient of the log-likelihood function at y given link(f), w.r.t variance parameter (noise_variance)
.. math::
\frac{d \ln p(y_{i}|\lambda(f_{i}))}{d\sigma^{2}} = -\frac{N}{2\sigma^{2}} + \frac{(y_{i} - \lambda(f_{i}))^{2}}{2\sigma^{4}}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata not used in gaussian
:returns: derivative of log likelihood evaluated at points link(f) w.r.t variance parameter
:rtype: float
]
variable[e] assign[=] binary_operation[name[y] - name[link_f]]
variable[s_4] assign[=] binary_operation[constant[1.0] / binary_operation[name[self].variance ** constant[2]]]
variable[dlik_dsigma] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1c698a0> / name[self].variance] + binary_operation[binary_operation[constant[0.5] * name[s_4]] * call[name[np].square, parameter[name[e]]]]]
return[name[dlik_dsigma]] | keyword[def] identifier[dlogpdf_link_dvar] ( identifier[self] , identifier[link_f] , identifier[y] , identifier[Y_metadata] = keyword[None] ):
literal[string]
identifier[e] = identifier[y] - identifier[link_f]
identifier[s_4] = literal[int] /( identifier[self] . identifier[variance] ** literal[int] )
identifier[dlik_dsigma] =- literal[int] / identifier[self] . identifier[variance] + literal[int] * identifier[s_4] * identifier[np] . identifier[square] ( identifier[e] )
keyword[return] identifier[dlik_dsigma] | def dlogpdf_link_dvar(self, link_f, y, Y_metadata=None):
"""
Gradient of the log-likelihood function at y given link(f), w.r.t variance parameter (noise_variance)
.. math::
\\frac{d \\ln p(y_{i}|\\lambda(f_{i}))}{d\\sigma^{2}} = -\\frac{N}{2\\sigma^{2}} + \\frac{(y_{i} - \\lambda(f_{i}))^{2}}{2\\sigma^{4}}
:param link_f: latent variables link(f)
:type link_f: Nx1 array
:param y: data
:type y: Nx1 array
:param Y_metadata: Y_metadata not used in gaussian
:returns: derivative of log likelihood evaluated at points link(f) w.r.t variance parameter
:rtype: float
"""
e = y - link_f
s_4 = 1.0 / self.variance ** 2
dlik_dsigma = -0.5 / self.variance + 0.5 * s_4 * np.square(e)
return dlik_dsigma |
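A quick numeric sanity check of that gradient (a standalone sketch, not part of GPy): the per-point closed form -0.5/sigma^2 + (y - f)^2 / (2 sigma^4) should match a central finite difference of the Gaussian log density with respect to the variance.

```python
import numpy as np

y, f, var = 1.3, 0.9, 0.5

def logpdf(v):
    # Per-point Gaussian log density of y given mean f and variance v.
    return -0.5 * np.log(2 * np.pi * v) - (y - f) ** 2 / (2 * v)

closed_form = -0.5 / var + 0.5 * (y - f) ** 2 / var ** 2
h = 1e-6
finite_diff = (logpdf(var + h) - logpdf(var - h)) / (2 * h)
print(closed_form, finite_diff)  # both ~ -0.68
```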
def get_tasks(self, thread_name):
"""
Args:
thread_name (str): name of the thread to get the tasks for
Returns:
            OrderedDict of str, Task: task names mapped to log records,
            for the given thread
"""
if thread_name not in self.tasks_by_thread:
with self._tasks_lock:
self.tasks_by_thread[thread_name] = OrderedDict()
return self.tasks_by_thread[thread_name] | def function[get_tasks, parameter[self, thread_name]]:
constant[
Args:
thread_name (str): name of the thread to get the tasks for
Returns:
        OrderedDict of str, Task: task names mapped to log records,
        for the given thread
]
if compare[name[thread_name] <ast.NotIn object at 0x7da2590d7190> name[self].tasks_by_thread] begin[:]
with name[self]._tasks_lock begin[:]
call[name[self].tasks_by_thread][name[thread_name]] assign[=] call[name[OrderedDict], parameter[]]
return[call[name[self].tasks_by_thread][name[thread_name]]] | keyword[def] identifier[get_tasks] ( identifier[self] , identifier[thread_name] ):
literal[string]
keyword[if] identifier[thread_name] keyword[not] keyword[in] identifier[self] . identifier[tasks_by_thread] :
keyword[with] identifier[self] . identifier[_tasks_lock] :
identifier[self] . identifier[tasks_by_thread] [ identifier[thread_name] ]= identifier[OrderedDict] ()
keyword[return] identifier[self] . identifier[tasks_by_thread] [ identifier[thread_name] ] | def get_tasks(self, thread_name):
"""
Args:
thread_name (str): name of the thread to get the tasks for
Returns:
            OrderedDict of str, Task: task names mapped to log records,
            for the given thread
"""
if thread_name not in self.tasks_by_thread:
with self._tasks_lock:
self.tasks_by_thread[thread_name] = OrderedDict() # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['thread_name']]
return self.tasks_by_thread[thread_name] |
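The pattern in `get_tasks` is lazy, lock-guarded creation of a per-thread mapping; a self-contained sketch of the same idiom (the class and names are invented):

```python
import threading
from collections import OrderedDict

class TaskRegistry:
    def __init__(self):
        self.tasks_by_thread = {}
        self._tasks_lock = threading.Lock()

    def get_tasks(self, thread_name):
        if thread_name not in self.tasks_by_thread:
            with self._tasks_lock:
                # setdefault re-checks under the lock, closing the small
                # check-then-set race the plain assignment above tolerates.
                self.tasks_by_thread.setdefault(thread_name, OrderedDict())
        return self.tasks_by_thread[thread_name]

registry = TaskRegistry()
registry.get_tasks('worker-1')['parse'] = ['record-a']
print(registry.get_tasks('worker-1'))  # OrderedDict([('parse', ['record-a'])])
```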
def get_hashed_signature(self, url):
"""
Process from Membersuite Docs: http://bit.ly/2eSIDxz
"""
data = "%s%s" % (url, self.association_id)
if self.session_id:
data = "%s%s" % (data, self.session_id)
data_b = bytearray(data, 'utf-8')
secret_key = base64.b64decode(self.secret_key)
secret_b = bytearray(secret_key)
hashed = hmac.new(secret_b, data_b, sha1).digest()
return base64.b64encode(hashed).decode("utf-8") | def function[get_hashed_signature, parameter[self, url]]:
constant[
Process from Membersuite Docs: http://bit.ly/2eSIDxz
]
variable[data] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2370be0>, <ast.Attribute object at 0x7da1b23e6950>]]]
if name[self].session_id begin[:]
variable[data] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2044c0be0>, <ast.Attribute object at 0x7da2044c3010>]]]
variable[data_b] assign[=] call[name[bytearray], parameter[name[data], constant[utf-8]]]
variable[secret_key] assign[=] call[name[base64].b64decode, parameter[name[self].secret_key]]
variable[secret_b] assign[=] call[name[bytearray], parameter[name[secret_key]]]
variable[hashed] assign[=] call[call[name[hmac].new, parameter[name[secret_b], name[data_b], name[sha1]]].digest, parameter[]]
return[call[call[name[base64].b64encode, parameter[name[hashed]]].decode, parameter[constant[utf-8]]]] | keyword[def] identifier[get_hashed_signature] ( identifier[self] , identifier[url] ):
literal[string]
identifier[data] = literal[string] %( identifier[url] , identifier[self] . identifier[association_id] )
keyword[if] identifier[self] . identifier[session_id] :
identifier[data] = literal[string] %( identifier[data] , identifier[self] . identifier[session_id] )
identifier[data_b] = identifier[bytearray] ( identifier[data] , literal[string] )
identifier[secret_key] = identifier[base64] . identifier[b64decode] ( identifier[self] . identifier[secret_key] )
identifier[secret_b] = identifier[bytearray] ( identifier[secret_key] )
identifier[hashed] = identifier[hmac] . identifier[new] ( identifier[secret_b] , identifier[data_b] , identifier[sha1] ). identifier[digest] ()
keyword[return] identifier[base64] . identifier[b64encode] ( identifier[hashed] ). identifier[decode] ( literal[string] ) | def get_hashed_signature(self, url):
"""
Process from Membersuite Docs: http://bit.ly/2eSIDxz
"""
data = '%s%s' % (url, self.association_id)
if self.session_id:
data = '%s%s' % (data, self.session_id) # depends on [control=['if'], data=[]]
data_b = bytearray(data, 'utf-8')
secret_key = base64.b64decode(self.secret_key)
secret_b = bytearray(secret_key)
hashed = hmac.new(secret_b, data_b, sha1).digest()
return base64.b64encode(hashed).decode('utf-8') |
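The signature recipe is plain stdlib HMAC-SHA1 over url + association_id (+ session_id when present), keyed with the base64-decoded secret; a standalone sketch with made-up credentials:

```python
import base64
import hmac
from hashlib import sha1

url = "https://api.example.com/soap"                      # made-up endpoint
association_id = "assoc-123"                              # made-up id
secret_key = base64.b64encode(b"shared-secret").decode()  # normally issued by the service

data = bytearray("%s%s" % (url, association_id), "utf-8")
key = bytearray(base64.b64decode(secret_key))
signature = base64.b64encode(hmac.new(key, data, sha1).digest()).decode("utf-8")
print(signature)  # a 28-character base64 string
```

hmac.new accepts bytes-like keys and messages, so the bytearray round-trip mirrors the method above.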
def download(self, output_dir, url, overwrite):
""" Dowload file to /tmp """
tmp = self.url2tmp(output_dir, url)
if os.path.isfile(tmp) and not overwrite:
logging.info("File {0} already exists. Skipping download.".format(tmp))
return tmp
f = open(tmp, 'wb')
logging.info("Downloading {0}".format(url))
res = requests.get(url, stream=True)
if res.status_code != 200:
# failed to download, cleanup and raise exception
f.close()
os.remove(tmp)
error = "{0}\n\nFailed to download < {0} >".format(res.content, url)
raise IOError(error)
for block in res.iter_content(1024):
f.write(block)
f.close()
return tmp | def function[download, parameter[self, output_dir, url, overwrite]]:
    constant[ Download file to /tmp ]
variable[tmp] assign[=] call[name[self].url2tmp, parameter[name[output_dir], name[url]]]
if <ast.BoolOp object at 0x7da1b1835ea0> begin[:]
call[name[logging].info, parameter[call[constant[File {0} already exists. Skipping download.].format, parameter[name[tmp]]]]]
return[name[tmp]]
variable[f] assign[=] call[name[open], parameter[name[tmp], constant[wb]]]
call[name[logging].info, parameter[call[constant[Downloading {0}].format, parameter[name[url]]]]]
variable[res] assign[=] call[name[requests].get, parameter[name[url]]]
if compare[name[res].status_code not_equal[!=] constant[200]] begin[:]
call[name[f].close, parameter[]]
call[name[os].remove, parameter[name[tmp]]]
variable[error] assign[=] call[constant[{0}
Failed to download < {0} >].format, parameter[name[res].content, name[url]]]
<ast.Raise object at 0x7da1b190ffa0>
for taget[name[block]] in starred[call[name[res].iter_content, parameter[constant[1024]]]] begin[:]
call[name[f].write, parameter[name[block]]]
call[name[f].close, parameter[]]
return[name[tmp]] | keyword[def] identifier[download] ( identifier[self] , identifier[output_dir] , identifier[url] , identifier[overwrite] ):
literal[string]
identifier[tmp] = identifier[self] . identifier[url2tmp] ( identifier[output_dir] , identifier[url] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[tmp] ) keyword[and] keyword[not] identifier[overwrite] :
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[tmp] ))
keyword[return] identifier[tmp]
identifier[f] = identifier[open] ( identifier[tmp] , literal[string] )
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[url] ))
identifier[res] = identifier[requests] . identifier[get] ( identifier[url] , identifier[stream] = keyword[True] )
keyword[if] identifier[res] . identifier[status_code] != literal[int] :
identifier[f] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[tmp] )
identifier[error] = literal[string] . identifier[format] ( identifier[res] . identifier[content] , identifier[url] )
keyword[raise] identifier[IOError] ( identifier[error] )
keyword[for] identifier[block] keyword[in] identifier[res] . identifier[iter_content] ( literal[int] ):
identifier[f] . identifier[write] ( identifier[block] )
identifier[f] . identifier[close] ()
keyword[return] identifier[tmp] | def download(self, output_dir, url, overwrite):
""" Dowload file to /tmp """
tmp = self.url2tmp(output_dir, url)
if os.path.isfile(tmp) and (not overwrite):
logging.info('File {0} already exists. Skipping download.'.format(tmp))
return tmp # depends on [control=['if'], data=[]]
f = open(tmp, 'wb')
logging.info('Downloading {0}'.format(url))
res = requests.get(url, stream=True)
if res.status_code != 200:
# failed to download, cleanup and raise exception
f.close()
os.remove(tmp)
error = '{0}\n\nFailed to download < {0} >'.format(res.content, url)
raise IOError(error) # depends on [control=['if'], data=[]]
for block in res.iter_content(1024):
f.write(block) # depends on [control=['for'], data=['block']]
f.close()
return tmp |
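A standalone sketch of the same stream-and-clean-up pattern outside the class (URL and destination are placeholders; network access is assumed):

```python
import logging
import os
import requests

def fetch(url, dest, overwrite=False):
    if os.path.isfile(dest) and not overwrite:
        logging.info("File %s already exists. Skipping download.", dest)
        return dest
    res = requests.get(url, stream=True)
    if res.status_code != 200:
        raise IOError("Failed to download < %s >: HTTP %d" % (url, res.status_code))
    # Opening the file only after the status check avoids the
    # close-and-remove cleanup the method above needs.
    with open(dest, "wb") as f:
        for block in res.iter_content(1024):
            f.write(block)
    return dest

# fetch("https://example.com/", "/tmp/example.html")
```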
async def unformat(self):
"""Unformat this block device."""
self._data = await self._handler.unformat(
system_id=self.node.system_id, id=self.id) | <ast.AsyncFunctionDef object at 0x7da18eb57580> | keyword[async] keyword[def] identifier[unformat] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_data] = keyword[await] identifier[self] . identifier[_handler] . identifier[unformat] (
identifier[system_id] = identifier[self] . identifier[node] . identifier[system_id] , identifier[id] = identifier[self] . identifier[id] ) | async def unformat(self):
"""Unformat this block device."""
self._data = await self._handler.unformat(system_id=self.node.system_id, id=self.id) |
def grant_permission_for(brain_or_object, permission, roles, acquire=0):
"""Grant the permission for the object to the defined roles
Code extracted from `IRoleManager.manage_permission`
:param brain_or_object: Catalog brain or object
:param permission: The permission to be granted
:param roles: The roles the permission to be granted to
:param acquire: Flag to acquire the permission
"""
obj = api.get_object(brain_or_object)
valid_roles = get_valid_roles_for(obj)
to_grant = list(get_roles_for_permission(permission, obj))
if isinstance(roles, basestring):
roles = [roles]
for role in roles:
if role not in to_grant:
if role not in valid_roles:
raise ValueError("The Role '{}' is invalid.".format(role))
# Append the role
to_grant.append(role)
manage_permission_for(obj, permission, to_grant, acquire=acquire) | def function[grant_permission_for, parameter[brain_or_object, permission, roles, acquire]]:
constant[Grant the permission for the object to the defined roles
Code extracted from `IRoleManager.manage_permission`
:param brain_or_object: Catalog brain or object
:param permission: The permission to be granted
:param roles: The roles the permission to be granted to
:param acquire: Flag to acquire the permission
]
variable[obj] assign[=] call[name[api].get_object, parameter[name[brain_or_object]]]
variable[valid_roles] assign[=] call[name[get_valid_roles_for], parameter[name[obj]]]
variable[to_grant] assign[=] call[name[list], parameter[call[name[get_roles_for_permission], parameter[name[permission], name[obj]]]]]
if call[name[isinstance], parameter[name[roles], name[basestring]]] begin[:]
variable[roles] assign[=] list[[<ast.Name object at 0x7da1b1d67eb0>]]
for taget[name[role]] in starred[name[roles]] begin[:]
if compare[name[role] <ast.NotIn object at 0x7da2590d7190> name[to_grant]] begin[:]
if compare[name[role] <ast.NotIn object at 0x7da2590d7190> name[valid_roles]] begin[:]
<ast.Raise object at 0x7da1b1d65060>
call[name[to_grant].append, parameter[name[role]]]
call[name[manage_permission_for], parameter[name[obj], name[permission], name[to_grant]]] | keyword[def] identifier[grant_permission_for] ( identifier[brain_or_object] , identifier[permission] , identifier[roles] , identifier[acquire] = literal[int] ):
literal[string]
identifier[obj] = identifier[api] . identifier[get_object] ( identifier[brain_or_object] )
identifier[valid_roles] = identifier[get_valid_roles_for] ( identifier[obj] )
identifier[to_grant] = identifier[list] ( identifier[get_roles_for_permission] ( identifier[permission] , identifier[obj] ))
keyword[if] identifier[isinstance] ( identifier[roles] , identifier[basestring] ):
identifier[roles] =[ identifier[roles] ]
keyword[for] identifier[role] keyword[in] identifier[roles] :
keyword[if] identifier[role] keyword[not] keyword[in] identifier[to_grant] :
keyword[if] identifier[role] keyword[not] keyword[in] identifier[valid_roles] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[role] ))
identifier[to_grant] . identifier[append] ( identifier[role] )
identifier[manage_permission_for] ( identifier[obj] , identifier[permission] , identifier[to_grant] , identifier[acquire] = identifier[acquire] ) | def grant_permission_for(brain_or_object, permission, roles, acquire=0):
"""Grant the permission for the object to the defined roles
Code extracted from `IRoleManager.manage_permission`
:param brain_or_object: Catalog brain or object
:param permission: The permission to be granted
:param roles: The roles the permission to be granted to
:param acquire: Flag to acquire the permission
"""
obj = api.get_object(brain_or_object)
valid_roles = get_valid_roles_for(obj)
to_grant = list(get_roles_for_permission(permission, obj))
if isinstance(roles, basestring):
roles = [roles] # depends on [control=['if'], data=[]]
for role in roles:
if role not in to_grant:
if role not in valid_roles:
raise ValueError("The Role '{}' is invalid.".format(role)) # depends on [control=['if'], data=['role']]
# Append the role
to_grant.append(role) # depends on [control=['if'], data=['role', 'to_grant']] # depends on [control=['for'], data=['role']]
manage_permission_for(obj, permission, to_grant, acquire=acquire) |
def deriv(f,c,dx=0.0001):
"""
deriv(f,c,dx) --> float
    Returns f'(c), computed as a symmetric difference quotient.
"""
return (f(c+dx)-f(c-dx))/(2*dx) | def function[deriv, parameter[f, c, dx]]:
constant[
deriv(f,c,dx) --> float
    Returns f'(c), computed as a symmetric difference quotient.
]
return[binary_operation[binary_operation[call[name[f], parameter[binary_operation[name[c] + name[dx]]]] - call[name[f], parameter[binary_operation[name[c] - name[dx]]]]] / binary_operation[constant[2] * name[dx]]]] | keyword[def] identifier[deriv] ( identifier[f] , identifier[c] , identifier[dx] = literal[int] ):
literal[string]
keyword[return] ( identifier[f] ( identifier[c] + identifier[dx] )- identifier[f] ( identifier[c] - identifier[dx] ))/( literal[int] * identifier[dx] ) | def deriv(f, c, dx=0.0001):
"""
deriv(f,c,dx) --> float
    Returns f'(c), computed as a symmetric difference quotient.
"""
return (f(c + dx) - f(c - dx)) / (2 * dx) |
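A quick check of the difference quotient, assuming deriv above is in scope:

```python
import math

print(deriv(math.sin, 0))          # ~1.0, since cos(0) == 1
print(deriv(lambda x: x ** 2, 3))  # ~6.0, since d/dx x^2 = 2x
```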
def dispatch_event(event):
""" Dispatch the event being represented by the Event object.
Args:
event: Object holding information about the request to be dispatched to the Optimizely backend.
"""
try:
if event.http_verb == enums.HTTPVerbs.GET:
requests.get(event.url, params=event.params, timeout=REQUEST_TIMEOUT).raise_for_status()
elif event.http_verb == enums.HTTPVerbs.POST:
requests.post(
event.url, data=json.dumps(event.params), headers=event.headers, timeout=REQUEST_TIMEOUT
).raise_for_status()
except request_exception.RequestException as error:
logging.error('Dispatch event failed. Error: %s' % str(error)) | def function[dispatch_event, parameter[event]]:
constant[ Dispatch the event being represented by the Event object.
Args:
event: Object holding information about the request to be dispatched to the Optimizely backend.
]
<ast.Try object at 0x7da18bc72aa0> | keyword[def] identifier[dispatch_event] ( identifier[event] ):
literal[string]
keyword[try] :
keyword[if] identifier[event] . identifier[http_verb] == identifier[enums] . identifier[HTTPVerbs] . identifier[GET] :
identifier[requests] . identifier[get] ( identifier[event] . identifier[url] , identifier[params] = identifier[event] . identifier[params] , identifier[timeout] = identifier[REQUEST_TIMEOUT] ). identifier[raise_for_status] ()
keyword[elif] identifier[event] . identifier[http_verb] == identifier[enums] . identifier[HTTPVerbs] . identifier[POST] :
identifier[requests] . identifier[post] (
identifier[event] . identifier[url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[event] . identifier[params] ), identifier[headers] = identifier[event] . identifier[headers] , identifier[timeout] = identifier[REQUEST_TIMEOUT]
). identifier[raise_for_status] ()
keyword[except] identifier[request_exception] . identifier[RequestException] keyword[as] identifier[error] :
identifier[logging] . identifier[error] ( literal[string] % identifier[str] ( identifier[error] )) | def dispatch_event(event):
""" Dispatch the event being represented by the Event object.
Args:
event: Object holding information about the request to be dispatched to the Optimizely backend.
"""
try:
if event.http_verb == enums.HTTPVerbs.GET:
requests.get(event.url, params=event.params, timeout=REQUEST_TIMEOUT).raise_for_status() # depends on [control=['if'], data=[]]
elif event.http_verb == enums.HTTPVerbs.POST:
requests.post(event.url, data=json.dumps(event.params), headers=event.headers, timeout=REQUEST_TIMEOUT).raise_for_status() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except request_exception.RequestException as error:
logging.error('Dispatch event failed. Error: %s' % str(error)) # depends on [control=['except'], data=['error']] |
def unlink_learners(self):
"""
Iterate over each learner and unlink inactive SAP channel learners.
        This method iterates over each enterprise learner and unlinks the learner
from the enterprise if the learner is marked inactive in the related
integrated channel.
"""
sap_inactive_learners = self.client.get_inactive_sap_learners()
enterprise_customer = self.enterprise_configuration.enterprise_customer
if not sap_inactive_learners:
LOGGER.info(
'Enterprise customer {%s} has no SAPSF inactive learners',
enterprise_customer.name
)
return
provider_id = enterprise_customer.identity_provider
tpa_provider = get_identity_provider(provider_id)
if not tpa_provider:
LOGGER.info(
'Enterprise customer {%s} has no associated identity provider',
enterprise_customer.name
)
return None
for sap_inactive_learner in sap_inactive_learners:
social_auth_user = get_user_from_social_auth(tpa_provider, sap_inactive_learner['studentID'])
if not social_auth_user:
continue
try:
# Unlink user email from related Enterprise Customer
EnterpriseCustomerUser.objects.unlink_user(
enterprise_customer=enterprise_customer,
user_email=social_auth_user.email,
)
except (EnterpriseCustomerUser.DoesNotExist, PendingEnterpriseCustomerUser.DoesNotExist):
LOGGER.info(
'Learner with email {%s} is not associated with Enterprise Customer {%s}',
social_auth_user.email,
enterprise_customer.name
) | def function[unlink_learners, parameter[self]]:
constant[
Iterate over each learner and unlink inactive SAP channel learners.
    This method iterates over each enterprise learner and unlinks the learner
from the enterprise if the learner is marked inactive in the related
integrated channel.
]
variable[sap_inactive_learners] assign[=] call[name[self].client.get_inactive_sap_learners, parameter[]]
variable[enterprise_customer] assign[=] name[self].enterprise_configuration.enterprise_customer
if <ast.UnaryOp object at 0x7da1b013d510> begin[:]
call[name[LOGGER].info, parameter[constant[Enterprise customer {%s} has no SAPSF inactive learners], name[enterprise_customer].name]]
return[None]
variable[provider_id] assign[=] name[enterprise_customer].identity_provider
variable[tpa_provider] assign[=] call[name[get_identity_provider], parameter[name[provider_id]]]
if <ast.UnaryOp object at 0x7da18f09dd50> begin[:]
call[name[LOGGER].info, parameter[constant[Enterprise customer {%s} has no associated identity provider], name[enterprise_customer].name]]
return[constant[None]]
for taget[name[sap_inactive_learner]] in starred[name[sap_inactive_learners]] begin[:]
variable[social_auth_user] assign[=] call[name[get_user_from_social_auth], parameter[name[tpa_provider], call[name[sap_inactive_learner]][constant[studentID]]]]
if <ast.UnaryOp object at 0x7da18f09d270> begin[:]
continue
<ast.Try object at 0x7da18f09e1d0> | keyword[def] identifier[unlink_learners] ( identifier[self] ):
literal[string]
identifier[sap_inactive_learners] = identifier[self] . identifier[client] . identifier[get_inactive_sap_learners] ()
identifier[enterprise_customer] = identifier[self] . identifier[enterprise_configuration] . identifier[enterprise_customer]
keyword[if] keyword[not] identifier[sap_inactive_learners] :
identifier[LOGGER] . identifier[info] (
literal[string] ,
identifier[enterprise_customer] . identifier[name]
)
keyword[return]
identifier[provider_id] = identifier[enterprise_customer] . identifier[identity_provider]
identifier[tpa_provider] = identifier[get_identity_provider] ( identifier[provider_id] )
keyword[if] keyword[not] identifier[tpa_provider] :
identifier[LOGGER] . identifier[info] (
literal[string] ,
identifier[enterprise_customer] . identifier[name]
)
keyword[return] keyword[None]
keyword[for] identifier[sap_inactive_learner] keyword[in] identifier[sap_inactive_learners] :
identifier[social_auth_user] = identifier[get_user_from_social_auth] ( identifier[tpa_provider] , identifier[sap_inactive_learner] [ literal[string] ])
keyword[if] keyword[not] identifier[social_auth_user] :
keyword[continue]
keyword[try] :
identifier[EnterpriseCustomerUser] . identifier[objects] . identifier[unlink_user] (
identifier[enterprise_customer] = identifier[enterprise_customer] ,
identifier[user_email] = identifier[social_auth_user] . identifier[email] ,
)
keyword[except] ( identifier[EnterpriseCustomerUser] . identifier[DoesNotExist] , identifier[PendingEnterpriseCustomerUser] . identifier[DoesNotExist] ):
identifier[LOGGER] . identifier[info] (
literal[string] ,
identifier[social_auth_user] . identifier[email] ,
identifier[enterprise_customer] . identifier[name]
) | def unlink_learners(self):
"""
Iterate over each learner and unlink inactive SAP channel learners.
        This method iterates over each enterprise learner and unlinks the learner
from the enterprise if the learner is marked inactive in the related
integrated channel.
"""
sap_inactive_learners = self.client.get_inactive_sap_learners()
enterprise_customer = self.enterprise_configuration.enterprise_customer
if not sap_inactive_learners:
LOGGER.info('Enterprise customer {%s} has no SAPSF inactive learners', enterprise_customer.name)
return # depends on [control=['if'], data=[]]
provider_id = enterprise_customer.identity_provider
tpa_provider = get_identity_provider(provider_id)
if not tpa_provider:
LOGGER.info('Enterprise customer {%s} has no associated identity provider', enterprise_customer.name)
return None # depends on [control=['if'], data=[]]
for sap_inactive_learner in sap_inactive_learners:
social_auth_user = get_user_from_social_auth(tpa_provider, sap_inactive_learner['studentID'])
if not social_auth_user:
continue # depends on [control=['if'], data=[]]
try:
# Unlink user email from related Enterprise Customer
EnterpriseCustomerUser.objects.unlink_user(enterprise_customer=enterprise_customer, user_email=social_auth_user.email) # depends on [control=['try'], data=[]]
except (EnterpriseCustomerUser.DoesNotExist, PendingEnterpriseCustomerUser.DoesNotExist):
LOGGER.info('Learner with email {%s} is not associated with Enterprise Customer {%s}', social_auth_user.email, enterprise_customer.name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['sap_inactive_learner']] |
def split(self, meta=False):
"""
        split a disconnected structure into connected substructures
:param meta: copy metadata to each substructure
:return: list of substructures
"""
return [self.substructure(c, meta, False) for c in connected_components(self)] | def function[split, parameter[self, meta]]:
constant[
    split a disconnected structure into connected substructures
:param meta: copy metadata to each substructure
:return: list of substructures
]
return[<ast.ListComp object at 0x7da20c6c4a30>] | keyword[def] identifier[split] ( identifier[self] , identifier[meta] = keyword[False] ):
literal[string]
keyword[return] [ identifier[self] . identifier[substructure] ( identifier[c] , identifier[meta] , keyword[False] ) keyword[for] identifier[c] keyword[in] identifier[connected_components] ( identifier[self] )] | def split(self, meta=False):
"""
        split a disconnected structure into connected substructures
:param meta: copy metadata to each substructure
:return: list of substructures
"""
return [self.substructure(c, meta, False) for c in connected_components(self)] |
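The row above leans on a `connected_components` helper from its own package; a rough analogue of the same split using networkx (an assumption for illustration, not the library this code actually uses):

```python
import networkx as nx

g = nx.Graph()
g.add_edges_from([(1, 2), (2, 3), (10, 11)])  # two disconnected pieces

# One subgraph per connected component, mirroring split() above.
parts = [g.subgraph(c).copy() for c in nx.connected_components(g)]
print([sorted(p.nodes) for p in parts])  # [[1, 2, 3], [10, 11]]
```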
def capture_payment(request):
"""
Capture the payment for a basket and create an order
request.data should contain:
'address': Dict with the following fields:
shipping_name
shipping_address_line1
shipping_address_city
shipping_address_zip
shipping_address_country
billing_name
billing_address_line1
billing_address_city
billing_address_zip
billing_address_country
'email': Email address of the customer
        'shipping_option': The shipping rate (in the site's currency)
"""
# get request data
address = request.data['address']
email = request.data.get('email', None)
shipping_option = request.data.get('shipping_option', None)
# Capture the payment
order = create_order(
email,
request,
addresses=address,
shipping_option=shipping_option,
capture_payment=True
)
response = Response(data={"order_id": order.id},
status=status.HTTP_201_CREATED)
return response | def function[capture_payment, parameter[request]]:
constant[
Capture the payment for a basket and create an order
request.data should contain:
'address': Dict with the following fields:
shipping_name
shipping_address_line1
shipping_address_city
shipping_address_zip
shipping_address_country
billing_name
billing_address_line1
billing_address_city
billing_address_zip
billing_address_country
'email': Email address of the customer
    'shipping_option': The shipping rate (in the site's currency)
]
variable[address] assign[=] call[name[request].data][constant[address]]
variable[email] assign[=] call[name[request].data.get, parameter[constant[email], constant[None]]]
variable[shipping_option] assign[=] call[name[request].data.get, parameter[constant[shipping_option], constant[None]]]
variable[order] assign[=] call[name[create_order], parameter[name[email], name[request]]]
variable[response] assign[=] call[name[Response], parameter[]]
return[name[response]] | keyword[def] identifier[capture_payment] ( identifier[request] ):
literal[string]
identifier[address] = identifier[request] . identifier[data] [ literal[string] ]
identifier[email] = identifier[request] . identifier[data] . identifier[get] ( literal[string] , keyword[None] )
identifier[shipping_option] = identifier[request] . identifier[data] . identifier[get] ( literal[string] , keyword[None] )
identifier[order] = identifier[create_order] (
identifier[email] ,
identifier[request] ,
identifier[addresses] = identifier[address] ,
identifier[shipping_option] = identifier[shipping_option] ,
identifier[capture_payment] = keyword[True]
)
identifier[response] = identifier[Response] ( identifier[data] ={ literal[string] : identifier[order] . identifier[id] },
identifier[status] = identifier[status] . identifier[HTTP_201_CREATED] )
keyword[return] identifier[response] | def capture_payment(request):
"""
Capture the payment for a basket and create an order
request.data should contain:
'address': Dict with the following fields:
shipping_name
shipping_address_line1
shipping_address_city
shipping_address_zip
shipping_address_country
billing_name
billing_address_line1
billing_address_city
billing_address_zip
billing_address_country
'email': Email address of the customer
    'shipping_option': The shipping rate (in the site's currency)
"""
# get request data
address = request.data['address']
email = request.data.get('email', None)
shipping_option = request.data.get('shipping_option', None)
# Capture the payment
order = create_order(email, request, addresses=address, shipping_option=shipping_option, capture_payment=True)
response = Response(data={'order_id': order.id}, status=status.HTTP_201_CREATED)
return response |
def scatter(self, x, y, s=None, c=None, **kwds):
"""
Create a scatter plot with varying marker point size and color.
The coordinates of each point are defined by two dataframe columns and
filled circles are used to represent each point. This kind of plot is
useful to see complex correlations between two variables. Points could
be for instance natural 2D coordinates like longitude and latitude in
a map or, in general, any pair of metrics that can be plotted against
each other.
Parameters
----------
x : int or str
The column name or column position to be used as horizontal
coordinates for each point.
y : int or str
The column name or column position to be used as vertical
coordinates for each point.
s : scalar or array_like, optional
The size of each point. Possible values are:
- A single scalar so all points have the same size.
- A sequence of scalars, which will be used for each point's size
recursively. For instance, when passing [2,14] all points size
will be either 2 or 14, alternatively.
c : str, int or array_like, optional
The color of each point. Possible values are:
- A single color string referred to by name, RGB or RGBA code,
for instance 'red' or '#a98d19'.
- A sequence of color strings referred to by name, RGB or RGBA
code, which will be used for each point's color recursively. For
instance ['green','yellow'] all points will be filled in green or
yellow, alternatively.
- A column name or position whose values will be used to color the
marker points according to a colormap.
**kwds
Keyword arguments to pass on to :meth:`DataFrame.plot`.
Returns
-------
:class:`matplotlib.axes.Axes` or numpy.ndarray of them
See Also
--------
matplotlib.pyplot.scatter : Scatter plot using multiple input data
formats.
Examples
--------
Let's see how to draw a scatter plot using coordinates from the values
in a DataFrame's columns.
.. plot::
:context: close-figs
>>> df = pd.DataFrame([[5.1, 3.5, 0], [4.9, 3.0, 0], [7.0, 3.2, 1],
... [6.4, 3.2, 1], [5.9, 3.0, 2]],
... columns=['length', 'width', 'species'])
>>> ax1 = df.plot.scatter(x='length',
... y='width',
... c='DarkBlue')
And now with the color determined by a column as well.
.. plot::
:context: close-figs
>>> ax2 = df.plot.scatter(x='length',
... y='width',
... c='species',
... colormap='viridis')
"""
return self(kind='scatter', x=x, y=y, c=c, s=s, **kwds) | def function[scatter, parameter[self, x, y, s, c]]:
constant[
Create a scatter plot with varying marker point size and color.
The coordinates of each point are defined by two dataframe columns and
filled circles are used to represent each point. This kind of plot is
useful to see complex correlations between two variables. Points could
be for instance natural 2D coordinates like longitude and latitude in
a map or, in general, any pair of metrics that can be plotted against
each other.
Parameters
----------
x : int or str
The column name or column position to be used as horizontal
coordinates for each point.
y : int or str
The column name or column position to be used as vertical
coordinates for each point.
s : scalar or array_like, optional
The size of each point. Possible values are:
- A single scalar so all points have the same size.
- A sequence of scalars, which will be used for each point's size
recursively. For instance, when passing [2,14] all points size
will be either 2 or 14, alternatively.
c : str, int or array_like, optional
The color of each point. Possible values are:
- A single color string referred to by name, RGB or RGBA code,
for instance 'red' or '#a98d19'.
- A sequence of color strings referred to by name, RGB or RGBA
code, which will be used for each point's color recursively. For
instance ['green','yellow'] all points will be filled in green or
yellow, alternatively.
- A column name or position whose values will be used to color the
marker points according to a colormap.
**kwds
Keyword arguments to pass on to :meth:`DataFrame.plot`.
Returns
-------
:class:`matplotlib.axes.Axes` or numpy.ndarray of them
See Also
--------
matplotlib.pyplot.scatter : Scatter plot using multiple input data
formats.
Examples
--------
Let's see how to draw a scatter plot using coordinates from the values
in a DataFrame's columns.
.. plot::
:context: close-figs
>>> df = pd.DataFrame([[5.1, 3.5, 0], [4.9, 3.0, 0], [7.0, 3.2, 1],
... [6.4, 3.2, 1], [5.9, 3.0, 2]],
... columns=['length', 'width', 'species'])
>>> ax1 = df.plot.scatter(x='length',
... y='width',
... c='DarkBlue')
And now with the color determined by a column as well.
.. plot::
:context: close-figs
>>> ax2 = df.plot.scatter(x='length',
... y='width',
... c='species',
... colormap='viridis')
]
return[call[name[self], parameter[]]] | keyword[def] identifier[scatter] ( identifier[self] , identifier[x] , identifier[y] , identifier[s] = keyword[None] , identifier[c] = keyword[None] ,** identifier[kwds] ):
literal[string]
keyword[return] identifier[self] ( identifier[kind] = literal[string] , identifier[x] = identifier[x] , identifier[y] = identifier[y] , identifier[c] = identifier[c] , identifier[s] = identifier[s] ,** identifier[kwds] ) | def scatter(self, x, y, s=None, c=None, **kwds):
"""
Create a scatter plot with varying marker point size and color.
The coordinates of each point are defined by two dataframe columns and
filled circles are used to represent each point. This kind of plot is
useful to see complex correlations between two variables. Points could
be for instance natural 2D coordinates like longitude and latitude in
a map or, in general, any pair of metrics that can be plotted against
each other.
Parameters
----------
x : int or str
The column name or column position to be used as horizontal
coordinates for each point.
y : int or str
The column name or column position to be used as vertical
coordinates for each point.
s : scalar or array_like, optional
The size of each point. Possible values are:
- A single scalar so all points have the same size.
- A sequence of scalars, which will be used for each point's size
recursively. For instance, when passing [2,14] all points size
will be either 2 or 14, alternatively.
c : str, int or array_like, optional
The color of each point. Possible values are:
- A single color string referred to by name, RGB or RGBA code,
for instance 'red' or '#a98d19'.
- A sequence of color strings referred to by name, RGB or RGBA
code, which will be used for each point's color recursively. For
instance ['green','yellow'] all points will be filled in green or
yellow, alternatively.
- A column name or position whose values will be used to color the
marker points according to a colormap.
**kwds
Keyword arguments to pass on to :meth:`DataFrame.plot`.
Returns
-------
:class:`matplotlib.axes.Axes` or numpy.ndarray of them
See Also
--------
matplotlib.pyplot.scatter : Scatter plot using multiple input data
formats.
Examples
--------
Let's see how to draw a scatter plot using coordinates from the values
in a DataFrame's columns.
.. plot::
:context: close-figs
>>> df = pd.DataFrame([[5.1, 3.5, 0], [4.9, 3.0, 0], [7.0, 3.2, 1],
... [6.4, 3.2, 1], [5.9, 3.0, 2]],
... columns=['length', 'width', 'species'])
>>> ax1 = df.plot.scatter(x='length',
... y='width',
... c='DarkBlue')
And now with the color determined by a column as well.
.. plot::
:context: close-figs
>>> ax2 = df.plot.scatter(x='length',
... y='width',
... c='species',
... colormap='viridis')
"""
return self(kind='scatter', x=x, y=y, c=c, s=s, **kwds) |
def validate_qparams(self):
"""
Check if the keys specified by the user in qparams are supported.
Raise:
`ValueError` if errors.
"""
# No validation for ShellAdapter.
if isinstance(self, ShellAdapter): return
# Parse the template so that we know the list of supported options.
err_msg = ""
for param in self.qparams:
if param not in self.supported_qparams:
err_msg += "Unsupported QUEUE parameter name %s\n" % param
err_msg += "Supported parameters:\n"
for param_sup in self.supported_qparams:
err_msg += " %s \n" % param_sup
if err_msg:
raise ValueError(err_msg) | def function[validate_qparams, parameter[self]]:
constant[
Check if the keys specified by the user in qparams are supported.
Raise:
`ValueError` if errors.
]
if call[name[isinstance], parameter[name[self], name[ShellAdapter]]] begin[:]
return[None]
variable[err_msg] assign[=] constant[]
for taget[name[param]] in starred[name[self].qparams] begin[:]
if compare[name[param] <ast.NotIn object at 0x7da2590d7190> name[self].supported_qparams] begin[:]
<ast.AugAssign object at 0x7da207f00640>
<ast.AugAssign object at 0x7da207f02c20>
for taget[name[param_sup]] in starred[name[self].supported_qparams] begin[:]
<ast.AugAssign object at 0x7da207f03100>
if name[err_msg] begin[:]
<ast.Raise object at 0x7da207f03730> | keyword[def] identifier[validate_qparams] ( identifier[self] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] , identifier[ShellAdapter] ): keyword[return]
identifier[err_msg] = literal[string]
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[qparams] :
keyword[if] identifier[param] keyword[not] keyword[in] identifier[self] . identifier[supported_qparams] :
identifier[err_msg] += literal[string] % identifier[param]
identifier[err_msg] += literal[string]
keyword[for] identifier[param_sup] keyword[in] identifier[self] . identifier[supported_qparams] :
identifier[err_msg] += literal[string] % identifier[param_sup]
keyword[if] identifier[err_msg] :
keyword[raise] identifier[ValueError] ( identifier[err_msg] ) | def validate_qparams(self):
"""
Check if the keys specified by the user in qparams are supported.
Raise:
`ValueError` if errors.
"""
# No validation for ShellAdapter.
if isinstance(self, ShellAdapter):
return # depends on [control=['if'], data=[]]
# Parse the template so that we know the list of supported options.
err_msg = ''
for param in self.qparams:
if param not in self.supported_qparams:
err_msg += 'Unsupported QUEUE parameter name %s\n' % param
err_msg += 'Supported parameters:\n'
for param_sup in self.supported_qparams:
err_msg += ' %s \n' % param_sup # depends on [control=['for'], data=['param_sup']] # depends on [control=['if'], data=['param']] # depends on [control=['for'], data=['param']]
if err_msg:
raise ValueError(err_msg) # depends on [control=['if'], data=[]] |
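A minimal harness exercising the validation walk (the adapter below is a stand-in with an invented parameter list, not the real queue adapter):

```python
class FakeAdapter:
    supported_qparams = ["qname", "ntasks", "walltime"]  # invented names

    def __init__(self, qparams):
        self.qparams = qparams

    def validate_qparams(self):
        err_msg = ""
        for param in self.qparams:
            if param not in self.supported_qparams:
                err_msg += "Unsupported QUEUE parameter name %s\n" % param
        if err_msg:
            raise ValueError(err_msg)

adapter = FakeAdapter({"qname": "debug", "nodez": 2})  # "nodez" is a deliberate typo
try:
    adapter.validate_qparams()
except ValueError as exc:
    print(exc)  # Unsupported QUEUE parameter name nodez
```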
def make_fits_keys_dict(keys):
"""
Returns a dictionary to translate to unique FITS header keys up to 8 characters long
This is similar to Windows making up 8-character names for filenames that
are longer than this
"The keyword names may be up to 8 characters long and can only contain
uppercase letters A to Z, the digits 0 to 9, the hyphen, and the underscore
character." [1]
Arguments:
keys -- list of strings
Returns:
dictionary whose keys are the elements in the "keys" argument, and whose
values are made-up uppercase names
References:
[1] http://fits.gsfc.nasa.gov/fits_primer.html
"""
key_dict = {}
new_keys = []
for key in keys:
# converts to valid FITS key according to reference [1] above
fits_key = valid_fits_key(key)
num_digits = 1
i = -1
i_max = 9
while fits_key in new_keys:
i += 1
if i > i_max:
i = 0
i_max = i_max * 10 + 9
num_digits += 1
fits_key = fits_key[:(8 - num_digits)] + (("%0{0:d}d".format(num_digits)) % i)
key_dict[key] = fits_key
new_keys.append(fits_key)
return key_dict | def function[make_fits_keys_dict, parameter[keys]]:
constant[
Returns a dictionary to translate to unique FITS header keys up to 8 characters long
This is similar to Windows making up 8-character names for filenames that
are longer than this
"The keyword names may be up to 8 characters long and can only contain
uppercase letters A to Z, the digits 0 to 9, the hyphen, and the underscore
character." [1]
Arguments:
keys -- list of strings
Returns:
dictionary whose keys are the elements in the "keys" argument, and whose
values are made-up uppercase names
References:
[1] http://fits.gsfc.nasa.gov/fits_primer.html
]
variable[key_dict] assign[=] dictionary[[], []]
variable[new_keys] assign[=] list[[]]
for taget[name[key]] in starred[name[keys]] begin[:]
variable[fits_key] assign[=] call[name[valid_fits_key], parameter[name[key]]]
variable[num_digits] assign[=] constant[1]
variable[i] assign[=] <ast.UnaryOp object at 0x7da1b287bf40>
variable[i_max] assign[=] constant[9]
while compare[name[fits_key] in name[new_keys]] begin[:]
<ast.AugAssign object at 0x7da1b2879d80>
if compare[name[i] greater[>] name[i_max]] begin[:]
variable[i] assign[=] constant[0]
variable[i_max] assign[=] binary_operation[binary_operation[name[i_max] * constant[10]] + constant[9]]
<ast.AugAssign object at 0x7da1b2879d50>
variable[fits_key] assign[=] binary_operation[call[name[fits_key]][<ast.Slice object at 0x7da1b287a950>] + binary_operation[call[constant[%0{0:d}d].format, parameter[name[num_digits]]] <ast.Mod object at 0x7da2590d6920> name[i]]]
call[name[key_dict]][name[key]] assign[=] name[fits_key]
call[name[new_keys].append, parameter[name[fits_key]]]
return[name[key_dict]] | keyword[def] identifier[make_fits_keys_dict] ( identifier[keys] ):
literal[string]
identifier[key_dict] ={}
identifier[new_keys] =[]
keyword[for] identifier[key] keyword[in] identifier[keys] :
identifier[fits_key] = identifier[valid_fits_key] ( identifier[key] )
identifier[num_digits] = literal[int]
identifier[i] =- literal[int]
identifier[i_max] = literal[int]
keyword[while] identifier[fits_key] keyword[in] identifier[new_keys] :
identifier[i] += literal[int]
keyword[if] identifier[i] > identifier[i_max] :
identifier[i] = literal[int]
identifier[i_max] = identifier[i_max] * literal[int] + literal[int]
identifier[num_digits] += literal[int]
identifier[fits_key] = identifier[fits_key] [:( literal[int] - identifier[num_digits] )]+(( literal[string] . identifier[format] ( identifier[num_digits] ))% identifier[i] )
identifier[key_dict] [ identifier[key] ]= identifier[fits_key]
identifier[new_keys] . identifier[append] ( identifier[fits_key] )
keyword[return] identifier[key_dict] | def make_fits_keys_dict(keys):
"""
Returns a dictionary to translate to unique FITS header keys up to 8 characters long
This is similar to Windows making up 8-character names for filenames that
are longer than this
"The keyword names may be up to 8 characters long and can only contain
uppercase letters A to Z, the digits 0 to 9, the hyphen, and the underscore
character." [1]
Arguments:
keys -- list of strings
Returns:
dictionary whose keys are the elements in the "keys" argument, and whose
values are made-up uppercase names
References:
[1] http://fits.gsfc.nasa.gov/fits_primer.html
"""
key_dict = {}
new_keys = []
for key in keys: # converts to valid FITS key according to reference [1] above
fits_key = valid_fits_key(key)
num_digits = 1
i = -1
i_max = 9
while fits_key in new_keys:
i += 1
if i > i_max:
i = 0
i_max = i_max * 10 + 9
num_digits += 1 # depends on [control=['if'], data=['i', 'i_max']]
fits_key = fits_key[:8 - num_digits] + '%0{0:d}d'.format(num_digits) % i # depends on [control=['while'], data=['fits_key']]
key_dict[key] = fits_key
new_keys.append(fits_key) # depends on [control=['for'], data=['key']]
return key_dict |
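`make_fits_keys_dict` depends on a `valid_fits_key` helper that is not shown; the stub below is a guess at its contract (uppercase, FITS-legal characters only, clipped to 8 chars) so the collision handling can be demonstrated:

```python
import re

def valid_fits_key(key):
    # Hypothetical stand-in: uppercase, keep A-Z/0-9/'-'/'_', clip to 8 chars.
    return re.sub(r"[^A-Z0-9_-]", "_", key.upper())[:8]

keys = ["temperature_min", "temperature_max", "temperature_avg"]
print(make_fits_keys_dict(keys))
# {'temperature_min': 'TEMPERAT',
#  'temperature_max': 'TEMPERA0',
#  'temperature_avg': 'TEMPERA1'}
```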
def _build_youtube_dl_coprocessor(cls, session: AppSession, proxy_port: int):
'''Build youtube-dl coprocessor.'''
# Test early for executable
wpull.processor.coprocessor.youtubedl.get_version(session.args.youtube_dl_exe)
coprocessor = session.factory.new(
'YoutubeDlCoprocessor',
session.args.youtube_dl_exe,
(session.args.proxy_server_address, proxy_port),
root_path=session.args.directory_prefix,
user_agent=session.args.user_agent or session.default_user_agent,
warc_recorder=session.factory.get('WARCRecorder'),
inet_family=session.args.inet_family,
        # Proxy will always present an invalid MITM cert
#check_certificate=session.args.check_certificate
check_certificate=False
)
return coprocessor | def function[_build_youtube_dl_coprocessor, parameter[cls, session, proxy_port]]:
constant[Build youtube-dl coprocessor.]
call[name[wpull].processor.coprocessor.youtubedl.get_version, parameter[name[session].args.youtube_dl_exe]]
variable[coprocessor] assign[=] call[name[session].factory.new, parameter[constant[YoutubeDlCoprocessor], name[session].args.youtube_dl_exe, tuple[[<ast.Attribute object at 0x7da204344c70>, <ast.Name object at 0x7da204347af0>]]]]
return[name[coprocessor]] | keyword[def] identifier[_build_youtube_dl_coprocessor] ( identifier[cls] , identifier[session] : identifier[AppSession] , identifier[proxy_port] : identifier[int] ):
literal[string]
identifier[wpull] . identifier[processor] . identifier[coprocessor] . identifier[youtubedl] . identifier[get_version] ( identifier[session] . identifier[args] . identifier[youtube_dl_exe] )
identifier[coprocessor] = identifier[session] . identifier[factory] . identifier[new] (
literal[string] ,
identifier[session] . identifier[args] . identifier[youtube_dl_exe] ,
( identifier[session] . identifier[args] . identifier[proxy_server_address] , identifier[proxy_port] ),
identifier[root_path] = identifier[session] . identifier[args] . identifier[directory_prefix] ,
identifier[user_agent] = identifier[session] . identifier[args] . identifier[user_agent] keyword[or] identifier[session] . identifier[default_user_agent] ,
identifier[warc_recorder] = identifier[session] . identifier[factory] . identifier[get] ( literal[string] ),
identifier[inet_family] = identifier[session] . identifier[args] . identifier[inet_family] ,
identifier[check_certificate] = keyword[False]
)
keyword[return] identifier[coprocessor] | def _build_youtube_dl_coprocessor(cls, session: AppSession, proxy_port: int):
"""Build youtube-dl coprocessor."""
# Test early for executable
wpull.processor.coprocessor.youtubedl.get_version(session.args.youtube_dl_exe)
    # Proxy will always present an invalid MITM cert
#check_certificate=session.args.check_certificate
coprocessor = session.factory.new('YoutubeDlCoprocessor', session.args.youtube_dl_exe, (session.args.proxy_server_address, proxy_port), root_path=session.args.directory_prefix, user_agent=session.args.user_agent or session.default_user_agent, warc_recorder=session.factory.get('WARCRecorder'), inet_family=session.args.inet_family, check_certificate=False)
return coprocessor |
def __check_for_extra_arguments(self, args_required, args_allowed):
"""
Report an error in case any extra arguments are detected.
Does nothing if reporting extra arguments as exceptions has not been
enabled.
May only be called after the argument processing has been completed.
"""
assert not self.active()
if not self.__extra_parameter_errors:
return
if self.__kwargs:
param_name = self.__kwargs.keys()[0]
if param_name in self.__params_with_arguments:
msg = "got multiple values for parameter '%s'"
else:
msg = "got an unexpected keyword argument '%s'"
self.__error(msg % (param_name,))
if self.__args:
def plural_suffix(count):
if count == 1:
return ""
return "s"
def plural_was_were(count):
if count == 1:
return "was"
return "were"
expected = args_required
if args_required != args_allowed:
expected = "%d to %d" % (args_required, args_allowed)
given = self.__args_count
msg_parts = ["takes %s positional argument" % (expected,),
plural_suffix(expected), " but %d " % (given,),
plural_was_were(given), " given"]
self.__error("".join(msg_parts)) | def function[__check_for_extra_arguments, parameter[self, args_required, args_allowed]]:
constant[
Report an error in case any extra arguments are detected.
Does nothing if reporting extra arguments as exceptions has not been
enabled.
May only be called after the argument processing has been completed.
]
assert[<ast.UnaryOp object at 0x7da18f720580>]
if <ast.UnaryOp object at 0x7da18f721bd0> begin[:]
return[None]
if name[self].__kwargs begin[:]
variable[param_name] assign[=] call[call[name[self].__kwargs.keys, parameter[]]][constant[0]]
if compare[name[param_name] in name[self].__params_with_arguments] begin[:]
variable[msg] assign[=] constant[got multiple values for parameter '%s']
call[name[self].__error, parameter[binary_operation[name[msg] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c9938e0>]]]]]
if name[self].__args begin[:]
def function[plural_suffix, parameter[count]]:
if compare[name[count] equal[==] constant[1]] begin[:]
return[constant[]]
return[constant[s]]
def function[plural_was_were, parameter[count]]:
if compare[name[count] equal[==] constant[1]] begin[:]
return[constant[was]]
return[constant[were]]
variable[expected] assign[=] name[args_required]
if compare[name[args_required] not_equal[!=] name[args_allowed]] begin[:]
variable[expected] assign[=] binary_operation[constant[%d to %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c9917e0>, <ast.Name object at 0x7da20c993d90>]]]
variable[given] assign[=] name[self].__args_count
variable[msg_parts] assign[=] list[[<ast.BinOp object at 0x7da20c9900a0>, <ast.Call object at 0x7da20c992fb0>, <ast.BinOp object at 0x7da20c991180>, <ast.Call object at 0x7da20c993ca0>, <ast.Constant object at 0x7da20c992b00>]]
call[name[self].__error, parameter[call[constant[].join, parameter[name[msg_parts]]]]] | keyword[def] identifier[__check_for_extra_arguments] ( identifier[self] , identifier[args_required] , identifier[args_allowed] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[active] ()
keyword[if] keyword[not] identifier[self] . identifier[__extra_parameter_errors] :
keyword[return]
keyword[if] identifier[self] . identifier[__kwargs] :
identifier[param_name] = identifier[self] . identifier[__kwargs] . identifier[keys] ()[ literal[int] ]
keyword[if] identifier[param_name] keyword[in] identifier[self] . identifier[__params_with_arguments] :
identifier[msg] = literal[string]
keyword[else] :
identifier[msg] = literal[string]
identifier[self] . identifier[__error] ( identifier[msg] %( identifier[param_name] ,))
keyword[if] identifier[self] . identifier[__args] :
keyword[def] identifier[plural_suffix] ( identifier[count] ):
keyword[if] identifier[count] == literal[int] :
keyword[return] literal[string]
keyword[return] literal[string]
keyword[def] identifier[plural_was_were] ( identifier[count] ):
keyword[if] identifier[count] == literal[int] :
keyword[return] literal[string]
keyword[return] literal[string]
identifier[expected] = identifier[args_required]
keyword[if] identifier[args_required] != identifier[args_allowed] :
identifier[expected] = literal[string] %( identifier[args_required] , identifier[args_allowed] )
identifier[given] = identifier[self] . identifier[__args_count]
identifier[msg_parts] =[ literal[string] %( identifier[expected] ,),
identifier[plural_suffix] ( identifier[expected] ), literal[string] %( identifier[given] ,),
identifier[plural_was_were] ( identifier[given] ), literal[string] ]
identifier[self] . identifier[__error] ( literal[string] . identifier[join] ( identifier[msg_parts] )) | def __check_for_extra_arguments(self, args_required, args_allowed):
"""
Report an error in case any extra arguments are detected.
Does nothing if reporting extra arguments as exceptions has not been
enabled.
May only be called after the argument processing has been completed.
"""
assert not self.active()
if not self.__extra_parameter_errors:
return # depends on [control=['if'], data=[]]
if self.__kwargs:
param_name = self.__kwargs.keys()[0]
if param_name in self.__params_with_arguments:
msg = "got multiple values for parameter '%s'" # depends on [control=['if'], data=[]]
else:
msg = "got an unexpected keyword argument '%s'"
self.__error(msg % (param_name,)) # depends on [control=['if'], data=[]]
if self.__args:
def plural_suffix(count):
if count == 1:
return '' # depends on [control=['if'], data=[]]
return 's'
def plural_was_were(count):
if count == 1:
return 'was' # depends on [control=['if'], data=[]]
return 'were'
expected = args_required
if args_required != args_allowed:
expected = '%d to %d' % (args_required, args_allowed) # depends on [control=['if'], data=['args_required', 'args_allowed']]
given = self.__args_count
msg_parts = ['takes %s positional argument' % (expected,), plural_suffix(expected), ' but %d ' % (given,), plural_was_were(given), ' given']
self.__error(''.join(msg_parts)) # depends on [control=['if'], data=[]] |
def data64_send(self, type, len, data, force_mavlink1=False):
'''
Data packet, size 64
type : data type (uint8_t)
len : data length (uint8_t)
data : raw data (uint8_t)
'''
return self.send(self.data64_encode(type, len, data), force_mavlink1=force_mavlink1) | def function[data64_send, parameter[self, type, len, data, force_mavlink1]]:
constant[
Data packet, size 64
type : data type (uint8_t)
len : data length (uint8_t)
data : raw data (uint8_t)
]
return[call[name[self].send, parameter[call[name[self].data64_encode, parameter[name[type], name[len], name[data]]]]]] | keyword[def] identifier[data64_send] ( identifier[self] , identifier[type] , identifier[len] , identifier[data] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[data64_encode] ( identifier[type] , identifier[len] , identifier[data] ), identifier[force_mavlink1] = identifier[force_mavlink1] ) | def data64_send(self, type, len, data, force_mavlink1=False):
"""
Data packet, size 64
type : data type (uint8_t)
len : data length (uint8_t)
data : raw data (uint8_t)
"""
return self.send(self.data64_encode(type, len, data), force_mavlink1=force_mavlink1) |
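# Hedged caller-side sketch for data64_send() above, using the pymavlink
# mavutil helper; the connection string and type id are illustrative.
from pymavlink import mavutil

link = mavutil.mavlink_connection('udpout:127.0.0.1:14550')
payload = bytes(range(64))                       # exactly 64 raw bytes
link.mav.data64_send(1, len(payload), payload)   # type=1: application-defined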
def execstr(self, local_name):
"""returns a string which when evaluated will
add the stored variables to the current namespace
        local_name is the name of the variable in the current scope
* use locals().update(dyn.to_dict()) instead
"""
execstr = ''
for (key, val) in six.iteritems(self.__dict__):
if key not in self._printable_exclude:
execstr += key + ' = ' + local_name + '.' + key + '\n'
return execstr | def function[execstr, parameter[self, local_name]]:
constant[returns a string which when evaluated will
add the stored variables to the current namespace
localname is the name of the variable in the current scope
* use locals().update(dyn.to_dict()) instead
]
variable[execstr] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da1b253b9a0>, <ast.Name object at 0x7da1b253a290>]]] in starred[call[name[six].iteritems, parameter[name[self].__dict__]]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]._printable_exclude] begin[:]
<ast.AugAssign object at 0x7da1b24b6710>
return[name[execstr]] | keyword[def] identifier[execstr] ( identifier[self] , identifier[local_name] ):
literal[string]
identifier[execstr] = literal[string]
keyword[for] ( identifier[key] , identifier[val] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[__dict__] ):
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_printable_exclude] :
identifier[execstr] += identifier[key] + literal[string] + identifier[local_name] + literal[string] + identifier[key] + literal[string]
keyword[return] identifier[execstr] | def execstr(self, local_name):
"""returns a string which when evaluated will
add the stored variables to the current namespace
    local_name is the name of the variable in the current scope
* use locals().update(dyn.to_dict()) instead
"""
execstr = ''
for (key, val) in six.iteritems(self.__dict__):
if key not in self._printable_exclude:
execstr += key + ' = ' + local_name + '.' + key + '\n' # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]]
return execstr |
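# What execstr() above generates, shown with a minimal self-contained class
# using the same loop body (six.iteritems replaced by plain iteration):
class DynStruct(object):
    _printable_exclude = frozenset()
    def execstr(self, local_name):
        out = ''
        for key in self.__dict__:
            if key not in self._printable_exclude:
                out += key + ' = ' + local_name + '.' + key + '\n'
        return out

dyn = DynStruct()
dyn.x, dyn.y = 1, 2
print(dyn.execstr('dyn'))  # "x = dyn.x\ny = dyn.y\n"
exec(dyn.execstr('dyn'))   # binds x and y in the current namespace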
def poke(self, context):
"""
Check for message on subscribed queue and write to xcom the message with key ``messages``
:param context: the context object
:type context: dict
:return: ``True`` if message is available or ``False``
"""
sqs_hook = SQSHook(aws_conn_id=self.aws_conn_id)
sqs_conn = sqs_hook.get_conn()
self.log.info('SQSSensor checking for message on queue: %s', self.sqs_queue)
messages = sqs_conn.receive_message(QueueUrl=self.sqs_queue,
MaxNumberOfMessages=self.max_messages,
WaitTimeSeconds=self.wait_time_seconds)
self.log.info("reveived message %s", str(messages))
if 'Messages' in messages and len(messages['Messages']) > 0:
entries = [{'Id': message['MessageId'], 'ReceiptHandle': message['ReceiptHandle']}
for message in messages['Messages']]
result = sqs_conn.delete_message_batch(QueueUrl=self.sqs_queue,
Entries=entries)
if 'Successful' in result:
context['ti'].xcom_push(key='messages', value=messages)
return True
else:
raise AirflowException(
'Delete SQS Messages failed ' + str(result) + ' for messages ' + str(messages))
return False | def function[poke, parameter[self, context]]:
constant[
Check for message on subscribed queue and write to xcom the message with key ``messages``
:param context: the context object
:type context: dict
:return: ``True`` if message is available or ``False``
]
variable[sqs_hook] assign[=] call[name[SQSHook], parameter[]]
variable[sqs_conn] assign[=] call[name[sqs_hook].get_conn, parameter[]]
call[name[self].log.info, parameter[constant[SQSSensor checking for message on queue: %s], name[self].sqs_queue]]
variable[messages] assign[=] call[name[sqs_conn].receive_message, parameter[]]
call[name[self].log.info, parameter[constant[received message %s], call[name[str], parameter[name[messages]]]]]
if <ast.BoolOp object at 0x7da1b05bc670> begin[:]
variable[entries] assign[=] <ast.ListComp object at 0x7da1b05be620>
variable[result] assign[=] call[name[sqs_conn].delete_message_batch, parameter[]]
if compare[constant[Successful] in name[result]] begin[:]
call[call[name[context]][constant[ti]].xcom_push, parameter[]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[poke] ( identifier[self] , identifier[context] ):
literal[string]
identifier[sqs_hook] = identifier[SQSHook] ( identifier[aws_conn_id] = identifier[self] . identifier[aws_conn_id] )
identifier[sqs_conn] = identifier[sqs_hook] . identifier[get_conn] ()
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[sqs_queue] )
identifier[messages] = identifier[sqs_conn] . identifier[receive_message] ( identifier[QueueUrl] = identifier[self] . identifier[sqs_queue] ,
identifier[MaxNumberOfMessages] = identifier[self] . identifier[max_messages] ,
identifier[WaitTimeSeconds] = identifier[self] . identifier[wait_time_seconds] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[str] ( identifier[messages] ))
keyword[if] literal[string] keyword[in] identifier[messages] keyword[and] identifier[len] ( identifier[messages] [ literal[string] ])> literal[int] :
identifier[entries] =[{ literal[string] : identifier[message] [ literal[string] ], literal[string] : identifier[message] [ literal[string] ]}
keyword[for] identifier[message] keyword[in] identifier[messages] [ literal[string] ]]
identifier[result] = identifier[sqs_conn] . identifier[delete_message_batch] ( identifier[QueueUrl] = identifier[self] . identifier[sqs_queue] ,
identifier[Entries] = identifier[entries] )
keyword[if] literal[string] keyword[in] identifier[result] :
identifier[context] [ literal[string] ]. identifier[xcom_push] ( identifier[key] = literal[string] , identifier[value] = identifier[messages] )
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[AirflowException] (
literal[string] + identifier[str] ( identifier[result] )+ literal[string] + identifier[str] ( identifier[messages] ))
keyword[return] keyword[False] | def poke(self, context):
"""
Check for message on subscribed queue and write to xcom the message with key ``messages``
:param context: the context object
:type context: dict
:return: ``True`` if message is available or ``False``
"""
sqs_hook = SQSHook(aws_conn_id=self.aws_conn_id)
sqs_conn = sqs_hook.get_conn()
self.log.info('SQSSensor checking for message on queue: %s', self.sqs_queue)
messages = sqs_conn.receive_message(QueueUrl=self.sqs_queue, MaxNumberOfMessages=self.max_messages, WaitTimeSeconds=self.wait_time_seconds)
self.log.info('received message %s', str(messages))
if 'Messages' in messages and len(messages['Messages']) > 0:
entries = [{'Id': message['MessageId'], 'ReceiptHandle': message['ReceiptHandle']} for message in messages['Messages']]
result = sqs_conn.delete_message_batch(QueueUrl=self.sqs_queue, Entries=entries)
if 'Successful' in result:
context['ti'].xcom_push(key='messages', value=messages)
return True # depends on [control=['if'], data=[]]
else:
raise AirflowException('Delete SQS Messages failed ' + str(result) + ' for messages ' + str(messages)) # depends on [control=['if'], data=[]]
return False |
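# Hedged DAG-side sketch of wiring up the sensor whose poke() is shown
# above; the import path matches the Airflow 1.10-era contrib layout and
# the queue URL is a placeholder.
from airflow.contrib.sensors.aws_sqs_sensor import SQSSensor

wait_for_message = SQSSensor(
    task_id='wait_for_message',
    sqs_queue='https://sqs.us-east-1.amazonaws.com/123456789012/my-queue',
    aws_conn_id='aws_default',
    max_messages=5,
    wait_time_seconds=1,
)
# On success the deleted messages are available to downstream tasks via
# XCom under the key 'messages'.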
def _eval_target_brutal(state, ip, limit):
"""
The traditional way of evaluating symbolic jump targets.
:param state: A SimState instance.
:param ip: The AST of the instruction pointer to evaluate.
:param limit: The maximum number of concrete IPs.
:return: A list of conditions and the corresponding concrete IPs.
:rtype: list
"""
addrs = state.solver.eval_upto(ip, limit)
return [ (ip == addr, addr) for addr in addrs ] | def function[_eval_target_brutal, parameter[state, ip, limit]]:
constant[
The traditional way of evaluating symbolic jump targets.
:param state: A SimState instance.
:param ip: The AST of the instruction pointer to evaluate.
:param limit: The maximum number of concrete IPs.
:return: A list of conditions and the corresponding concrete IPs.
:rtype: list
]
variable[addrs] assign[=] call[name[state].solver.eval_upto, parameter[name[ip], name[limit]]]
return[<ast.ListComp object at 0x7da20c6c53f0>] | keyword[def] identifier[_eval_target_brutal] ( identifier[state] , identifier[ip] , identifier[limit] ):
literal[string]
identifier[addrs] = identifier[state] . identifier[solver] . identifier[eval_upto] ( identifier[ip] , identifier[limit] )
keyword[return] [( identifier[ip] == identifier[addr] , identifier[addr] ) keyword[for] identifier[addr] keyword[in] identifier[addrs] ] | def _eval_target_brutal(state, ip, limit):
"""
The traditional way of evaluating symbolic jump targets.
:param state: A SimState instance.
:param ip: The AST of the instruction pointer to evaluate.
:param limit: The maximum number of concrete IPs.
:return: A list of conditions and the corresponding concrete IPs.
:rtype: list
"""
addrs = state.solver.eval_upto(ip, limit)
return [(ip == addr, addr) for addr in addrs] |
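# Toy illustration of the (condition, address) pairs _eval_target_brutal()
# above returns, with small stand-ins instead of a real angr SimState:
class FakeSolver(object):
    def eval_upto(self, expr, limit):
        return [0x401000, 0x401010][:limit]

class FakeState(object):
    solver = FakeSolver()

ip = 0x401000  # with angr this would be a symbolic AST such as state.ip
print(_eval_target_brutal(FakeState(), ip, 2))
# -> [(True, 4198400), (False, 4198416)]  (ip == addr folds to bool here)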
def BEQ(self, params):
"""
BEQ label
Branch to the instruction at label if the Z flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BEQ label
def BEQ_func():
if self.is_Z_set():
self.register['PC'] = self.labels[label]
return BEQ_func | def function[BEQ, parameter[self, params]]:
constant[
BEQ label
Branch to the instruction at label if the Z flag is set
]
variable[label] assign[=] call[name[self].get_one_parameter, parameter[name[self].ONE_PARAMETER, name[params]]]
call[name[self].check_arguments, parameter[]]
def function[BEQ_func, parameter[]]:
if call[name[self].is_Z_set, parameter[]] begin[:]
call[name[self].register][constant[PC]] assign[=] call[name[self].labels][name[label]]
return[name[BEQ_func]] | keyword[def] identifier[BEQ] ( identifier[self] , identifier[params] ):
literal[string]
identifier[label] = identifier[self] . identifier[get_one_parameter] ( identifier[self] . identifier[ONE_PARAMETER] , identifier[params] )
identifier[self] . identifier[check_arguments] ( identifier[label_exists] =( identifier[label] ,))
keyword[def] identifier[BEQ_func] ():
keyword[if] identifier[self] . identifier[is_Z_set] ():
identifier[self] . identifier[register] [ literal[string] ]= identifier[self] . identifier[labels] [ identifier[label] ]
keyword[return] identifier[BEQ_func] | def BEQ(self, params):
"""
BEQ label
Branch to the instruction at label if the Z flag is set
"""
label = self.get_one_parameter(self.ONE_PARAMETER, params)
self.check_arguments(label_exists=(label,))
# BEQ label
def BEQ_func():
if self.is_Z_set():
self.register['PC'] = self.labels[label] # depends on [control=['if'], data=[]]
return BEQ_func |
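# Run-time behaviour of the BEQ_func closure above, mirrored in a tiny
# stand-in interpreter (the real class also owns get_one_parameter and
# check_arguments, omitted here):
class MiniInterp(object):
    def __init__(self):
        self.labels = {'loop': 0x08}
        self.register = {'PC': 0x00}
        self.Z = True
    def is_Z_set(self):
        return self.Z
    def beq_func(self, label):  # same body as BEQ_func
        if self.is_Z_set():
            self.register['PC'] = self.labels[label]

cpu = MiniInterp()
cpu.beq_func('loop')
print(hex(cpu.register['PC']))  # 0x8, because Z was set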
def stop(self):
'''
stop display
:rtype: self
'''
self.redirect_display(False)
EasyProcess.stop(self)
if self.use_xauth:
self._clear_xauth()
return self | def function[stop, parameter[self]]:
constant[
stop display
:rtype: self
]
call[name[self].redirect_display, parameter[constant[False]]]
call[name[EasyProcess].stop, parameter[name[self]]]
if name[self].use_xauth begin[:]
call[name[self]._clear_xauth, parameter[]]
return[name[self]] | keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
identifier[self] . identifier[redirect_display] ( keyword[False] )
identifier[EasyProcess] . identifier[stop] ( identifier[self] )
keyword[if] identifier[self] . identifier[use_xauth] :
identifier[self] . identifier[_clear_xauth] ()
keyword[return] identifier[self] | def stop(self):
"""
stop display
:rtype: self
"""
self.redirect_display(False)
EasyProcess.stop(self)
if self.use_xauth:
self._clear_xauth() # depends on [control=['if'], data=[]]
return self |
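# Typical pyvirtualdisplay-style bracketing around stop() above; treat the
# constructor arguments as the common ones, not a full reference.
from pyvirtualdisplay import Display

display = Display(visible=0, size=(1024, 768))
display.start()
try:
    pass  # run code that needs an X display, e.g. a Selenium browser
finally:
    display.stop()  # restores DISPLAY and clears xauth data when enabled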
def read_config(self, config_file):
"""
Parses the specified configuration file and stores the values. Raises
an InvalidConfigurationFile exception if the file is not well-formed.
"""
cfg = ConfigParser.SafeConfigParser()
try:
cfg.read(config_file)
except ConfigParser.MissingSectionHeaderError as e:
# The file exists, but doesn't have the correct format.
raise exc.InvalidConfigurationFile(e)
def safe_get(section, option, default=None):
try:
return cfg.get(section, option)
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
return default
# A common mistake is including credentials in the config file. If any
# values are found, issue a warning so that the developer can correct
# this problem.
creds_found = False
for section in cfg.sections():
if section == "settings":
section_name = "default"
self._default_set = True
else:
section_name = section
# Check for included credentials
for key in ("username", "password", "api_key"):
if creds_found:
break
if safe_get(section, key):
creds_found = True
dct = self._settings[section_name] = {}
dct["region"] = safe_get(section, "region", default_region)
ityp = safe_get(section, "identity_type")
if ityp:
dct["identity_type"] = _id_type(ityp)
dct["identity_class"] = _import_identity(ityp)
# Handle both the old and new names for this setting.
debug = safe_get(section, "debug")
if debug is None:
debug = safe_get(section, "http_debug", "False")
dct["http_debug"] = debug == "True"
verify_ssl = safe_get(section, "verify_ssl", "True")
dct["verify_ssl"] = verify_ssl == "True"
dct["keyring_username"] = safe_get(section, "keyring_username")
dct["encoding"] = safe_get(section, "encoding", default_encoding)
dct["auth_endpoint"] = safe_get(section, "auth_endpoint")
dct["tenant_name"] = safe_get(section, "tenant_name")
dct["tenant_id"] = safe_get(section, "tenant_id")
use_servicenet = safe_get(section, "use_servicenet", "False")
dct["use_servicenet"] = use_servicenet == "True"
app_agent = safe_get(section, "custom_user_agent")
if app_agent:
# Customize the user-agent string with the app name.
dct["user_agent"] = "%s %s" % (app_agent, USER_AGENT)
else:
dct["user_agent"] = USER_AGENT
# If this is the first section, make it the default
if not self._default_set:
self._settings["default"] = self._settings[section]
self._default_set = True
if creds_found:
warnings.warn("Login credentials were detected in your .pyrax.cfg "
"file. These have been ignored, but you should remove "
"them and either place them in a credential file, or "
"consider using another means of authentication. More "
"information on the use of credential files can be found "
"in the 'docs/getting_started.md' document.") | def function[read_config, parameter[self, config_file]]:
constant[
Parses the specified configuration file and stores the values. Raises
an InvalidConfigurationFile exception if the file is not well-formed.
]
variable[cfg] assign[=] call[name[ConfigParser].SafeConfigParser, parameter[]]
<ast.Try object at 0x7da2054a6500>
def function[safe_get, parameter[section, option, default]]:
<ast.Try object at 0x7da2054a7790>
variable[creds_found] assign[=] constant[False]
for taget[name[section]] in starred[call[name[cfg].sections, parameter[]]] begin[:]
if compare[name[section] equal[==] constant[settings]] begin[:]
variable[section_name] assign[=] constant[default]
name[self]._default_set assign[=] constant[True]
for taget[name[key]] in starred[tuple[[<ast.Constant object at 0x7da2054a6c50>, <ast.Constant object at 0x7da2054a7c70>, <ast.Constant object at 0x7da2054a6590>]]] begin[:]
if name[creds_found] begin[:]
break
if call[name[safe_get], parameter[name[section], name[key]]] begin[:]
variable[creds_found] assign[=] constant[True]
variable[dct] assign[=] dictionary[[], []]
call[name[dct]][constant[region]] assign[=] call[name[safe_get], parameter[name[section], constant[region], name[default_region]]]
variable[ityp] assign[=] call[name[safe_get], parameter[name[section], constant[identity_type]]]
if name[ityp] begin[:]
call[name[dct]][constant[identity_type]] assign[=] call[name[_id_type], parameter[name[ityp]]]
call[name[dct]][constant[identity_class]] assign[=] call[name[_import_identity], parameter[name[ityp]]]
variable[debug] assign[=] call[name[safe_get], parameter[name[section], constant[debug]]]
if compare[name[debug] is constant[None]] begin[:]
variable[debug] assign[=] call[name[safe_get], parameter[name[section], constant[http_debug], constant[False]]]
call[name[dct]][constant[http_debug]] assign[=] compare[name[debug] equal[==] constant[True]]
variable[verify_ssl] assign[=] call[name[safe_get], parameter[name[section], constant[verify_ssl], constant[True]]]
call[name[dct]][constant[verify_ssl]] assign[=] compare[name[verify_ssl] equal[==] constant[True]]
call[name[dct]][constant[keyring_username]] assign[=] call[name[safe_get], parameter[name[section], constant[keyring_username]]]
call[name[dct]][constant[encoding]] assign[=] call[name[safe_get], parameter[name[section], constant[encoding], name[default_encoding]]]
call[name[dct]][constant[auth_endpoint]] assign[=] call[name[safe_get], parameter[name[section], constant[auth_endpoint]]]
call[name[dct]][constant[tenant_name]] assign[=] call[name[safe_get], parameter[name[section], constant[tenant_name]]]
call[name[dct]][constant[tenant_id]] assign[=] call[name[safe_get], parameter[name[section], constant[tenant_id]]]
variable[use_servicenet] assign[=] call[name[safe_get], parameter[name[section], constant[use_servicenet], constant[False]]]
call[name[dct]][constant[use_servicenet]] assign[=] compare[name[use_servicenet] equal[==] constant[True]]
variable[app_agent] assign[=] call[name[safe_get], parameter[name[section], constant[custom_user_agent]]]
if name[app_agent] begin[:]
call[name[dct]][constant[user_agent]] assign[=] binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a6fe0>, <ast.Name object at 0x7da2054a73d0>]]]
if <ast.UnaryOp object at 0x7da2054a5b40> begin[:]
call[name[self]._settings][constant[default]] assign[=] call[name[self]._settings][name[section]]
name[self]._default_set assign[=] constant[True]
if name[creds_found] begin[:]
call[name[warnings].warn, parameter[constant[Login credentials were detected in your .pyrax.cfg file. These have been ignored, but you should remove them and either place them in a credential file, or consider using another means of authentication. More information on the use of credential files can be found in the 'docs/getting_started.md' document.]]] | keyword[def] identifier[read_config] ( identifier[self] , identifier[config_file] ):
literal[string]
identifier[cfg] = identifier[ConfigParser] . identifier[SafeConfigParser] ()
keyword[try] :
identifier[cfg] . identifier[read] ( identifier[config_file] )
keyword[except] identifier[ConfigParser] . identifier[MissingSectionHeaderError] keyword[as] identifier[e] :
keyword[raise] identifier[exc] . identifier[InvalidConfigurationFile] ( identifier[e] )
keyword[def] identifier[safe_get] ( identifier[section] , identifier[option] , identifier[default] = keyword[None] ):
keyword[try] :
keyword[return] identifier[cfg] . identifier[get] ( identifier[section] , identifier[option] )
keyword[except] ( identifier[ConfigParser] . identifier[NoSectionError] , identifier[ConfigParser] . identifier[NoOptionError] ):
keyword[return] identifier[default]
identifier[creds_found] = keyword[False]
keyword[for] identifier[section] keyword[in] identifier[cfg] . identifier[sections] ():
keyword[if] identifier[section] == literal[string] :
identifier[section_name] = literal[string]
identifier[self] . identifier[_default_set] = keyword[True]
keyword[else] :
identifier[section_name] = identifier[section]
keyword[for] identifier[key] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[creds_found] :
keyword[break]
keyword[if] identifier[safe_get] ( identifier[section] , identifier[key] ):
identifier[creds_found] = keyword[True]
identifier[dct] = identifier[self] . identifier[_settings] [ identifier[section_name] ]={}
identifier[dct] [ literal[string] ]= identifier[safe_get] ( identifier[section] , literal[string] , identifier[default_region] )
identifier[ityp] = identifier[safe_get] ( identifier[section] , literal[string] )
keyword[if] identifier[ityp] :
identifier[dct] [ literal[string] ]= identifier[_id_type] ( identifier[ityp] )
identifier[dct] [ literal[string] ]= identifier[_import_identity] ( identifier[ityp] )
identifier[debug] = identifier[safe_get] ( identifier[section] , literal[string] )
keyword[if] identifier[debug] keyword[is] keyword[None] :
identifier[debug] = identifier[safe_get] ( identifier[section] , literal[string] , literal[string] )
identifier[dct] [ literal[string] ]= identifier[debug] == literal[string]
identifier[verify_ssl] = identifier[safe_get] ( identifier[section] , literal[string] , literal[string] )
identifier[dct] [ literal[string] ]= identifier[verify_ssl] == literal[string]
identifier[dct] [ literal[string] ]= identifier[safe_get] ( identifier[section] , literal[string] )
identifier[dct] [ literal[string] ]= identifier[safe_get] ( identifier[section] , literal[string] , identifier[default_encoding] )
identifier[dct] [ literal[string] ]= identifier[safe_get] ( identifier[section] , literal[string] )
identifier[dct] [ literal[string] ]= identifier[safe_get] ( identifier[section] , literal[string] )
identifier[dct] [ literal[string] ]= identifier[safe_get] ( identifier[section] , literal[string] )
identifier[use_servicenet] = identifier[safe_get] ( identifier[section] , literal[string] , literal[string] )
identifier[dct] [ literal[string] ]= identifier[use_servicenet] == literal[string]
identifier[app_agent] = identifier[safe_get] ( identifier[section] , literal[string] )
keyword[if] identifier[app_agent] :
identifier[dct] [ literal[string] ]= literal[string] %( identifier[app_agent] , identifier[USER_AGENT] )
keyword[else] :
identifier[dct] [ literal[string] ]= identifier[USER_AGENT]
keyword[if] keyword[not] identifier[self] . identifier[_default_set] :
identifier[self] . identifier[_settings] [ literal[string] ]= identifier[self] . identifier[_settings] [ identifier[section] ]
identifier[self] . identifier[_default_set] = keyword[True]
keyword[if] identifier[creds_found] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ) | def read_config(self, config_file):
"""
Parses the specified configuration file and stores the values. Raises
an InvalidConfigurationFile exception if the file is not well-formed.
"""
cfg = ConfigParser.SafeConfigParser()
try:
cfg.read(config_file) # depends on [control=['try'], data=[]]
except ConfigParser.MissingSectionHeaderError as e:
# The file exists, but doesn't have the correct format.
raise exc.InvalidConfigurationFile(e) # depends on [control=['except'], data=['e']]
def safe_get(section, option, default=None):
try:
return cfg.get(section, option) # depends on [control=['try'], data=[]]
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
return default # depends on [control=['except'], data=[]]
# A common mistake is including credentials in the config file. If any
# values are found, issue a warning so that the developer can correct
# this problem.
creds_found = False
for section in cfg.sections():
if section == 'settings':
section_name = 'default'
self._default_set = True # depends on [control=['if'], data=[]]
else:
section_name = section
# Check for included credentials
for key in ('username', 'password', 'api_key'):
if creds_found:
break # depends on [control=['if'], data=[]]
if safe_get(section, key):
creds_found = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
dct = self._settings[section_name] = {}
dct['region'] = safe_get(section, 'region', default_region)
ityp = safe_get(section, 'identity_type')
if ityp:
dct['identity_type'] = _id_type(ityp)
dct['identity_class'] = _import_identity(ityp) # depends on [control=['if'], data=[]]
# Handle both the old and new names for this setting.
debug = safe_get(section, 'debug')
if debug is None:
debug = safe_get(section, 'http_debug', 'False') # depends on [control=['if'], data=['debug']]
dct['http_debug'] = debug == 'True'
verify_ssl = safe_get(section, 'verify_ssl', 'True')
dct['verify_ssl'] = verify_ssl == 'True'
dct['keyring_username'] = safe_get(section, 'keyring_username')
dct['encoding'] = safe_get(section, 'encoding', default_encoding)
dct['auth_endpoint'] = safe_get(section, 'auth_endpoint')
dct['tenant_name'] = safe_get(section, 'tenant_name')
dct['tenant_id'] = safe_get(section, 'tenant_id')
use_servicenet = safe_get(section, 'use_servicenet', 'False')
dct['use_servicenet'] = use_servicenet == 'True'
app_agent = safe_get(section, 'custom_user_agent')
if app_agent:
# Customize the user-agent string with the app name.
dct['user_agent'] = '%s %s' % (app_agent, USER_AGENT) # depends on [control=['if'], data=[]]
else:
dct['user_agent'] = USER_AGENT
# If this is the first section, make it the default
if not self._default_set:
self._settings['default'] = self._settings[section]
self._default_set = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['section']]
if creds_found:
warnings.warn("Login credentials were detected in your .pyrax.cfg file. These have been ignored, but you should remove them and either place them in a credential file, or consider using another means of authentication. More information on the use of credential files can be found in the 'docs/getting_started.md' document.") # depends on [control=['if'], data=[]] |
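# A sample file of the shape read_config() above understands; section and
# option names come straight from the parser, and the path is only the
# conventional one, not enforced by this method.
SAMPLE_PYRAX_CFG = """\
# the "settings" section becomes the "default" environment
[settings]
identity_type = rackspace
region = DFW
custom_user_agent = MyApp/1.0

[staging]
identity_type = keystone
auth_endpoint = https://keystone.example.com/v2.0/
tenant_name = demo
verify_ssl = False
"""
# Note: username/password/api_key lines here would be ignored and would
# trigger the credentials warning at the end of read_config().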
def is_ancestor(self, ancestor_rev, rev):
"""Check if a commit is an ancestor of another
:param ancestor_rev: Rev which should be an ancestor
:param rev: Rev to test against ancestor_rev
        :return: ``True`` if ancestor_rev is an ancestor of rev.
"""
try:
self.git.merge_base(ancestor_rev, rev, is_ancestor=True)
except GitCommandError as err:
if err.status == 1:
return False
raise
return True | def function[is_ancestor, parameter[self, ancestor_rev, rev]]:
constant[Check if a commit is an ancestor of another
:param ancestor_rev: Rev which should be an ancestor
:param rev: Rev to test against ancestor_rev
:return: ``True`` if ancestor_rev is an ancestor of rev.
]
<ast.Try object at 0x7da1b1d6df00>
return[constant[True]] | keyword[def] identifier[is_ancestor] ( identifier[self] , identifier[ancestor_rev] , identifier[rev] ):
literal[string]
keyword[try] :
identifier[self] . identifier[git] . identifier[merge_base] ( identifier[ancestor_rev] , identifier[rev] , identifier[is_ancestor] = keyword[True] )
keyword[except] identifier[GitCommandError] keyword[as] identifier[err] :
keyword[if] identifier[err] . identifier[status] == literal[int] :
keyword[return] keyword[False]
keyword[raise]
keyword[return] keyword[True] | def is_ancestor(self, ancestor_rev, rev):
"""Check if a commit is an ancestor of another
:param ancestor_rev: Rev which should be an ancestor
:param rev: Rev to test against ancestor_rev
    :return: ``True`` if ancestor_rev is an ancestor of rev.
"""
try:
self.git.merge_base(ancestor_rev, rev, is_ancestor=True) # depends on [control=['try'], data=[]]
except GitCommandError as err:
if err.status == 1:
return False # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['err']]
return True |
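# GitPython-style usage of is_ancestor() above; the repo path and revs are
# placeholders. Internally `git merge-base --is-ancestor` exits 1 for "no",
# which the method maps to False, re-raising any other failure.
from git import Repo

repo = Repo('/path/to/repo')
if repo.is_ancestor('v1.0', 'HEAD'):
    print('HEAD contains v1.0')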
def to_app(app):
"""Serializes app to id string
:param app: object to serialize
:return: string id
"""
from sevenbridges.models.app import App
if not app:
raise SbgError('App is required!')
elif isinstance(app, App):
return app.id
elif isinstance(app, six.string_types):
return app
else:
raise SbgError('Invalid app parameter!') | def function[to_app, parameter[app]]:
constant[Serializes app to id string
:param app: object to serialize
:return: string id
]
from relative_module[sevenbridges.models.app] import module[App]
if <ast.UnaryOp object at 0x7da18fe902e0> begin[:]
<ast.Raise object at 0x7da18fe90e20> | keyword[def] identifier[to_app] ( identifier[app] ):
literal[string]
keyword[from] identifier[sevenbridges] . identifier[models] . identifier[app] keyword[import] identifier[App]
keyword[if] keyword[not] identifier[app] :
keyword[raise] identifier[SbgError] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[app] , identifier[App] ):
keyword[return] identifier[app] . identifier[id]
keyword[elif] identifier[isinstance] ( identifier[app] , identifier[six] . identifier[string_types] ):
keyword[return] identifier[app]
keyword[else] :
keyword[raise] identifier[SbgError] ( literal[string] ) | def to_app(app):
"""Serializes app to id string
:param app: object to serialize
:return: string id
"""
from sevenbridges.models.app import App
if not app:
raise SbgError('App is required!') # depends on [control=['if'], data=[]]
elif isinstance(app, App):
return app.id # depends on [control=['if'], data=[]]
elif isinstance(app, six.string_types):
return app # depends on [control=['if'], data=[]]
else:
raise SbgError('Invalid app parameter!') |
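# Behavioural sketch of to_app() above: a string id passes through, an App
# instance is reduced to its .id, and anything else raises. The error
# import path is the usual sevenbridges one, assumed here.
from sevenbridges.errors import SbgError

app_id = to_app('admin/demo-project/bwa-mem/3')   # returns the same string
# given app = api.apps.get(id=app_id), to_app(app) would return app.id
try:
    to_app(None)
except SbgError:
    pass  # "App is required!"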
def map_is_in_fov(m: tcod.map.Map, x: int, y: int) -> bool:
"""Return True if the cell at x,y is lit by the last field-of-view
algorithm.
.. note::
This function is slow.
.. deprecated:: 4.5
Use :any:`tcod.map.Map.fov` to check this property.
"""
return bool(lib.TCOD_map_is_in_fov(m.map_c, x, y)) | def function[map_is_in_fov, parameter[m, x, y]]:
constant[Return True if the cell at x,y is lit by the last field-of-view
algorithm.
.. note::
This function is slow.
.. deprecated:: 4.5
Use :any:`tcod.map.Map.fov` to check this property.
]
return[call[name[bool], parameter[call[name[lib].TCOD_map_is_in_fov, parameter[name[m].map_c, name[x], name[y]]]]]] | keyword[def] identifier[map_is_in_fov] ( identifier[m] : identifier[tcod] . identifier[map] . identifier[Map] , identifier[x] : identifier[int] , identifier[y] : identifier[int] )-> identifier[bool] :
literal[string]
keyword[return] identifier[bool] ( identifier[lib] . identifier[TCOD_map_is_in_fov] ( identifier[m] . identifier[map_c] , identifier[x] , identifier[y] )) | def map_is_in_fov(m: tcod.map.Map, x: int, y: int) -> bool:
"""Return True if the cell at x,y is lit by the last field-of-view
algorithm.
.. note::
This function is slow.
.. deprecated:: 4.5
Use :any:`tcod.map.Map.fov` to check this property.
"""
return bool(lib.TCOD_map_is_in_fov(m.map_c, x, y)) |
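# The non-deprecated equivalent of map_is_in_fov() above: read the Map's
# fov array directly. Index order depends on how the Map was constructed --
# order='F' indexes [x, y], while the default 'C' indexes [y, x].
import tcod.map

m = tcod.map.Map(width=10, height=10, order='F')
m.transparent[:] = True
m.compute_fov(5, 5, radius=4)
print(bool(m.fov[5, 5]))  # same answer map_is_in_fov(m, 5, 5) gives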
def text_has_been_edited(self, text):
"""Find text has been edited (this slot won't be triggered when
setting the search pattern combo box text programmatically)"""
self.find(changed=True, forward=True, start_highlight_timer=True) | def function[text_has_been_edited, parameter[self, text]]:
constant[Find text has been edited (this slot won't be triggered when
setting the search pattern combo box text programmatically)]
call[name[self].find, parameter[]] | keyword[def] identifier[text_has_been_edited] ( identifier[self] , identifier[text] ):
literal[string]
identifier[self] . identifier[find] ( identifier[changed] = keyword[True] , identifier[forward] = keyword[True] , identifier[start_highlight_timer] = keyword[True] ) | def text_has_been_edited(self, text):
"""Find text has been edited (this slot won't be triggered when
setting the search pattern combo box text programmatically)"""
self.find(changed=True, forward=True, start_highlight_timer=True) |
def add_dependency(id=None, name=None, dependency_id=None, dependency_name=None):
"""
Add an existing BuildConfiguration as a dependency to another BuildConfiguration.
"""
data = add_dependency_raw(id, name, dependency_id, dependency_name)
if data:
return utils.format_json_list(data) | def function[add_dependency, parameter[id, name, dependency_id, dependency_name]]:
constant[
Add an existing BuildConfiguration as a dependency to another BuildConfiguration.
]
variable[data] assign[=] call[name[add_dependency_raw], parameter[name[id], name[name], name[dependency_id], name[dependency_name]]]
if name[data] begin[:]
return[call[name[utils].format_json_list, parameter[name[data]]]] | keyword[def] identifier[add_dependency] ( identifier[id] = keyword[None] , identifier[name] = keyword[None] , identifier[dependency_id] = keyword[None] , identifier[dependency_name] = keyword[None] ):
literal[string]
identifier[data] = identifier[add_dependency_raw] ( identifier[id] , identifier[name] , identifier[dependency_id] , identifier[dependency_name] )
keyword[if] identifier[data] :
keyword[return] identifier[utils] . identifier[format_json_list] ( identifier[data] ) | def add_dependency(id=None, name=None, dependency_id=None, dependency_name=None):
"""
Add an existing BuildConfiguration as a dependency to another BuildConfiguration.
"""
data = add_dependency_raw(id, name, dependency_id, dependency_name)
if data:
return utils.format_json_list(data) # depends on [control=['if'], data=[]] |
def clean(self):
"""Delete all of the records"""
        # Deleting seems to be really weird and unreliable.
self._session \
.query(Process) \
.filter(Process.d_vid == self._d_vid) \
.delete(synchronize_session='fetch')
for r in self.records:
self._session.delete(r)
self._session.commit() | def function[clean, parameter[self]]:
constant[Delete all of the records]
call[call[call[name[self]._session.query, parameter[name[Process]]].filter, parameter[compare[name[Process].d_vid equal[==] name[self]._d_vid]]].delete, parameter[]]
for taget[name[r]] in starred[name[self].records] begin[:]
call[name[self]._session.delete, parameter[name[r]]]
call[name[self]._session.commit, parameter[]] | keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_session] . identifier[query] ( identifier[Process] ). identifier[filter] ( identifier[Process] . identifier[d_vid] == identifier[self] . identifier[_d_vid] ). identifier[delete] ( identifier[synchronize_session] = literal[string] )
keyword[for] identifier[r] keyword[in] identifier[self] . identifier[records] :
identifier[self] . identifier[_session] . identifier[delete] ( identifier[r] )
identifier[self] . identifier[_session] . identifier[commit] () | def clean(self):
"""Delete all of the records"""
    # Deleting seems to be really weird and unreliable.
self._session.query(Process).filter(Process.d_vid == self._d_vid).delete(synchronize_session='fetch')
for r in self.records:
self._session.delete(r) # depends on [control=['for'], data=['r']]
self._session.commit() |
def __needs_encoding(self, s):
"""
Get whether string I{s} contains special characters.
@param s: A string to check.
@type s: str
@return: True if needs encoding.
@rtype: boolean
"""
if isinstance(s, basestring):
for c in self.special:
if c in s:
return True | def function[__needs_encoding, parameter[self, s]]:
constant[
Get whether string I{s} contains special characters.
@param s: A string to check.
@type s: str
@return: True if needs encoding.
@rtype: boolean
]
if call[name[isinstance], parameter[name[s], name[basestring]]] begin[:]
for taget[name[c]] in starred[name[self].special] begin[:]
if compare[name[c] in name[s]] begin[:]
return[constant[True]] | keyword[def] identifier[__needs_encoding] ( identifier[self] , identifier[s] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[s] , identifier[basestring] ):
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[special] :
keyword[if] identifier[c] keyword[in] identifier[s] :
keyword[return] keyword[True] | def __needs_encoding(self, s):
"""
Get whether string I{s} contains special characters.
@param s: A string to check.
@type s: str
@return: True if needs encoding.
@rtype: boolean
"""
if isinstance(s, basestring):
for c in self.special:
if c in s:
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] |
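# Behavioural note for __needs_encoding() above: when no special character
# is found, the loop falls through and the method implicitly returns None,
# which callers treat as falsy. Stand-in class (str instead of the py2
# basestring used above):
class Encoder(object):
    special = ('&', '<', '>')
    def needs_encoding(self, s):  # same body, minus the name mangling
        if isinstance(s, str):
            for c in self.special:
                if c in s:
                    return True

enc = Encoder()
print(enc.needs_encoding('a < b'))  # True
print(enc.needs_encoding('plain'))  # None (falsy), not an explicit False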
def _normalise(self):
"""Object is guaranteed to be a unit quaternion after calling this
operation UNLESS the object is equivalent to Quaternion(0)
"""
if not self.is_unit():
n = self.norm
if n > 0:
self.q = self.q / n | def function[_normalise, parameter[self]]:
constant[Object is guaranteed to be a unit quaternion after calling this
operation UNLESS the object is equivalent to Quaternion(0)
]
if <ast.UnaryOp object at 0x7da1b08b1030> begin[:]
variable[n] assign[=] name[self].norm
if compare[name[n] greater[>] constant[0]] begin[:]
name[self].q assign[=] binary_operation[name[self].q / name[n]] | keyword[def] identifier[_normalise] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_unit] ():
identifier[n] = identifier[self] . identifier[norm]
keyword[if] identifier[n] > literal[int] :
identifier[self] . identifier[q] = identifier[self] . identifier[q] / identifier[n] | def _normalise(self):
"""Object is guaranteed to be a unit quaternion after calling this
operation UNLESS the object is equivalent to Quaternion(0)
"""
if not self.is_unit():
n = self.norm
if n > 0:
self.q = self.q / n # depends on [control=['if'], data=['n']] # depends on [control=['if'], data=[]] |
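# Effect of _normalise() above, reproduced in a minimal quaternion holder:
# the norm is scaled to 1 unless the quaternion is (equivalent to) zero,
# which is left untouched to avoid division by zero.
import numpy as np

class Quat(object):
    def __init__(self, *q):
        self.q = np.array(q, dtype=float)
    @property
    def norm(self):
        return np.linalg.norm(self.q)
    def is_unit(self):
        return abs(1.0 - self.norm) < 1e-14
    def _normalise(self):
        if not self.is_unit():
            n = self.norm
            if n > 0:
                self.q = self.q / n

q = Quat(0.0, 3.0, 4.0, 0.0)
q._normalise()
print(q.norm)  # 1.0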
def join(self, mucjid, nick, *,
password=None, history=None, autorejoin=True):
"""
Join a multi-user chat and create a conversation for it.
:param mucjid: The bare JID of the room to join.
:type mucjid: :class:`~aioxmpp.JID`.
:param nick: The nickname to use in the room.
:type nick: :class:`str`
:param password: The password to join the room, if required.
:type password: :class:`str`
:param history: Specification for how much and which history to fetch.
:type history: :class:`.xso.History`
:param autorejoin: Flag to indicate that the MUC should be
automatically rejoined after a disconnect.
:type autorejoin: :class:`bool`
:raises ValueError: if the MUC JID is invalid.
:return: The :term:`Conversation` and a future on the join.
:rtype: tuple of :class:`~.Room` and :class:`asyncio.Future`.
Join a multi-user chat at `mucjid` with `nick`. Return a :class:`Room`
instance which is used to track the MUC locally and a
:class:`aioxmpp.Future` which becomes done when the join succeeded
(with a :data:`None` value) or failed (with an exception).
In addition, the :meth:`~.ConversationService.on_conversation_added`
signal is emitted immediately with the new :class:`Room`.
It is recommended to attach the desired signals to the :class:`Room`
before yielding next (e.g. in a non-deferred event handler to the
:meth:`~.ConversationService.on_conversation_added` signal), to avoid
races with the server. It is guaranteed that no signals are emitted
before the next yield, and thus, it is safe to attach the signals right
after :meth:`join` returned. (This is also the reason why :meth:`join`
is not a coroutine, but instead returns the room and a future to wait
for.)
Any other interaction with the room must go through the :class:`Room`
instance.
If the multi-user chat at `mucjid` is already or currently being
        joined, the existing :class:`Room` and future are returned. The `nick`
and other options for the new join are ignored.
If the `mucjid` is not a bare JID, :class:`ValueError` is raised.
`password` may be a string used as password for the MUC. It will be
remembered and stored at the returned :class:`Room` instance.
`history` may be a :class:`History` instance to request a specific
amount of history; otherwise, the server will return a default amount
of history.
If `autorejoin` is true, the MUC will be re-joined after the stream has
been destroyed and re-established. In that case, the service will
request history since the stream destruction and ignore the `history`
object passed here.
If the stream is currently not established, the join is deferred until
the stream is established.
"""
if history is not None and not isinstance(history, muc_xso.History):
raise TypeError("history must be {!s}, got {!r}".format(
muc_xso.History.__name__,
history))
if not mucjid.is_bare:
raise ValueError("MUC JID must be bare")
try:
room, fut, *_ = self._pending_mucs[mucjid]
except KeyError:
pass
else:
return room, fut
try:
room = self._joined_mucs[mucjid]
except KeyError:
pass
else:
fut = asyncio.Future()
fut.set_result(None)
return room, fut
room = Room(self, mucjid)
room.muc_autorejoin = autorejoin
room.muc_password = password
room.on_exit.connect(
functools.partial(
self._muc_exited,
room
)
)
room.on_muc_enter.connect(
self._pending_on_enter,
)
fut = asyncio.Future()
fut.add_done_callback(functools.partial(
self._pending_join_done,
mucjid,
room,
))
self._pending_mucs[mucjid] = room, fut, nick, history
if self.client.established:
self._send_join_presence(mucjid, history, nick, password)
self.on_conversation_new(room)
self.dependencies[
aioxmpp.im.service.ConversationService
]._add_conversation(room)
return room, fut | def function[join, parameter[self, mucjid, nick]]:
constant[
Join a multi-user chat and create a conversation for it.
:param mucjid: The bare JID of the room to join.
:type mucjid: :class:`~aioxmpp.JID`.
:param nick: The nickname to use in the room.
:type nick: :class:`str`
:param password: The password to join the room, if required.
:type password: :class:`str`
:param history: Specification for how much and which history to fetch.
:type history: :class:`.xso.History`
:param autorejoin: Flag to indicate that the MUC should be
automatically rejoined after a disconnect.
:type autorejoin: :class:`bool`
:raises ValueError: if the MUC JID is invalid.
:return: The :term:`Conversation` and a future on the join.
:rtype: tuple of :class:`~.Room` and :class:`asyncio.Future`.
Join a multi-user chat at `mucjid` with `nick`. Return a :class:`Room`
instance which is used to track the MUC locally and a
:class:`aioxmpp.Future` which becomes done when the join succeeded
(with a :data:`None` value) or failed (with an exception).
In addition, the :meth:`~.ConversationService.on_conversation_added`
signal is emitted immediately with the new :class:`Room`.
It is recommended to attach the desired signals to the :class:`Room`
before yielding next (e.g. in a non-deferred event handler to the
:meth:`~.ConversationService.on_conversation_added` signal), to avoid
races with the server. It is guaranteed that no signals are emitted
before the next yield, and thus, it is safe to attach the signals right
after :meth:`join` returned. (This is also the reason why :meth:`join`
is not a coroutine, but instead returns the room and a future to wait
for.)
Any other interaction with the room must go through the :class:`Room`
instance.
If the multi-user chat at `mucjid` is already or currently being
joined, the existing :class:`Room` and future are returned. The `nick`
and other options for the new join are ignored.
If the `mucjid` is not a bare JID, :class:`ValueError` is raised.
`password` may be a string used as password for the MUC. It will be
remembered and stored at the returned :class:`Room` instance.
`history` may be a :class:`History` instance to request a specific
amount of history; otherwise, the server will return a default amount
of history.
If `autorejoin` is true, the MUC will be re-joined after the stream has
been destroyed and re-established. In that case, the service will
request history since the stream destruction and ignore the `history`
object passed here.
If the stream is currently not established, the join is deferred until
the stream is established.
]
if <ast.BoolOp object at 0x7da1b2346350> begin[:]
<ast.Raise object at 0x7da1b2346fb0>
if <ast.UnaryOp object at 0x7da1b2347010> begin[:]
<ast.Raise object at 0x7da1b2345840>
<ast.Try object at 0x7da1b23470d0>
<ast.Try object at 0x7da1b2347bb0>
variable[room] assign[=] call[name[Room], parameter[name[self], name[mucjid]]]
name[room].muc_autorejoin assign[=] name[autorejoin]
name[room].muc_password assign[=] name[password]
call[name[room].on_exit.connect, parameter[call[name[functools].partial, parameter[name[self]._muc_exited, name[room]]]]]
call[name[room].on_muc_enter.connect, parameter[name[self]._pending_on_enter]]
variable[fut] assign[=] call[name[asyncio].Future, parameter[]]
call[name[fut].add_done_callback, parameter[call[name[functools].partial, parameter[name[self]._pending_join_done, name[mucjid], name[room]]]]]
call[name[self]._pending_mucs][name[mucjid]] assign[=] tuple[[<ast.Name object at 0x7da1b23453c0>, <ast.Name object at 0x7da1b2347610>, <ast.Name object at 0x7da1b2344c40>, <ast.Name object at 0x7da1b2346ce0>]]
if name[self].client.established begin[:]
call[name[self]._send_join_presence, parameter[name[mucjid], name[history], name[nick], name[password]]]
call[name[self].on_conversation_new, parameter[name[room]]]
call[call[name[self].dependencies][name[aioxmpp].im.service.ConversationService]._add_conversation, parameter[name[room]]]
return[tuple[[<ast.Name object at 0x7da1b2345900>, <ast.Name object at 0x7da1b2345db0>]]] | keyword[def] identifier[join] ( identifier[self] , identifier[mucjid] , identifier[nick] ,*,
identifier[password] = keyword[None] , identifier[history] = keyword[None] , identifier[autorejoin] = keyword[True] ):
literal[string]
keyword[if] identifier[history] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[history] , identifier[muc_xso] . identifier[History] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[muc_xso] . identifier[History] . identifier[__name__] ,
identifier[history] ))
keyword[if] keyword[not] identifier[mucjid] . identifier[is_bare] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[try] :
identifier[room] , identifier[fut] ,* identifier[_] = identifier[self] . identifier[_pending_mucs] [ identifier[mucjid] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
keyword[return] identifier[room] , identifier[fut]
keyword[try] :
identifier[room] = identifier[self] . identifier[_joined_mucs] [ identifier[mucjid] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[fut] = identifier[asyncio] . identifier[Future] ()
identifier[fut] . identifier[set_result] ( keyword[None] )
keyword[return] identifier[room] , identifier[fut]
identifier[room] = identifier[Room] ( identifier[self] , identifier[mucjid] )
identifier[room] . identifier[muc_autorejoin] = identifier[autorejoin]
identifier[room] . identifier[muc_password] = identifier[password]
identifier[room] . identifier[on_exit] . identifier[connect] (
identifier[functools] . identifier[partial] (
identifier[self] . identifier[_muc_exited] ,
identifier[room]
)
)
identifier[room] . identifier[on_muc_enter] . identifier[connect] (
identifier[self] . identifier[_pending_on_enter] ,
)
identifier[fut] = identifier[asyncio] . identifier[Future] ()
identifier[fut] . identifier[add_done_callback] ( identifier[functools] . identifier[partial] (
identifier[self] . identifier[_pending_join_done] ,
identifier[mucjid] ,
identifier[room] ,
))
identifier[self] . identifier[_pending_mucs] [ identifier[mucjid] ]= identifier[room] , identifier[fut] , identifier[nick] , identifier[history]
keyword[if] identifier[self] . identifier[client] . identifier[established] :
identifier[self] . identifier[_send_join_presence] ( identifier[mucjid] , identifier[history] , identifier[nick] , identifier[password] )
identifier[self] . identifier[on_conversation_new] ( identifier[room] )
identifier[self] . identifier[dependencies] [
identifier[aioxmpp] . identifier[im] . identifier[service] . identifier[ConversationService]
]. identifier[_add_conversation] ( identifier[room] )
keyword[return] identifier[room] , identifier[fut] | def join(self, mucjid, nick, *, password=None, history=None, autorejoin=True):
"""
Join a multi-user chat and create a conversation for it.
:param mucjid: The bare JID of the room to join.
:type mucjid: :class:`~aioxmpp.JID`.
:param nick: The nickname to use in the room.
:type nick: :class:`str`
:param password: The password to join the room, if required.
:type password: :class:`str`
:param history: Specification for how much and which history to fetch.
:type history: :class:`.xso.History`
:param autorejoin: Flag to indicate that the MUC should be
automatically rejoined after a disconnect.
:type autorejoin: :class:`bool`
:raises ValueError: if the MUC JID is invalid.
:return: The :term:`Conversation` and a future on the join.
:rtype: tuple of :class:`~.Room` and :class:`asyncio.Future`.
Join a multi-user chat at `mucjid` with `nick`. Return a :class:`Room`
instance which is used to track the MUC locally and a
:class:`aioxmpp.Future` which becomes done when the join succeeded
(with a :data:`None` value) or failed (with an exception).
In addition, the :meth:`~.ConversationService.on_conversation_added`
signal is emitted immediately with the new :class:`Room`.
It is recommended to attach the desired signals to the :class:`Room`
before yielding next (e.g. in a non-deferred event handler to the
:meth:`~.ConversationService.on_conversation_added` signal), to avoid
races with the server. It is guaranteed that no signals are emitted
before the next yield, and thus, it is safe to attach the signals right
after :meth:`join` returned. (This is also the reason why :meth:`join`
is not a coroutine, but instead returns the room and a future to wait
for.)
Any other interaction with the room must go through the :class:`Room`
instance.
If the multi-user chat at `mucjid` is already or currently being
joined, the existing :class:`Room` and future is returned. The `nick`
and other options for the new join are ignored.
If the `mucjid` is not a bare JID, :class:`ValueError` is raised.
`password` may be a string used as password for the MUC. It will be
remembered and stored at the returned :class:`Room` instance.
`history` may be a :class:`History` instance to request a specific
amount of history; otherwise, the server will return a default amount
of history.
If `autorejoin` is true, the MUC will be re-joined after the stream has
been destroyed and re-established. In that case, the service will
request history since the stream destruction and ignore the `history`
object passed here.
If the stream is currently not established, the join is deferred until
the stream is established.
"""
if history is not None and (not isinstance(history, muc_xso.History)):
raise TypeError('history must be {!s}, got {!r}'.format(muc_xso.History.__name__, history)) # depends on [control=['if'], data=[]]
if not mucjid.is_bare:
raise ValueError('MUC JID must be bare') # depends on [control=['if'], data=[]]
try:
(room, fut, *_) = self._pending_mucs[mucjid] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
return (room, fut)
try:
room = self._joined_mucs[mucjid] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
fut = asyncio.Future()
fut.set_result(None)
return (room, fut)
room = Room(self, mucjid)
room.muc_autorejoin = autorejoin
room.muc_password = password
room.on_exit.connect(functools.partial(self._muc_exited, room))
room.on_muc_enter.connect(self._pending_on_enter)
fut = asyncio.Future()
fut.add_done_callback(functools.partial(self._pending_join_done, mucjid, room))
self._pending_mucs[mucjid] = (room, fut, nick, history)
if self.client.established:
self._send_join_presence(mucjid, history, nick, password) # depends on [control=['if'], data=[]]
self.on_conversation_new(room)
self.dependencies[aioxmpp.im.service.ConversationService]._add_conversation(room)
return (room, fut) |
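
A minimal usage sketch for the method above, assuming an already-established aioxmpp client; the room JID and nickname are placeholders, and signal details vary slightly across aioxmpp versions.

import aioxmpp

async def enter_room(client):
    muc_client = client.summon(aioxmpp.MUCClient)
    room, fut = muc_client.join(
        aioxmpp.JID.fromstr("coven@chat.shakespeare.example"),  # placeholder room
        "thirdwitch",                                           # placeholder nick
    )
    await fut  # resolves with None once the join succeeded, raises on failure
    return room
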
def point_to_t(self, point):
"""If the point lies on the Line, returns its `t` parameter.
If the point does not lie on the Line, returns None."""
# Single-precision floats have only 7 significant figures of
# resolution, so test that we're within 6 sig figs.
if np.isclose(point, self.start, rtol=0, atol=1e-6):
return 0.0
elif np.isclose(point, self.end, rtol=0, atol=1e-6):
return 1.0
# Finding the point "by hand" here is much faster than calling
# radialrange(), see the discussion on PR #40:
# https://github.com/mathandy/svgpathtools/pull/40#issuecomment-358134261
p = self.poly()
# p(t) = (p_1 * t) + p_0 = point
# t = (point - p_0) / p_1
t = (point - p[0]) / p[1]
if np.isclose(t.imag, 0) and (t.real >= 0.0) and (t.real <= 1.0):
return t.real
return None | def function[point_to_t, parameter[self, point]]:
constant[If the point lies on the Line, returns its `t` parameter.
If the point does not lie on the Line, returns None.]
if call[name[np].isclose, parameter[name[point], name[self].start]] begin[:]
return[constant[0.0]]
variable[p] assign[=] call[name[self].poly, parameter[]]
variable[t] assign[=] binary_operation[binary_operation[name[point] - call[name[p]][constant[0]]] / call[name[p]][constant[1]]]
if <ast.BoolOp object at 0x7da2054a7f10> begin[:]
return[name[t].real]
return[constant[None]] | keyword[def] identifier[point_to_t] ( identifier[self] , identifier[point] ):
literal[string]
keyword[if] identifier[np] . identifier[isclose] ( identifier[point] , identifier[self] . identifier[start] , identifier[rtol] = literal[int] , identifier[atol] = literal[int] ):
keyword[return] literal[int]
keyword[elif] identifier[np] . identifier[isclose] ( identifier[point] , identifier[self] . identifier[end] , identifier[rtol] = literal[int] , identifier[atol] = literal[int] ):
keyword[return] literal[int]
identifier[p] = identifier[self] . identifier[poly] ()
identifier[t] =( identifier[point] - identifier[p] [ literal[int] ])/ identifier[p] [ literal[int] ]
keyword[if] identifier[np] . identifier[isclose] ( identifier[t] . identifier[imag] , literal[int] ) keyword[and] ( identifier[t] . identifier[real] >= literal[int] ) keyword[and] ( identifier[t] . identifier[real] <= literal[int] ):
keyword[return] identifier[t] . identifier[real]
keyword[return] keyword[None] | def point_to_t(self, point):
"""If the point lies on the Line, returns its `t` parameter.
If the point does not lie on the Line, returns None."""
# Single-precision floats have only 7 significant figures of
# resolution, so test that we're within 6 sig figs.
if np.isclose(point, self.start, rtol=0, atol=1e-06):
return 0.0 # depends on [control=['if'], data=[]]
elif np.isclose(point, self.end, rtol=0, atol=1e-06):
return 1.0 # depends on [control=['if'], data=[]]
# Finding the point "by hand" here is much faster than calling
# radialrange(), see the discussion on PR #40:
# https://github.com/mathandy/svgpathtools/pull/40#issuecomment-358134261
p = self.poly()
# p(t) = (p_1 * t) + p_0 = point
# t = (point - p_0) / p_1
t = (point - p[0]) / p[1]
if np.isclose(t.imag, 0) and t.real >= 0.0 and (t.real <= 1.0):
return t.real # depends on [control=['if'], data=[]]
return None |
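
A quick check of the method above, assuming svgpathtools is installed; the midpoint of a segment maps back to t = 0.5, and points off the segment yield None.

from svgpathtools import Line

line = Line(0 + 0j, 10 + 10j)
assert line.point_to_t(5 + 5j) == 0.5      # midpoint -> t = 0.5
assert line.point_to_t(20 + 20j) is None   # past the endpoint -> None
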
def _generate_altered_sql_dependencies(self, dep_changed_keys):
"""
        Generate forward operations for changing/creating SQL item dependencies.
        Dependencies exist only in memory and should reflect database dependencies, so
        changing them in the SQL config does not alter the database. Such changes are
        persisted in a separate operation type - `AlterSQLState`.
        Args:
            dep_changed_keys (list): Data about keys that have had their dependencies changed.
                List of tuples (key, removed_dependencies, added_dependencies).
"""
for key, removed_deps, added_deps in dep_changed_keys:
app_label, sql_name = key
operation = AlterSQLState(sql_name, add_dependencies=tuple(added_deps),
remove_dependencies=tuple(removed_deps))
sql_deps = [key]
self.add_sql_operation(app_label, sql_name, operation, sql_deps) | def function[_generate_altered_sql_dependencies, parameter[self, dep_changed_keys]]:
constant[
        Generate forward operations for changing/creating SQL item dependencies.
        Dependencies exist only in memory and should reflect database dependencies, so
        changing them in the SQL config does not alter the database. Such changes are
        persisted in a separate operation type - `AlterSQLState`.
        Args:
            dep_changed_keys (list): Data about keys that have had their dependencies changed.
                List of tuples (key, removed_dependencies, added_dependencies).
]
for taget[tuple[[<ast.Name object at 0x7da2044c0760>, <ast.Name object at 0x7da2044c0160>, <ast.Name object at 0x7da2044c29e0>]]] in starred[name[dep_changed_keys]] begin[:]
<ast.Tuple object at 0x7da2044c16f0> assign[=] name[key]
variable[operation] assign[=] call[name[AlterSQLState], parameter[name[sql_name]]]
variable[sql_deps] assign[=] list[[<ast.Name object at 0x7da2044c3880>]]
call[name[self].add_sql_operation, parameter[name[app_label], name[sql_name], name[operation], name[sql_deps]]] | keyword[def] identifier[_generate_altered_sql_dependencies] ( identifier[self] , identifier[dep_changed_keys] ):
literal[string]
keyword[for] identifier[key] , identifier[removed_deps] , identifier[added_deps] keyword[in] identifier[dep_changed_keys] :
identifier[app_label] , identifier[sql_name] = identifier[key]
identifier[operation] = identifier[AlterSQLState] ( identifier[sql_name] , identifier[add_dependencies] = identifier[tuple] ( identifier[added_deps] ),
identifier[remove_dependencies] = identifier[tuple] ( identifier[removed_deps] ))
identifier[sql_deps] =[ identifier[key] ]
identifier[self] . identifier[add_sql_operation] ( identifier[app_label] , identifier[sql_name] , identifier[operation] , identifier[sql_deps] ) | def _generate_altered_sql_dependencies(self, dep_changed_keys):
"""
        Generate forward operations for changing/creating SQL item dependencies.
        Dependencies exist only in memory and should reflect database dependencies, so
        changing them in the SQL config does not alter the database. Such changes are
        persisted in a separate operation type - `AlterSQLState`.
        Args:
            dep_changed_keys (list): Data about keys that have had their dependencies changed.
                List of tuples (key, removed_dependencies, added_dependencies).
"""
for (key, removed_deps, added_deps) in dep_changed_keys:
(app_label, sql_name) = key
operation = AlterSQLState(sql_name, add_dependencies=tuple(added_deps), remove_dependencies=tuple(removed_deps))
sql_deps = [key]
self.add_sql_operation(app_label, sql_name, operation, sql_deps) # depends on [control=['for'], data=[]] |
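
An illustrative input for the helper above; every name here is hypothetical. Each entry pairs an item key with the dependencies it lost and gained, and yields one state-only AlterSQLState operation.

dep_changed_keys = [
    (("app", "top_authors"),          # key: (app_label, sql_name)
     [("app", "old_rating_view")],    # removed dependencies
     [("app", "new_rating_view")]),   # added dependencies
]
autodetector._generate_altered_sql_dependencies(dep_changed_keys)  # hypothetical instance
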
def create(cls, amount, counterparty_alias, description,
monetary_account_id=None, attachment=None,
merchant_reference=None, allow_bunqto=None, custom_headers=None):
"""
Create a new Payment.
:type user_id: int
:type monetary_account_id: int
:param amount: The Amount to transfer with the Payment. Must be bigger
than 0 and smaller than the MonetaryAccount's balance.
:type amount: object_.Amount
:param counterparty_alias: The Alias of the party we are transferring
the money to. Can be an Alias of type EMAIL or PHONE_NUMBER (for bunq
MonetaryAccounts or bunq.to payments) or IBAN (for external bank
account).
:type counterparty_alias: object_.Pointer
:param description: The description for the Payment. Maximum 140
characters for Payments to external IBANs, 9000 characters for Payments
to only other bunq MonetaryAccounts. Field is required but can be an
empty string.
:type description: str
:param attachment: The Attachments to attach to the Payment.
:type attachment: list[object_.AttachmentMonetaryAccountPayment]
:param merchant_reference: Optional data to be included with the Payment
specific to the merchant.
:type merchant_reference: str
:param allow_bunqto: Whether or not sending a bunq.to payment is
allowed.
:type allow_bunqto: bool
:type custom_headers: dict[str, str]|None
:rtype: BunqResponseInt
"""
if custom_headers is None:
custom_headers = {}
request_map = {
cls.FIELD_AMOUNT: amount,
cls.FIELD_COUNTERPARTY_ALIAS: counterparty_alias,
cls.FIELD_DESCRIPTION: description,
cls.FIELD_ATTACHMENT: attachment,
cls.FIELD_MERCHANT_REFERENCE: merchant_reference,
cls.FIELD_ALLOW_BUNQTO: allow_bunqto
}
request_map_string = converter.class_to_json(request_map)
request_map_string = cls._remove_field_for_request(request_map_string)
api_client = client.ApiClient(cls._get_api_context())
request_bytes = request_map_string.encode()
endpoint_url = cls._ENDPOINT_URL_CREATE.format(cls._determine_user_id(),
cls._determine_monetary_account_id(
monetary_account_id))
response_raw = api_client.post(endpoint_url, request_bytes,
custom_headers)
return BunqResponseInt.cast_from_bunq_response(
cls._process_for_id(response_raw)
) | def function[create, parameter[cls, amount, counterparty_alias, description, monetary_account_id, attachment, merchant_reference, allow_bunqto, custom_headers]]:
constant[
Create a new Payment.
:type user_id: int
:type monetary_account_id: int
:param amount: The Amount to transfer with the Payment. Must be bigger
than 0 and smaller than the MonetaryAccount's balance.
:type amount: object_.Amount
:param counterparty_alias: The Alias of the party we are transferring
the money to. Can be an Alias of type EMAIL or PHONE_NUMBER (for bunq
MonetaryAccounts or bunq.to payments) or IBAN (for external bank
account).
:type counterparty_alias: object_.Pointer
:param description: The description for the Payment. Maximum 140
characters for Payments to external IBANs, 9000 characters for Payments
to only other bunq MonetaryAccounts. Field is required but can be an
empty string.
:type description: str
:param attachment: The Attachments to attach to the Payment.
:type attachment: list[object_.AttachmentMonetaryAccountPayment]
:param merchant_reference: Optional data to be included with the Payment
specific to the merchant.
:type merchant_reference: str
:param allow_bunqto: Whether or not sending a bunq.to payment is
allowed.
:type allow_bunqto: bool
:type custom_headers: dict[str, str]|None
:rtype: BunqResponseInt
]
if compare[name[custom_headers] is constant[None]] begin[:]
variable[custom_headers] assign[=] dictionary[[], []]
variable[request_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b07a8a60>, <ast.Attribute object at 0x7da1b07a9450>, <ast.Attribute object at 0x7da1b07a8cd0>, <ast.Attribute object at 0x7da1b07a85b0>, <ast.Attribute object at 0x7da1b07a93f0>, <ast.Attribute object at 0x7da1b07aa980>], [<ast.Name object at 0x7da1b07ab2b0>, <ast.Name object at 0x7da1b07ab970>, <ast.Name object at 0x7da1b07a9150>, <ast.Name object at 0x7da1b07a9fc0>, <ast.Name object at 0x7da1b07a81c0>, <ast.Name object at 0x7da1b07aa4a0>]]
variable[request_map_string] assign[=] call[name[converter].class_to_json, parameter[name[request_map]]]
variable[request_map_string] assign[=] call[name[cls]._remove_field_for_request, parameter[name[request_map_string]]]
variable[api_client] assign[=] call[name[client].ApiClient, parameter[call[name[cls]._get_api_context, parameter[]]]]
variable[request_bytes] assign[=] call[name[request_map_string].encode, parameter[]]
variable[endpoint_url] assign[=] call[name[cls]._ENDPOINT_URL_CREATE.format, parameter[call[name[cls]._determine_user_id, parameter[]], call[name[cls]._determine_monetary_account_id, parameter[name[monetary_account_id]]]]]
variable[response_raw] assign[=] call[name[api_client].post, parameter[name[endpoint_url], name[request_bytes], name[custom_headers]]]
return[call[name[BunqResponseInt].cast_from_bunq_response, parameter[call[name[cls]._process_for_id, parameter[name[response_raw]]]]]] | keyword[def] identifier[create] ( identifier[cls] , identifier[amount] , identifier[counterparty_alias] , identifier[description] ,
identifier[monetary_account_id] = keyword[None] , identifier[attachment] = keyword[None] ,
identifier[merchant_reference] = keyword[None] , identifier[allow_bunqto] = keyword[None] , identifier[custom_headers] = keyword[None] ):
literal[string]
keyword[if] identifier[custom_headers] keyword[is] keyword[None] :
identifier[custom_headers] ={}
identifier[request_map] ={
identifier[cls] . identifier[FIELD_AMOUNT] : identifier[amount] ,
identifier[cls] . identifier[FIELD_COUNTERPARTY_ALIAS] : identifier[counterparty_alias] ,
identifier[cls] . identifier[FIELD_DESCRIPTION] : identifier[description] ,
identifier[cls] . identifier[FIELD_ATTACHMENT] : identifier[attachment] ,
identifier[cls] . identifier[FIELD_MERCHANT_REFERENCE] : identifier[merchant_reference] ,
identifier[cls] . identifier[FIELD_ALLOW_BUNQTO] : identifier[allow_bunqto]
}
identifier[request_map_string] = identifier[converter] . identifier[class_to_json] ( identifier[request_map] )
identifier[request_map_string] = identifier[cls] . identifier[_remove_field_for_request] ( identifier[request_map_string] )
identifier[api_client] = identifier[client] . identifier[ApiClient] ( identifier[cls] . identifier[_get_api_context] ())
identifier[request_bytes] = identifier[request_map_string] . identifier[encode] ()
identifier[endpoint_url] = identifier[cls] . identifier[_ENDPOINT_URL_CREATE] . identifier[format] ( identifier[cls] . identifier[_determine_user_id] (),
identifier[cls] . identifier[_determine_monetary_account_id] (
identifier[monetary_account_id] ))
identifier[response_raw] = identifier[api_client] . identifier[post] ( identifier[endpoint_url] , identifier[request_bytes] ,
identifier[custom_headers] )
keyword[return] identifier[BunqResponseInt] . identifier[cast_from_bunq_response] (
identifier[cls] . identifier[_process_for_id] ( identifier[response_raw] )
) | def create(cls, amount, counterparty_alias, description, monetary_account_id=None, attachment=None, merchant_reference=None, allow_bunqto=None, custom_headers=None):
"""
Create a new Payment.
:type user_id: int
:type monetary_account_id: int
:param amount: The Amount to transfer with the Payment. Must be bigger
than 0 and smaller than the MonetaryAccount's balance.
:type amount: object_.Amount
:param counterparty_alias: The Alias of the party we are transferring
the money to. Can be an Alias of type EMAIL or PHONE_NUMBER (for bunq
MonetaryAccounts or bunq.to payments) or IBAN (for external bank
account).
:type counterparty_alias: object_.Pointer
:param description: The description for the Payment. Maximum 140
characters for Payments to external IBANs, 9000 characters for Payments
to only other bunq MonetaryAccounts. Field is required but can be an
empty string.
:type description: str
:param attachment: The Attachments to attach to the Payment.
:type attachment: list[object_.AttachmentMonetaryAccountPayment]
:param merchant_reference: Optional data to be included with the Payment
specific to the merchant.
:type merchant_reference: str
:param allow_bunqto: Whether or not sending a bunq.to payment is
allowed.
:type allow_bunqto: bool
:type custom_headers: dict[str, str]|None
:rtype: BunqResponseInt
"""
if custom_headers is None:
custom_headers = {} # depends on [control=['if'], data=['custom_headers']]
request_map = {cls.FIELD_AMOUNT: amount, cls.FIELD_COUNTERPARTY_ALIAS: counterparty_alias, cls.FIELD_DESCRIPTION: description, cls.FIELD_ATTACHMENT: attachment, cls.FIELD_MERCHANT_REFERENCE: merchant_reference, cls.FIELD_ALLOW_BUNQTO: allow_bunqto}
request_map_string = converter.class_to_json(request_map)
request_map_string = cls._remove_field_for_request(request_map_string)
api_client = client.ApiClient(cls._get_api_context())
request_bytes = request_map_string.encode()
endpoint_url = cls._ENDPOINT_URL_CREATE.format(cls._determine_user_id(), cls._determine_monetary_account_id(monetary_account_id))
response_raw = api_client.post(endpoint_url, request_bytes, custom_headers)
return BunqResponseInt.cast_from_bunq_response(cls._process_for_id(response_raw)) |
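
A hedged usage sketch for the endpoint above, assuming the bunq Python SDK with an API context already initialized; the amount and alias are placeholders.

from bunq.sdk.model.generated import endpoint
from bunq.sdk.model.generated.object_ import Amount, Pointer

payment_id = endpoint.Payment.create(
    amount=Amount("10.00", "EUR"),
    counterparty_alias=Pointer("EMAIL", "friend@example.com"),
    description="Lunch",
).value  # BunqResponseInt; .value is the id of the created Payment
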
def count_funs(self) -> int:
""" Count function define by this scope """
n = 0
for s in self._hsig.values():
if hasattr(s, 'is_fun') and s.is_fun:
n += 1
return n | def function[count_funs, parameter[self]]:
    constant[ Count the functions defined in this scope ]
variable[n] assign[=] constant[0]
for taget[name[s]] in starred[call[name[self]._hsig.values, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b013d8a0> begin[:]
<ast.AugAssign object at 0x7da1b013ffd0>
return[name[n]] | keyword[def] identifier[count_funs] ( identifier[self] )-> identifier[int] :
literal[string]
identifier[n] = literal[int]
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_hsig] . identifier[values] ():
keyword[if] identifier[hasattr] ( identifier[s] , literal[string] ) keyword[and] identifier[s] . identifier[is_fun] :
identifier[n] += literal[int]
keyword[return] identifier[n] | def count_funs(self) -> int:
""" Count function define by this scope """
n = 0
for s in self._hsig.values():
if hasattr(s, 'is_fun') and s.is_fun:
n += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']]
return n |
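
A tiny self-contained check of the counting logic above, using stand-in signature objects and reusing the function as a method; only entries whose is_fun flag is true are counted.

class _Sig:
    def __init__(self, is_fun=False):
        self.is_fun = is_fun

class _Scope:
    count_funs = count_funs  # reuse the function above as a method
    def __init__(self, hsig):
        self._hsig = hsig

assert _Scope({"f": _Sig(True), "g": _Sig(True), "x": _Sig()}).count_funs() == 2
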
def insert_data_frame(col, df, int_col=None, binary_col=None, minimal_size=5):
"""Insert ``pandas.DataFrame``.
:param col: :class:`pymongo.collection.Collection` instance.
:param df: :class:`pandas.DataFrame` instance.
:param int_col: list of integer-type column.
:param binary_col: list of binary-type column.
"""
data = transform.to_dict_list_generic_type(df,
int_col=int_col,
binary_col=binary_col)
smart_insert(col, data, minimal_size) | def function[insert_data_frame, parameter[col, df, int_col, binary_col, minimal_size]]:
constant[Insert ``pandas.DataFrame``.
:param col: :class:`pymongo.collection.Collection` instance.
:param df: :class:`pandas.DataFrame` instance.
:param int_col: list of integer-type column.
:param binary_col: list of binary-type column.
]
variable[data] assign[=] call[name[transform].to_dict_list_generic_type, parameter[name[df]]]
call[name[smart_insert], parameter[name[col], name[data], name[minimal_size]]] | keyword[def] identifier[insert_data_frame] ( identifier[col] , identifier[df] , identifier[int_col] = keyword[None] , identifier[binary_col] = keyword[None] , identifier[minimal_size] = literal[int] ):
literal[string]
identifier[data] = identifier[transform] . identifier[to_dict_list_generic_type] ( identifier[df] ,
identifier[int_col] = identifier[int_col] ,
identifier[binary_col] = identifier[binary_col] )
identifier[smart_insert] ( identifier[col] , identifier[data] , identifier[minimal_size] ) | def insert_data_frame(col, df, int_col=None, binary_col=None, minimal_size=5):
"""Insert ``pandas.DataFrame``.
:param col: :class:`pymongo.collection.Collection` instance.
:param df: :class:`pandas.DataFrame` instance.
:param int_col: list of integer-type column.
:param binary_col: list of binary-type column.
"""
data = transform.to_dict_list_generic_type(df, int_col=int_col, binary_col=binary_col)
smart_insert(col, data, minimal_size) |
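
A usage sketch assuming a MongoDB server on localhost and that the transform/smart_insert helpers ship in the same package as the function above.

import pandas as pd
from pymongo import MongoClient

col = MongoClient()["testdb"]["users"]                  # assumes a local mongod
df = pd.DataFrame({"_id": [1, 2], "name": ["a", "b"]})
insert_data_frame(col, df, int_col=["_id"])             # bulk-inserts both rows
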
def movieframe(args):
"""
%prog movieframe tour test.clm contigs.ref.anchors
Draw heatmap and synteny in the same plot.
"""
p = OptionParser(movieframe.__doc__)
p.add_option("--label", help="Figure title")
p.set_beds()
p.set_outfile(outfile=None)
opts, args, iopts = p.set_image_options(args, figsize="16x8",
style="white", cmap="coolwarm",
format="png", dpi=120)
if len(args) != 3:
sys.exit(not p.print_help())
tour, clmfile, anchorsfile = args
tour = tour.split(",")
image_name = opts.outfile or ("movieframe." + iopts.format)
label = opts.label or op.basename(image_name).rsplit(".", 1)[0]
clm = CLMFile(clmfile)
totalbins, bins, breaks = make_bins(tour, clm.tig_to_size)
M = read_clm(clm, totalbins, bins)
fig = plt.figure(1, (iopts.w, iopts.h))
root = fig.add_axes([0, 0, 1, 1]) # whole canvas
ax1 = fig.add_axes([.05, .1, .4, .8]) # heatmap
ax2 = fig.add_axes([.55, .1, .4, .8]) # dot plot
ax2_root = fig.add_axes([.5, 0, .5, 1]) # dot plot canvas
# Left axis: heatmap
plot_heatmap(ax1, M, breaks, iopts)
# Right axis: synteny
qbed, sbed, qorder, sorder, is_self = check_beds(anchorsfile, p, opts,
sorted=False)
dotplot(anchorsfile, qbed, sbed, fig, ax2_root, ax2, sep=False, title="")
root.text(.5, .98, clm.name, color="g", ha="center", va="center")
root.text(.5, .95, label, color="darkslategray", ha="center", va="center")
normalize_axes(root)
savefig(image_name, dpi=iopts.dpi, iopts=iopts) | def function[movieframe, parameter[args]]:
constant[
%prog movieframe tour test.clm contigs.ref.anchors
Draw heatmap and synteny in the same plot.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[movieframe].__doc__]]
call[name[p].add_option, parameter[constant[--label]]]
call[name[p].set_beds, parameter[]]
call[name[p].set_outfile, parameter[]]
<ast.Tuple object at 0x7da2047e9ba0> assign[=] call[name[p].set_image_options, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[3]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da2047e92d0>]]
<ast.Tuple object at 0x7da2047e8700> assign[=] name[args]
variable[tour] assign[=] call[name[tour].split, parameter[constant[,]]]
variable[image_name] assign[=] <ast.BoolOp object at 0x7da20c6e51b0>
variable[label] assign[=] <ast.BoolOp object at 0x7da18f7223b0>
variable[clm] assign[=] call[name[CLMFile], parameter[name[clmfile]]]
<ast.Tuple object at 0x7da18f721cf0> assign[=] call[name[make_bins], parameter[name[tour], name[clm].tig_to_size]]
variable[M] assign[=] call[name[read_clm], parameter[name[clm], name[totalbins], name[bins]]]
variable[fig] assign[=] call[name[plt].figure, parameter[constant[1], tuple[[<ast.Attribute object at 0x7da18f720250>, <ast.Attribute object at 0x7da18f720f10>]]]]
variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da18f721bd0>, <ast.Constant object at 0x7da18f722860>, <ast.Constant object at 0x7da18f723550>, <ast.Constant object at 0x7da18f7201c0>]]]]
variable[ax1] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da18f7219c0>, <ast.Constant object at 0x7da18f7211e0>, <ast.Constant object at 0x7da18f7220e0>, <ast.Constant object at 0x7da18f722980>]]]]
variable[ax2] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da18f720e80>, <ast.Constant object at 0x7da18f721750>, <ast.Constant object at 0x7da18f722110>, <ast.Constant object at 0x7da18f721930>]]]]
variable[ax2_root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da18f722920>, <ast.Constant object at 0x7da18f720ee0>, <ast.Constant object at 0x7da18f721450>, <ast.Constant object at 0x7da18f721540>]]]]
call[name[plot_heatmap], parameter[name[ax1], name[M], name[breaks], name[iopts]]]
<ast.Tuple object at 0x7da18f723400> assign[=] call[name[check_beds], parameter[name[anchorsfile], name[p], name[opts]]]
call[name[dotplot], parameter[name[anchorsfile], name[qbed], name[sbed], name[fig], name[ax2_root], name[ax2]]]
call[name[root].text, parameter[constant[0.5], constant[0.98], name[clm].name]]
call[name[root].text, parameter[constant[0.5], constant[0.95], name[label]]]
call[name[normalize_axes], parameter[name[root]]]
call[name[savefig], parameter[name[image_name]]] | keyword[def] identifier[movieframe] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[movieframe] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[help] = literal[string] )
identifier[p] . identifier[set_beds] ()
identifier[p] . identifier[set_outfile] ( identifier[outfile] = keyword[None] )
identifier[opts] , identifier[args] , identifier[iopts] = identifier[p] . identifier[set_image_options] ( identifier[args] , identifier[figsize] = literal[string] ,
identifier[style] = literal[string] , identifier[cmap] = literal[string] ,
identifier[format] = literal[string] , identifier[dpi] = literal[int] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[tour] , identifier[clmfile] , identifier[anchorsfile] = identifier[args]
identifier[tour] = identifier[tour] . identifier[split] ( literal[string] )
identifier[image_name] = identifier[opts] . identifier[outfile] keyword[or] ( literal[string] + identifier[iopts] . identifier[format] )
identifier[label] = identifier[opts] . identifier[label] keyword[or] identifier[op] . identifier[basename] ( identifier[image_name] ). identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
identifier[clm] = identifier[CLMFile] ( identifier[clmfile] )
identifier[totalbins] , identifier[bins] , identifier[breaks] = identifier[make_bins] ( identifier[tour] , identifier[clm] . identifier[tig_to_size] )
identifier[M] = identifier[read_clm] ( identifier[clm] , identifier[totalbins] , identifier[bins] )
identifier[fig] = identifier[plt] . identifier[figure] ( literal[int] ,( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] ))
identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[ax1] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[ax2] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[ax2_root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[plot_heatmap] ( identifier[ax1] , identifier[M] , identifier[breaks] , identifier[iopts] )
identifier[qbed] , identifier[sbed] , identifier[qorder] , identifier[sorder] , identifier[is_self] = identifier[check_beds] ( identifier[anchorsfile] , identifier[p] , identifier[opts] ,
identifier[sorted] = keyword[False] )
identifier[dotplot] ( identifier[anchorsfile] , identifier[qbed] , identifier[sbed] , identifier[fig] , identifier[ax2_root] , identifier[ax2] , identifier[sep] = keyword[False] , identifier[title] = literal[string] )
identifier[root] . identifier[text] ( literal[int] , literal[int] , identifier[clm] . identifier[name] , identifier[color] = literal[string] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[root] . identifier[text] ( literal[int] , literal[int] , identifier[label] , identifier[color] = literal[string] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[normalize_axes] ( identifier[root] )
identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[iopts] . identifier[dpi] , identifier[iopts] = identifier[iopts] ) | def movieframe(args):
"""
%prog movieframe tour test.clm contigs.ref.anchors
Draw heatmap and synteny in the same plot.
"""
p = OptionParser(movieframe.__doc__)
p.add_option('--label', help='Figure title')
p.set_beds()
p.set_outfile(outfile=None)
(opts, args, iopts) = p.set_image_options(args, figsize='16x8', style='white', cmap='coolwarm', format='png', dpi=120)
if len(args) != 3:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(tour, clmfile, anchorsfile) = args
tour = tour.split(',')
image_name = opts.outfile or 'movieframe.' + iopts.format
label = opts.label or op.basename(image_name).rsplit('.', 1)[0]
clm = CLMFile(clmfile)
(totalbins, bins, breaks) = make_bins(tour, clm.tig_to_size)
M = read_clm(clm, totalbins, bins)
fig = plt.figure(1, (iopts.w, iopts.h))
root = fig.add_axes([0, 0, 1, 1]) # whole canvas
ax1 = fig.add_axes([0.05, 0.1, 0.4, 0.8]) # heatmap
ax2 = fig.add_axes([0.55, 0.1, 0.4, 0.8]) # dot plot
ax2_root = fig.add_axes([0.5, 0, 0.5, 1]) # dot plot canvas
# Left axis: heatmap
plot_heatmap(ax1, M, breaks, iopts)
# Right axis: synteny
(qbed, sbed, qorder, sorder, is_self) = check_beds(anchorsfile, p, opts, sorted=False)
dotplot(anchorsfile, qbed, sbed, fig, ax2_root, ax2, sep=False, title='')
root.text(0.5, 0.98, clm.name, color='g', ha='center', va='center')
root.text(0.5, 0.95, label, color='darkslategray', ha='center', va='center')
normalize_axes(root)
savefig(image_name, dpi=iopts.dpi, iopts=iopts) |
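
The function above is a CLI entry point; a hedged invocation mirroring the docstring's usage, with placeholder file names that must exist on disk (the tour is a comma-separated, ordered contig list).

movieframe([
    "tig00001,tig00002,tig00003",   # tour
    "test.clm",                     # Hi-C contact file
    "contigs.ref.anchors",          # synteny anchors against the reference
    "--label", "frame_000",
])
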
def __EncodedAttribute_encode_rgb24(self, rgb24, width=0, height=0):
"""Encode a 24 bit color image (no compression)
    :param rgb24: an object containing image information
:type rgb24: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
:param width: image width. **MUST** be given if rgb24 is a string or
if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type width: :py:obj:`int`
:param height: image height. **MUST** be given if rgb24 is a string
or if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type height: :py:obj:`int`
.. note::
When :class:`numpy.ndarray` is given:
- rgb24 **MUST** be CONTIGUOUS, ALIGNED
- if rgb24.ndims != 3, width and height **MUST** be given and
rgb24.nbytes/3 **MUST** match width*height
- if rgb24.ndims == 3, rgb24.itemsize **MUST** be 1 (typically,
rgb24.dtype is one of `numpy.dtype.byte`, `numpy.dtype.ubyte`,
`numpy.dtype.int8` or `numpy.dtype.uint8`) and shape **MUST** be
(height, width, 3)
Example::
def read_myattr(self, attr):
enc = tango.EncodedAttribute()
# create an 'image' where each pixel is R=0x01, G=0x01, B=0x01
arr = numpy.ones((10,10,3), dtype=numpy.uint8)
            enc.encode_rgb24(arr)
attr.set_value(enc)
"""
self._generic_encode_rgb24(rgb24, width=width, height=height, format=_ImageFormat.RawImage) | def function[__EncodedAttribute_encode_rgb24, parameter[self, rgb24, width, height]]:
constant[Encode a 24 bit color image (no compression)
    :param rgb24: an object containing image information
:type rgb24: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
:param width: image width. **MUST** be given if rgb24 is a string or
if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type width: :py:obj:`int`
:param height: image height. **MUST** be given if rgb24 is a string
or if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type height: :py:obj:`int`
.. note::
When :class:`numpy.ndarray` is given:
- rgb24 **MUST** be CONTIGUOUS, ALIGNED
- if rgb24.ndims != 3, width and height **MUST** be given and
rgb24.nbytes/3 **MUST** match width*height
- if rgb24.ndims == 3, rgb24.itemsize **MUST** be 1 (typically,
rgb24.dtype is one of `numpy.dtype.byte`, `numpy.dtype.ubyte`,
`numpy.dtype.int8` or `numpy.dtype.uint8`) and shape **MUST** be
(height, width, 3)
Example::
def read_myattr(self, attr):
enc = tango.EncodedAttribute()
# create an 'image' where each pixel is R=0x01, G=0x01, B=0x01
arr = numpy.ones((10,10,3), dtype=numpy.uint8)
            enc.encode_rgb24(arr)
attr.set_value(enc)
]
call[name[self]._generic_encode_rgb24, parameter[name[rgb24]]] | keyword[def] identifier[__EncodedAttribute_encode_rgb24] ( identifier[self] , identifier[rgb24] , identifier[width] = literal[int] , identifier[height] = literal[int] ):
literal[string]
identifier[self] . identifier[_generic_encode_rgb24] ( identifier[rgb24] , identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[format] = identifier[_ImageFormat] . identifier[RawImage] ) | def __EncodedAttribute_encode_rgb24(self, rgb24, width=0, height=0):
"""Encode a 24 bit color image (no compression)
    :param rgb24: an object containing image information
:type rgb24: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
:param width: image width. **MUST** be given if rgb24 is a string or
if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type width: :py:obj:`int`
:param height: image height. **MUST** be given if rgb24 is a string
or if it is a :class:`numpy.ndarray` with ndims != 3.
Otherwise it is calculated internally.
:type height: :py:obj:`int`
.. note::
When :class:`numpy.ndarray` is given:
- rgb24 **MUST** be CONTIGUOUS, ALIGNED
- if rgb24.ndims != 3, width and height **MUST** be given and
rgb24.nbytes/3 **MUST** match width*height
- if rgb24.ndims == 3, rgb24.itemsize **MUST** be 1 (typically,
rgb24.dtype is one of `numpy.dtype.byte`, `numpy.dtype.ubyte`,
`numpy.dtype.int8` or `numpy.dtype.uint8`) and shape **MUST** be
(height, width, 3)
Example::
def read_myattr(self, attr):
enc = tango.EncodedAttribute()
# create an 'image' where each pixel is R=0x01, G=0x01, B=0x01
arr = numpy.ones((10,10,3), dtype=numpy.uint8)
            enc.encode_rgb24(arr)
attr.set_value(enc)
"""
self._generic_encode_rgb24(rgb24, width=width, height=height, format=_ImageFormat.RawImage) |
def get_api_endpoints(self, apiname):
"""Returns the API endpoints"""
try:
return self.services_by_name\
.get(apiname)\
.get("endpoints")\
.copy()
except AttributeError:
raise Exception(f"Couldn't find the API endpoints") | def function[get_api_endpoints, parameter[self, apiname]]:
constant[Returns the API endpoints]
<ast.Try object at 0x7da18f09e920> | keyword[def] identifier[get_api_endpoints] ( identifier[self] , identifier[apiname] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[services_by_name] . identifier[get] ( identifier[apiname] ). identifier[get] ( literal[string] ). identifier[copy] ()
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[Exception] ( literal[string] ) | def get_api_endpoints(self, apiname):
"""Returns the API endpoints"""
try:
return self.services_by_name.get(apiname).get('endpoints').copy() # depends on [control=['try'], data=[]]
except AttributeError:
raise Exception(f"Couldn't find the API endpoints") # depends on [control=['except'], data=[]] |
def _get_consecutive_portions_of_front(front):
"""
Yields lists of the form [(f, s), (f, s)], one at a time from the given front (which is a list of the same form),
such that each list yielded is consecutive in frequency.
"""
last_f = None
ls = []
for f, s in front:
if last_f is not None and f != last_f + 1:
yield ls
ls = []
ls.append((f, s))
last_f = f
yield ls | def function[_get_consecutive_portions_of_front, parameter[front]]:
constant[
Yields lists of the form [(f, s), (f, s)], one at a time from the given front (which is a list of the same form),
such that each list yielded is consecutive in frequency.
]
variable[last_f] assign[=] constant[None]
variable[ls] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b032caf0>, <ast.Name object at 0x7da1b032f250>]]] in starred[name[front]] begin[:]
if <ast.BoolOp object at 0x7da1b032fd90> begin[:]
<ast.Yield object at 0x7da1b032ed40>
variable[ls] assign[=] list[[]]
call[name[ls].append, parameter[tuple[[<ast.Name object at 0x7da1b03965c0>, <ast.Name object at 0x7da1b0395a50>]]]]
variable[last_f] assign[=] name[f]
<ast.Yield object at 0x7da1b03960b0> | keyword[def] identifier[_get_consecutive_portions_of_front] ( identifier[front] ):
literal[string]
identifier[last_f] = keyword[None]
identifier[ls] =[]
keyword[for] identifier[f] , identifier[s] keyword[in] identifier[front] :
keyword[if] identifier[last_f] keyword[is] keyword[not] keyword[None] keyword[and] identifier[f] != identifier[last_f] + literal[int] :
keyword[yield] identifier[ls]
identifier[ls] =[]
identifier[ls] . identifier[append] (( identifier[f] , identifier[s] ))
identifier[last_f] = identifier[f]
keyword[yield] identifier[ls] | def _get_consecutive_portions_of_front(front):
"""
Yields lists of the form [(f, s), (f, s)], one at a time from the given front (which is a list of the same form),
such that each list yielded is consecutive in frequency.
"""
last_f = None
ls = []
for (f, s) in front:
if last_f is not None and f != last_f + 1:
yield ls
ls = [] # depends on [control=['if'], data=[]]
ls.append((f, s))
last_f = f # depends on [control=['for'], data=[]]
yield ls |
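
A deterministic example of the generator above: runs that are consecutive in frequency come out as separate lists.

front = [(3, "a"), (4, "b"), (7, "c"), (8, "d"), (10, "e")]
print(list(_get_consecutive_portions_of_front(front)))
# -> [[(3, 'a'), (4, 'b')], [(7, 'c'), (8, 'd')], [(10, 'e')]]
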
def wrap(self, word, width, hyphen='-'):
"""
Return the longest possible first part and the last part of the
hyphenated word. The first part has the hyphen already attached.
Returns None, if there is no hyphenation point before width, or
if the word could not be hyphenated.
"""
width -= len(hyphen)
for w1, w2 in self.iterate(word):
if len(w1) <= width:
return w1 + hyphen, w2 | def function[wrap, parameter[self, word, width, hyphen]]:
constant[
Return the longest possible first part and the last part of the
hyphenated word. The first part has the hyphen already attached.
Returns None, if there is no hyphenation point before width, or
if the word could not be hyphenated.
]
<ast.AugAssign object at 0x7da18f58c730>
for taget[tuple[[<ast.Name object at 0x7da204345e70>, <ast.Name object at 0x7da204345900>]]] in starred[call[name[self].iterate, parameter[name[word]]]] begin[:]
if compare[call[name[len], parameter[name[w1]]] less_or_equal[<=] name[width]] begin[:]
return[tuple[[<ast.BinOp object at 0x7da2043469b0>, <ast.Name object at 0x7da204347940>]]] | keyword[def] identifier[wrap] ( identifier[self] , identifier[word] , identifier[width] , identifier[hyphen] = literal[string] ):
literal[string]
identifier[width] -= identifier[len] ( identifier[hyphen] )
keyword[for] identifier[w1] , identifier[w2] keyword[in] identifier[self] . identifier[iterate] ( identifier[word] ):
keyword[if] identifier[len] ( identifier[w1] )<= identifier[width] :
keyword[return] identifier[w1] + identifier[hyphen] , identifier[w2] | def wrap(self, word, width, hyphen='-'):
"""
Return the longest possible first part and the last part of the
hyphenated word. The first part has the hyphen already attached.
Returns None, if there is no hyphenation point before width, or
if the word could not be hyphenated.
"""
width -= len(hyphen)
for (w1, w2) in self.iterate(word):
if len(w1) <= width:
return (w1 + hyphen, w2) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
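
A hedged sketch of the method above in PyHyphen-style usage; the exact split point depends on the loaded language patterns, so the first result is only a plausible outcome.

from hyphen import Hyphenator   # PyHyphen

h = Hyphenator("en_US")
h.wrap("hyphenation", 7)   # plausibly ('hyphen-', 'ation'): 'hyphen-' fits in 7 cells
h.wrap("hyphenation", 2)   # None: after reserving the hyphen, no prefix fits in 1 cell
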
def get_response_content_type(self):
"""Figure out what content type will be used in the response."""
if self._best_response_match is None:
settings = get_settings(self.application, force_instance=True)
acceptable = headers.parse_accept(
self.request.headers.get(
'Accept',
settings.default_content_type
if settings.default_content_type else '*/*'))
try:
selected, _ = algorithms.select_content_type(
acceptable, settings.available_content_types)
self._best_response_match = '/'.join(
[selected.content_type, selected.content_subtype])
if selected.content_suffix is not None:
self._best_response_match = '+'.join(
[self._best_response_match, selected.content_suffix])
except errors.NoMatch:
self._best_response_match = settings.default_content_type
return self._best_response_match | def function[get_response_content_type, parameter[self]]:
constant[Figure out what content type will be used in the response.]
if compare[name[self]._best_response_match is constant[None]] begin[:]
variable[settings] assign[=] call[name[get_settings], parameter[name[self].application]]
variable[acceptable] assign[=] call[name[headers].parse_accept, parameter[call[name[self].request.headers.get, parameter[constant[Accept], <ast.IfExp object at 0x7da20e74b2b0>]]]]
<ast.Try object at 0x7da20e748820>
return[name[self]._best_response_match] | keyword[def] identifier[get_response_content_type] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_best_response_match] keyword[is] keyword[None] :
identifier[settings] = identifier[get_settings] ( identifier[self] . identifier[application] , identifier[force_instance] = keyword[True] )
identifier[acceptable] = identifier[headers] . identifier[parse_accept] (
identifier[self] . identifier[request] . identifier[headers] . identifier[get] (
literal[string] ,
identifier[settings] . identifier[default_content_type]
keyword[if] identifier[settings] . identifier[default_content_type] keyword[else] literal[string] ))
keyword[try] :
identifier[selected] , identifier[_] = identifier[algorithms] . identifier[select_content_type] (
identifier[acceptable] , identifier[settings] . identifier[available_content_types] )
identifier[self] . identifier[_best_response_match] = literal[string] . identifier[join] (
[ identifier[selected] . identifier[content_type] , identifier[selected] . identifier[content_subtype] ])
keyword[if] identifier[selected] . identifier[content_suffix] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_best_response_match] = literal[string] . identifier[join] (
[ identifier[self] . identifier[_best_response_match] , identifier[selected] . identifier[content_suffix] ])
keyword[except] identifier[errors] . identifier[NoMatch] :
identifier[self] . identifier[_best_response_match] = identifier[settings] . identifier[default_content_type]
keyword[return] identifier[self] . identifier[_best_response_match] | def get_response_content_type(self):
"""Figure out what content type will be used in the response."""
if self._best_response_match is None:
settings = get_settings(self.application, force_instance=True)
acceptable = headers.parse_accept(self.request.headers.get('Accept', settings.default_content_type if settings.default_content_type else '*/*'))
try:
(selected, _) = algorithms.select_content_type(acceptable, settings.available_content_types)
self._best_response_match = '/'.join([selected.content_type, selected.content_subtype])
if selected.content_suffix is not None:
self._best_response_match = '+'.join([self._best_response_match, selected.content_suffix]) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except errors.NoMatch:
self._best_response_match = settings.default_content_type # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return self._best_response_match |
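
The negotiation step in isolation, assuming the ietfparse library that the headers/algorithms calls above come from; quality parameters decide which registered type wins.

from ietfparse import algorithms, headers

acceptable = headers.parse_accept("application/msgpack;q=0.9, application/json")
available = [headers.parse_content_type(t)
             for t in ("application/json", "application/msgpack")]
selected, _ = algorithms.select_content_type(acceptable, available)
print(selected.content_type, selected.content_subtype)  # -> application json
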
def conv2d(self, filter_size, output_channels, stride=1, padding='SAME', stoch=None, bn=True, test=False, activation_fn=tf.nn.relu, b_value=0.0, s_value=1.0):
"""
2D Convolutional Layer.
        :param filter_size: int. assumes square filter
        :param output_channels: int
        :param stride: int
        :param padding: 'VALID' or 'SAME'
        :param stoch: 'N' (normal) or 'B' (Bernoulli) to draw a stochastic refinement of the feature maps; None disables it
        :param bn: bool. apply batch normalization before the bias/scale terms
        :param test: bool. currently unused
        :param activation_fn: tf.nn function
        :param b_value: float
        :param s_value: float
"""
self.count['conv'] += 1
scope = 'conv_' + str(self.count['conv'])
with tf.variable_scope(scope):
# Conv function
input_channels = self.input.get_shape()[3]
if filter_size == 0: # outputs a 1x1 feature map; used for FCN
filter_size = self.input.get_shape()[2]
padding = 'VALID'
output_shape = [filter_size, filter_size, input_channels, output_channels]
w = self.weight_variable(name='weights', shape=output_shape)
self.input = tf.nn.conv2d(self.input, w, strides=[1, stride, stride, 1], padding=padding)
# Additional functions
if stoch is not None: # Draw feature map values from a normal distribution
if stoch == 'N': # Normal
output_shape = [3, 3, output_channels, 1]
w2 = self.weight_variable(name='weights_mean', shape=output_shape)
mean = tf.nn.conv2d(self.input, w2, strides=[1, 1, 1, 1], padding=padding)
w3 = self.weight_variable(name='weights_std', shape=output_shape)
std = tf.nn.conv2d(self.input, w3, strides=[1, 1, 1, 1], padding=padding)
map_size = tf.pack([mean.get_shape()[1], mean.get_shape()[2]])
z = mean + tf.random_normal(map_size) * std
                if stoch == 'B': # Bernoulli
                    # FIXME: mean is an int here, so mean.get_shape() raises and z is
                    # never assigned on this path; the Bernoulli branch is incomplete.
                    mean = 0
                    map_size = tf.pack([mean.get_shape()[1], mean.get_shape()[2]])
with tf.variable_scope("stoch"):
output_shape = tf.pack([self.input.get_shape()[1], self.input.get_shape()[2], 1, 1])
w3 = self.weight_variable(name='weights_refinement', shape=output_shape)
self.input = self.input + z * w3
if bn is True: # batch normalization
self.input = self.batch_norm(self.input)
if b_value is not None: # bias value
b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
self.input = tf.add(self.input, b)
if s_value is not None: # scale value
s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
self.input = tf.multiply(self.input, s)
if activation_fn is not None: # activation function
self.input = activation_fn(self.input)
self.print_log(scope + ' output: ' + str(self.input.get_shape())) | def function[conv2d, parameter[self, filter_size, output_channels, stride, padding, stoch, bn, test, activation_fn, b_value, s_value]]:
constant[
2D Convolutional Layer.
        :param filter_size: int. assumes square filter
        :param output_channels: int
        :param stride: int
        :param padding: 'VALID' or 'SAME'
        :param stoch: 'N' (normal) or 'B' (Bernoulli) to draw a stochastic refinement of the feature maps; None disables it
        :param bn: bool. apply batch normalization before the bias/scale terms
        :param test: bool. currently unused
        :param activation_fn: tf.nn function
        :param b_value: float
        :param s_value: float
]
<ast.AugAssign object at 0x7da18eb540a0>
variable[scope] assign[=] binary_operation[constant[conv_] + call[name[str], parameter[call[name[self].count][constant[conv]]]]]
with call[name[tf].variable_scope, parameter[name[scope]]] begin[:]
variable[input_channels] assign[=] call[call[name[self].input.get_shape, parameter[]]][constant[3]]
if compare[name[filter_size] equal[==] constant[0]] begin[:]
variable[filter_size] assign[=] call[call[name[self].input.get_shape, parameter[]]][constant[2]]
variable[padding] assign[=] constant[VALID]
variable[output_shape] assign[=] list[[<ast.Name object at 0x7da18eb55540>, <ast.Name object at 0x7da18eb56fe0>, <ast.Name object at 0x7da18eb55f60>, <ast.Name object at 0x7da18eb56d10>]]
variable[w] assign[=] call[name[self].weight_variable, parameter[]]
name[self].input assign[=] call[name[tf].nn.conv2d, parameter[name[self].input, name[w]]]
if compare[name[stoch] is_not constant[None]] begin[:]
if compare[name[stoch] equal[==] constant[N]] begin[:]
variable[output_shape] assign[=] list[[<ast.Constant object at 0x7da18eb57430>, <ast.Constant object at 0x7da18eb55b40>, <ast.Name object at 0x7da18eb576a0>, <ast.Constant object at 0x7da18eb57a30>]]
variable[w2] assign[=] call[name[self].weight_variable, parameter[]]
variable[mean] assign[=] call[name[tf].nn.conv2d, parameter[name[self].input, name[w2]]]
variable[w3] assign[=] call[name[self].weight_variable, parameter[]]
variable[std] assign[=] call[name[tf].nn.conv2d, parameter[name[self].input, name[w3]]]
variable[map_size] assign[=] call[name[tf].pack, parameter[list[[<ast.Subscript object at 0x7da2049626b0>, <ast.Subscript object at 0x7da204962da0>]]]]
variable[z] assign[=] binary_operation[name[mean] + binary_operation[call[name[tf].random_normal, parameter[name[map_size]]] * name[std]]]
if compare[name[stoch] equal[==] constant[B]] begin[:]
variable[mean] assign[=] constant[0]
variable[map_size] assign[=] call[name[tf].pack, parameter[list[[<ast.Subscript object at 0x7da18dc986d0>, <ast.Subscript object at 0x7da18dc9b6d0>]]]]
with call[name[tf].variable_scope, parameter[constant[stoch]]] begin[:]
variable[output_shape] assign[=] call[name[tf].pack, parameter[list[[<ast.Subscript object at 0x7da18dc9b970>, <ast.Subscript object at 0x7da18dc9b910>, <ast.Constant object at 0x7da18dc99540>, <ast.Constant object at 0x7da18dc98c40>]]]]
variable[w3] assign[=] call[name[self].weight_variable, parameter[]]
name[self].input assign[=] binary_operation[name[self].input + binary_operation[name[z] * name[w3]]]
if compare[name[bn] is constant[True]] begin[:]
name[self].input assign[=] call[name[self].batch_norm, parameter[name[self].input]]
if compare[name[b_value] is_not constant[None]] begin[:]
variable[b] assign[=] call[name[self].const_variable, parameter[]]
name[self].input assign[=] call[name[tf].add, parameter[name[self].input, name[b]]]
if compare[name[s_value] is_not constant[None]] begin[:]
variable[s] assign[=] call[name[self].const_variable, parameter[]]
name[self].input assign[=] call[name[tf].multiply, parameter[name[self].input, name[s]]]
if compare[name[activation_fn] is_not constant[None]] begin[:]
name[self].input assign[=] call[name[activation_fn], parameter[name[self].input]]
call[name[self].print_log, parameter[binary_operation[binary_operation[name[scope] + constant[ output: ]] + call[name[str], parameter[call[name[self].input.get_shape, parameter[]]]]]]] | keyword[def] identifier[conv2d] ( identifier[self] , identifier[filter_size] , identifier[output_channels] , identifier[stride] = literal[int] , identifier[padding] = literal[string] , identifier[stoch] = keyword[None] , identifier[bn] = keyword[True] , identifier[test] = keyword[False] , identifier[activation_fn] = identifier[tf] . identifier[nn] . identifier[relu] , identifier[b_value] = literal[int] , identifier[s_value] = literal[int] ):
literal[string]
identifier[self] . identifier[count] [ literal[string] ]+= literal[int]
identifier[scope] = literal[string] + identifier[str] ( identifier[self] . identifier[count] [ literal[string] ])
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[scope] ):
identifier[input_channels] = identifier[self] . identifier[input] . identifier[get_shape] ()[ literal[int] ]
keyword[if] identifier[filter_size] == literal[int] :
identifier[filter_size] = identifier[self] . identifier[input] . identifier[get_shape] ()[ literal[int] ]
identifier[padding] = literal[string]
identifier[output_shape] =[ identifier[filter_size] , identifier[filter_size] , identifier[input_channels] , identifier[output_channels] ]
identifier[w] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[output_shape] )
identifier[self] . identifier[input] = identifier[tf] . identifier[nn] . identifier[conv2d] ( identifier[self] . identifier[input] , identifier[w] , identifier[strides] =[ literal[int] , identifier[stride] , identifier[stride] , literal[int] ], identifier[padding] = identifier[padding] )
keyword[if] identifier[stoch] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[stoch] == literal[string] :
identifier[output_shape] =[ literal[int] , literal[int] , identifier[output_channels] , literal[int] ]
identifier[w2] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[output_shape] )
identifier[mean] = identifier[tf] . identifier[nn] . identifier[conv2d] ( identifier[self] . identifier[input] , identifier[w2] , identifier[strides] =[ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[padding] = identifier[padding] )
identifier[w3] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[output_shape] )
identifier[std] = identifier[tf] . identifier[nn] . identifier[conv2d] ( identifier[self] . identifier[input] , identifier[w3] , identifier[strides] =[ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[padding] = identifier[padding] )
identifier[map_size] = identifier[tf] . identifier[pack] ([ identifier[mean] . identifier[get_shape] ()[ literal[int] ], identifier[mean] . identifier[get_shape] ()[ literal[int] ]])
identifier[z] = identifier[mean] + identifier[tf] . identifier[random_normal] ( identifier[map_size] )* identifier[std]
keyword[if] identifier[stoch] == literal[string] :
identifier[mean] = literal[int]
identifier[map_size] = identifier[tf] . identifier[pack] ([ identifier[mean] . identifier[get_shape] ()[ literal[int] ], identifier[mean] . identifier[get_shape] ()[ literal[int] ]])
keyword[with] identifier[tf] . identifier[variable_scope] ( literal[string] ):
identifier[output_shape] = identifier[tf] . identifier[pack] ([ identifier[self] . identifier[input] . identifier[get_shape] ()[ literal[int] ], identifier[self] . identifier[input] . identifier[get_shape] ()[ literal[int] ], literal[int] , literal[int] ])
identifier[w3] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[output_shape] )
identifier[self] . identifier[input] = identifier[self] . identifier[input] + identifier[z] * identifier[w3]
keyword[if] identifier[bn] keyword[is] keyword[True] :
identifier[self] . identifier[input] = identifier[self] . identifier[batch_norm] ( identifier[self] . identifier[input] )
keyword[if] identifier[b_value] keyword[is] keyword[not] keyword[None] :
identifier[b] = identifier[self] . identifier[const_variable] ( identifier[name] = literal[string] , identifier[shape] =[ identifier[output_channels] ], identifier[value] = identifier[b_value] )
identifier[self] . identifier[input] = identifier[tf] . identifier[add] ( identifier[self] . identifier[input] , identifier[b] )
keyword[if] identifier[s_value] keyword[is] keyword[not] keyword[None] :
identifier[s] = identifier[self] . identifier[const_variable] ( identifier[name] = literal[string] , identifier[shape] =[ identifier[output_channels] ], identifier[value] = identifier[s_value] )
identifier[self] . identifier[input] = identifier[tf] . identifier[multiply] ( identifier[self] . identifier[input] , identifier[s] )
keyword[if] identifier[activation_fn] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[input] = identifier[activation_fn] ( identifier[self] . identifier[input] )
identifier[self] . identifier[print_log] ( identifier[scope] + literal[string] + identifier[str] ( identifier[self] . identifier[input] . identifier[get_shape] ())) | def conv2d(self, filter_size, output_channels, stride=1, padding='SAME', stoch=None, bn=True, test=False, activation_fn=tf.nn.relu, b_value=0.0, s_value=1.0):
"""
2D Convolutional Layer.
        :param filter_size: int. assumes square filter
        :param output_channels: int
        :param stride: int
        :param padding: 'VALID' or 'SAME'
        :param stoch: 'N' (normal) or 'B' (Bernoulli) to draw a stochastic refinement of the feature maps; None disables it
        :param bn: bool. apply batch normalization before the bias/scale terms
        :param test: bool. currently unused
        :param activation_fn: tf.nn function
        :param b_value: float
        :param s_value: float
"""
self.count['conv'] += 1
scope = 'conv_' + str(self.count['conv'])
with tf.variable_scope(scope):
# Conv function
input_channels = self.input.get_shape()[3]
if filter_size == 0: # outputs a 1x1 feature map; used for FCN
filter_size = self.input.get_shape()[2]
padding = 'VALID' # depends on [control=['if'], data=['filter_size']]
output_shape = [filter_size, filter_size, input_channels, output_channels]
w = self.weight_variable(name='weights', shape=output_shape)
self.input = tf.nn.conv2d(self.input, w, strides=[1, stride, stride, 1], padding=padding)
# Additional functions
if stoch is not None: # Draw feature map values from a normal distribution
if stoch == 'N': # Normal
output_shape = [3, 3, output_channels, 1]
w2 = self.weight_variable(name='weights_mean', shape=output_shape)
mean = tf.nn.conv2d(self.input, w2, strides=[1, 1, 1, 1], padding=padding)
w3 = self.weight_variable(name='weights_std', shape=output_shape)
std = tf.nn.conv2d(self.input, w3, strides=[1, 1, 1, 1], padding=padding)
map_size = tf.pack([mean.get_shape()[1], mean.get_shape()[2]])
z = mean + tf.random_normal(map_size) * std # depends on [control=['if'], data=[]]
if stoch == 'B': # Bernoulli
mean = 0
map_size = tf.pack([mean.get_shape()[1], mean.get_shape()[2]]) # depends on [control=['if'], data=[]]
with tf.variable_scope('stoch'):
output_shape = tf.pack([self.input.get_shape()[1], self.input.get_shape()[2], 1, 1])
w3 = self.weight_variable(name='weights_refinement', shape=output_shape)
self.input = self.input + z * w3 # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['stoch']]
if bn is True: # batch normalization
self.input = self.batch_norm(self.input) # depends on [control=['if'], data=[]]
if b_value is not None: # bias value
b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
self.input = tf.add(self.input, b) # depends on [control=['if'], data=['b_value']]
if s_value is not None: # scale value
s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
self.input = tf.multiply(self.input, s) # depends on [control=['if'], data=['s_value']]
if activation_fn is not None: # activation function
self.input = activation_fn(self.input) # depends on [control=['if'], data=['activation_fn']] # depends on [control=['with'], data=[]]
self.print_log(scope + ' output: ' + str(self.input.get_shape())) |
def s3_cache_timeout(self):
"""
The socket timeout in seconds for connections to Amazon S3 (an integer).
This value is injected into Boto's configuration to override the
default socket timeout used for connections to Amazon S3.
- Environment variable: ``$PIP_ACCEL_S3_TIMEOUT``
- Configuration option: ``s3-timeout``
- Default: ``60`` (`Boto's default`_)
.. _Boto's default: http://boto.readthedocs.org/en/latest/boto_config_tut.html
"""
value = self.get(property_name='s3_cache_timeout',
environment_variable='PIP_ACCEL_S3_TIMEOUT',
configuration_option='s3-timeout')
try:
n = int(value)
if n >= 0:
return n
except (TypeError, ValueError):
return 60 | def function[s3_cache_timeout, parameter[self]]:
constant[
The socket timeout in seconds for connections to Amazon S3 (an integer).
This value is injected into Boto's configuration to override the
default socket timeout used for connections to Amazon S3.
- Environment variable: ``$PIP_ACCEL_S3_TIMEOUT``
- Configuration option: ``s3-timeout``
- Default: ``60`` (`Boto's default`_)
.. _Boto's default: http://boto.readthedocs.org/en/latest/boto_config_tut.html
]
variable[value] assign[=] call[name[self].get, parameter[]]
<ast.Try object at 0x7da1b0476f20> | keyword[def] identifier[s3_cache_timeout] ( identifier[self] ):
literal[string]
identifier[value] = identifier[self] . identifier[get] ( identifier[property_name] = literal[string] ,
identifier[environment_variable] = literal[string] ,
identifier[configuration_option] = literal[string] )
keyword[try] :
identifier[n] = identifier[int] ( identifier[value] )
keyword[if] identifier[n] >= literal[int] :
keyword[return] identifier[n]
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[return] literal[int] | def s3_cache_timeout(self):
"""
The socket timeout in seconds for connections to Amazon S3 (an integer).
This value is injected into Boto's configuration to override the
default socket timeout used for connections to Amazon S3.
- Environment variable: ``$PIP_ACCEL_S3_TIMEOUT``
- Configuration option: ``s3-timeout``
- Default: ``60`` (`Boto's default`_)
.. _Boto's default: http://boto.readthedocs.org/en/latest/boto_config_tut.html
"""
value = self.get(property_name='s3_cache_timeout', environment_variable='PIP_ACCEL_S3_TIMEOUT', configuration_option='s3-timeout')
try:
n = int(value)
if n >= 0:
return n # depends on [control=['if'], data=['n']] # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
return 60 # depends on [control=['except'], data=[]] |
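The property above delegates the env-var/config-file/default lookup to self.get(). A small stand-alone sketch of that resolution order (get_setting and its arguments are hypothetical stand-ins, not pip-accel's API):
import os

def get_setting(environment_variable, config, configuration_option, default):
    # Resolution order mirrors the docstring: environment variable first,
    # then the configuration option, then the documented default.
    value = os.environ.get(environment_variable)
    if value is None:
        value = config.get(configuration_option)
    try:
        n = int(value)
        if n >= 0:
            return n
    except (TypeError, ValueError):
        pass
    return default

print(get_setting('PIP_ACCEL_S3_TIMEOUT', {}, 's3-timeout', 60))  # 60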
def get_datetime_issue_in_progress(self, issue):
"""
If the issue is in progress, gets the most recent time that the issue became 'In Progress'
"""
histories = issue.changelog.histories
for history in reversed(histories):
history_items = history.items
for item in history_items:
if item.field == 'status' and item.toString == "In Progress":
return dateutil.parser.parse(history.created) | def function[get_datetime_issue_in_progress, parameter[self, issue]]:
constant[
If the issue is in progress, gets the most recent time that the issue became 'In Progress'
]
variable[histories] assign[=] name[issue].changelog.histories
for taget[name[history]] in starred[call[name[reversed], parameter[name[histories]]]] begin[:]
variable[history_items] assign[=] name[history].items
for taget[name[item]] in starred[name[history_items]] begin[:]
if <ast.BoolOp object at 0x7da1b1606260> begin[:]
return[call[name[dateutil].parser.parse, parameter[name[history].created]]] | keyword[def] identifier[get_datetime_issue_in_progress] ( identifier[self] , identifier[issue] ):
literal[string]
identifier[histories] = identifier[issue] . identifier[changelog] . identifier[histories]
keyword[for] identifier[history] keyword[in] identifier[reversed] ( identifier[histories] ):
identifier[history_items] = identifier[history] . identifier[items]
keyword[for] identifier[item] keyword[in] identifier[history_items] :
keyword[if] identifier[item] . identifier[field] == literal[string] keyword[and] identifier[item] . identifier[toString] == literal[string] :
keyword[return] identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[history] . identifier[created] ) | def get_datetime_issue_in_progress(self, issue):
"""
If the issue is in progress, gets the most recent time that the issue became 'In Progress'
"""
histories = issue.changelog.histories
for history in reversed(histories):
history_items = history.items
for item in history_items:
if item.field == 'status' and item.toString == 'In Progress':
return dateutil.parser.parse(history.created) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['history']] |
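Because histories are ordered oldest-first, iterating over reversed(histories) returns the most recent matching transition first. A runnable sketch with stub objects standing in for the JIRA changelog:
from types import SimpleNamespace
import dateutil.parser  # third-party: python-dateutil

def last_in_progress(histories):
    # Newest-to-oldest scan; the first hit is the most recent transition
    # whose status field changed to "In Progress".
    for history in reversed(histories):
        for item in history.items:
            if item.field == 'status' and item.toString == 'In Progress':
                return dateutil.parser.parse(history.created)

histories = [
    SimpleNamespace(created='2021-01-01T09:00:00',
                    items=[SimpleNamespace(field='status', toString='In Progress')]),
    SimpleNamespace(created='2021-01-05T10:30:00',
                    items=[SimpleNamespace(field='status', toString='In Progress')]),
]
print(last_in_progress(histories))  # 2021-01-05 10:30:00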
def _set_if_type(self, v, load=False):
"""
Setter method for if_type, mapped from YANG variable /mpls_state/dynamic_bypass/dynamic_bypass_interface/if_type (mpls-if-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_if_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_if_type() directly.
YANG Description: Interface type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'loopback-interface': {'value': 7}, u'ethernet-interface': {'value': 2}, u'port-channel-interface': {'value': 5}, u'unknown-interface': {'value': 1}, u've-interface': {'value': 6}, u'fbr-channel-interface': {'value': 8}},), is_leaf=True, yang_name="if-type", rest_name="if-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-if-type', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """if_type must be of a type compatible with mpls-if-type""",
'defined-type': "brocade-mpls-operational:mpls-if-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'loopback-interface': {'value': 7}, u'ethernet-interface': {'value': 2}, u'port-channel-interface': {'value': 5}, u'unknown-interface': {'value': 1}, u've-interface': {'value': 6}, u'fbr-channel-interface': {'value': 8}},), is_leaf=True, yang_name="if-type", rest_name="if-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-if-type', is_config=False)""",
})
self.__if_type = t
if hasattr(self, '_set'):
self._set() | def function[_set_if_type, parameter[self, v, load]]:
constant[
Setter method for if_type, mapped from YANG variable /mpls_state/dynamic_bypass/dynamic_bypass_interface/if_type (mpls-if-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_if_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_if_type() directly.
YANG Description: Interface type
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18bcc8580>
name[self].__if_type assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_if_type] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__if_type] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_if_type(self, v, load=False):
"""
Setter method for if_type, mapped from YANG variable /mpls_state/dynamic_bypass/dynamic_bypass_interface/if_type (mpls-if-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_if_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_if_type() directly.
YANG Description: Interface type
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'loopback-interface': {'value': 7}, u'ethernet-interface': {'value': 2}, u'port-channel-interface': {'value': 5}, u'unknown-interface': {'value': 1}, u've-interface': {'value': 6}, u'fbr-channel-interface': {'value': 8}}), is_leaf=True, yang_name='if-type', rest_name='if-type', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='mpls-if-type', is_config=False) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'if_type must be of a type compatible with mpls-if-type', 'defined-type': 'brocade-mpls-operational:mpls-if-type', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'loopback-interface\': {\'value\': 7}, u\'ethernet-interface\': {\'value\': 2}, u\'port-channel-interface\': {\'value\': 5}, u\'unknown-interface\': {\'value\': 1}, u\'ve-interface\': {\'value\': 6}, u\'fbr-channel-interface\': {\'value\': 8}},), is_leaf=True, yang_name="if-type", rest_name="if-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace=\'urn:brocade.com:mgmt:brocade-mpls-operational\', defining_module=\'brocade-mpls-operational\', yang_type=\'mpls-if-type\', is_config=False)'}) # depends on [control=['except'], data=[]]
self.__if_type = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
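The generated setter boils down to a dict_key-restricted enum check. A simplified sketch of that validation without the pyangbind YANGDynClass machinery (the plain dict and function are illustrative only):
_IF_TYPES = {'unknown-interface': 1, 'ethernet-interface': 2,
             'port-channel-interface': 5, 've-interface': 6,
             'loopback-interface': 7, 'fbr-channel-interface': 8}

def set_if_type(value):
    # Accept only the labels declared in the YANG model; anything else
    # raises the same kind of ValueError the generated code builds.
    if value not in _IF_TYPES:
        raise ValueError('if_type must be of a type compatible with mpls-if-type')
    return _IF_TYPES[value]

print(set_if_type('ve-interface'))  # 6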
def use(network=False):
"""
Creates a new isolated mock engine to be used via context manager.
Example::
with pook.use() as engine:
pook.mock('server.com/foo').reply(404)
res = requests.get('server.com/foo')
assert res.status_code == 404
"""
global _engine
# Create temporary engine
__engine = _engine
activated = __engine.active
if activated:
__engine.disable()
_engine = Engine(network=network)
_engine.activate()
# Yield engine to be used by the context manager
yield _engine
# Restore engine state
_engine.disable()
if network:
_engine.disable_network()
# Restore previous engine
_engine = __engine
if activated:
_engine.activate() | def function[use, parameter[network]]:
constant[
Creates a new isolated mock engine to be used via context manager.
Example::
with pook.use() as engine:
pook.mock('server.com/foo').reply(404)
res = requests.get('server.com/foo')
assert res.status_code == 404
]
<ast.Global object at 0x7da1b02a4400>
variable[__engine] assign[=] name[_engine]
variable[activated] assign[=] name[__engine].active
if name[activated] begin[:]
call[name[__engine].disable, parameter[]]
variable[_engine] assign[=] call[name[Engine], parameter[]]
call[name[_engine].activate, parameter[]]
<ast.Yield object at 0x7da1b03e3250>
call[name[_engine].disable, parameter[]]
if name[network] begin[:]
call[name[_engine].disable_network, parameter[]]
variable[_engine] assign[=] name[__engine]
if name[activated] begin[:]
call[name[_engine].activate, parameter[]] | keyword[def] identifier[use] ( identifier[network] = keyword[False] ):
literal[string]
keyword[global] identifier[_engine]
identifier[__engine] = identifier[_engine]
identifier[activated] = identifier[__engine] . identifier[active]
keyword[if] identifier[activated] :
identifier[__engine] . identifier[disable] ()
identifier[_engine] = identifier[Engine] ( identifier[network] = identifier[network] )
identifier[_engine] . identifier[activate] ()
keyword[yield] identifier[_engine]
identifier[_engine] . identifier[disable] ()
keyword[if] identifier[network] :
identifier[_engine] . identifier[disable_network] ()
identifier[_engine] = identifier[__engine]
keyword[if] identifier[activated] :
identifier[_engine] . identifier[activate] () | def use(network=False):
"""
Creates a new isolated mock engine to be used via context manager.
Example::
with pook.use() as engine:
pook.mock('server.com/foo').reply(404)
res = requests.get('server.com/foo')
assert res.status_code == 404
"""
global _engine
# Create temporary engine
__engine = _engine
activated = __engine.active
if activated:
__engine.disable() # depends on [control=['if'], data=[]]
_engine = Engine(network=network)
_engine.activate()
# Yield engine to be used by the context manager
yield _engine
# Restore engine state
_engine.disable()
if network:
_engine.disable_network() # depends on [control=['if'], data=[]]
# Restore previous engine
_engine = __engine
if activated:
_engine.activate() # depends on [control=['if'], data=[]] |
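use() is a generator function, so it only behaves like the docstring's `with` example once wrapped, presumably with contextlib.contextmanager where it is defined (the decorator appears stripped in this extraction). Its save/swap/restore pattern, reduced to a runnable sketch:
from contextlib import contextmanager

_current = {'engine': 'default'}

@contextmanager
def temporary_engine(new_engine):
    # Stash the active engine, install a temporary one, restore on exit.
    # Unlike use() above, the finally block also restores after errors.
    previous = _current['engine']
    _current['engine'] = new_engine
    try:
        yield new_engine
    finally:
        _current['engine'] = previous

with temporary_engine('mock'):
    print(_current['engine'])  # mock
print(_current['engine'])      # default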
def inet_ntop(family, address):
"""Convert the binary form of a network address into its textual form.
@param family: the address family
@type family: int
@param address: the binary address
@type address: string
@raises NotImplementedError: the address family specified is not
implemented.
@rtype: string
"""
if family == AF_INET:
return dns.ipv4.inet_ntoa(address)
elif family == AF_INET6:
return dns.ipv6.inet_ntoa(address)
else:
raise NotImplementedError | def function[inet_ntop, parameter[family, address]]:
constant[Convert the binary form of a network address into its textual form.
@param family: the address family
@type family: int
@param address: the binary address
@type address: string
@raises NotImplementedError: the address family specified is not
implemented.
@rtype: string
]
if compare[name[family] equal[==] name[AF_INET]] begin[:]
return[call[name[dns].ipv4.inet_ntoa, parameter[name[address]]]] | keyword[def] identifier[inet_ntop] ( identifier[family] , identifier[address] ):
literal[string]
keyword[if] identifier[family] == identifier[AF_INET] :
keyword[return] identifier[dns] . identifier[ipv4] . identifier[inet_ntoa] ( identifier[address] )
keyword[elif] identifier[family] == identifier[AF_INET6] :
keyword[return] identifier[dns] . identifier[ipv6] . identifier[inet_ntoa] ( identifier[address] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] | def inet_ntop(family, address):
"""Convert the binary form of a network address into its textual form.
@param family: the address family
@type family: int
@param address: the binary address
@type address: string
@raises NotImplementedError: the address family specified is not
implemented.
@rtype: string
"""
if family == AF_INET:
return dns.ipv4.inet_ntoa(address) # depends on [control=['if'], data=[]]
elif family == AF_INET6:
return dns.ipv6.inet_ntoa(address) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError |
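The contract mirrors the standard library: 4-byte packed addresses for AF_INET, 16-byte for AF_INET6. The stdlib equivalent shows the expected inputs and outputs:
import socket

print(socket.inet_ntop(socket.AF_INET, b'\x7f\x00\x00\x01'))      # 127.0.0.1
print(socket.inet_ntop(socket.AF_INET6, b'\x00' * 15 + b'\x01'))  # ::1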
def cascade_delete(self, name):
"this fails under diamond inheritance"
for child in self[name].child_tables:
self.cascade_delete(child.name)
del self[name] | def function[cascade_delete, parameter[self, name]]:
constant[this fails under diamond inheritance]
for taget[name[child]] in starred[call[name[self]][name[name]].child_tables] begin[:]
call[name[self].cascade_delete, parameter[name[child].name]]
<ast.Delete object at 0x7da1b158b0d0> | keyword[def] identifier[cascade_delete] ( identifier[self] , identifier[name] ):
literal[string]
keyword[for] identifier[child] keyword[in] identifier[self] [ identifier[name] ]. identifier[child_tables] :
identifier[self] . identifier[cascade_delete] ( identifier[child] . identifier[name] )
keyword[del] identifier[self] [ identifier[name] ] | def cascade_delete(self, name):
"""this fails under diamond inheritance"""
for child in self[name].child_tables:
self.cascade_delete(child.name) # depends on [control=['for'], data=['child']]
del self[name] |
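The docstring's caveat is easy to reproduce: in a diamond (two parents sharing one child), the shared child is deleted on the first visit and the second visit raises KeyError. A self-contained sketch, with Table as a made-up stand-in for the real table class:
class Table:
    def __init__(self, name, child_tables=()):
        self.name = name
        self.child_tables = list(child_tables)

def cascade_delete(tables, name):
    for child in tables[name].child_tables:
        cascade_delete(tables, child.name)
    del tables[name]

d = Table('d')
tables = {'d': d, 'b': Table('b', [d]), 'c': Table('c', [d])}
tables['a'] = Table('a', [tables['b'], tables['c']])
try:
    cascade_delete(tables, 'a')   # b deletes d, then c revisits it
except KeyError as exc:
    print('second visit to shared child fails:', exc)  # 'd'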
def get_framebuffer_size(window):
"""
Retrieves the size of the framebuffer of the specified window.
Wrapper for:
void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
"""
width_value = ctypes.c_int(0)
width = ctypes.pointer(width_value)
height_value = ctypes.c_int(0)
height = ctypes.pointer(height_value)
_glfw.glfwGetFramebufferSize(window, width, height)
return width_value.value, height_value.value | def function[get_framebuffer_size, parameter[window]]:
constant[
Retrieves the size of the framebuffer of the specified window.
Wrapper for:
void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
]
variable[width_value] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
variable[width] assign[=] call[name[ctypes].pointer, parameter[name[width_value]]]
variable[height_value] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
variable[height] assign[=] call[name[ctypes].pointer, parameter[name[height_value]]]
call[name[_glfw].glfwGetFramebufferSize, parameter[name[window], name[width], name[height]]]
return[tuple[[<ast.Attribute object at 0x7da18bc73af0>, <ast.Attribute object at 0x7da18bc708e0>]]] | keyword[def] identifier[get_framebuffer_size] ( identifier[window] ):
literal[string]
identifier[width_value] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[width] = identifier[ctypes] . identifier[pointer] ( identifier[width_value] )
identifier[height_value] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[height] = identifier[ctypes] . identifier[pointer] ( identifier[height_value] )
identifier[_glfw] . identifier[glfwGetFramebufferSize] ( identifier[window] , identifier[width] , identifier[height] )
keyword[return] identifier[width_value] . identifier[value] , identifier[height_value] . identifier[value] | def get_framebuffer_size(window):
"""
Retrieves the size of the framebuffer of the specified window.
Wrapper for:
void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
"""
width_value = ctypes.c_int(0)
width = ctypes.pointer(width_value)
height_value = ctypes.c_int(0)
height = ctypes.pointer(height_value)
_glfw.glfwGetFramebufferSize(window, width, height)
return (width_value.value, height_value.value) |
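The width/height handling is the standard ctypes out-parameter pattern: allocate a C value, pass a pointer, read .value afterwards. The same pattern against libm's modf (POSIX; find_library may return None on other platforms):
import ctypes
import ctypes.util

libm = ctypes.CDLL(ctypes.util.find_library('m'))
libm.modf.restype = ctypes.c_double
libm.modf.argtypes = [ctypes.c_double, ctypes.POINTER(ctypes.c_double)]

integral = ctypes.c_double(0.0)          # out-parameter, written by C
frac = libm.modf(3.75, ctypes.byref(integral))
print(frac, integral.value)              # 0.75 3.0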
async def get_updates(self, offset: typing.Union[base.Integer, None] = None,
limit: typing.Union[base.Integer, None] = None,
timeout: typing.Union[base.Integer, None] = None,
allowed_updates:
typing.Union[typing.List[base.String], None] = None) -> typing.List[types.Update]:
"""
Use this method to receive incoming updates using long polling (wiki).
Notes
1. This method will not work if an outgoing webhook is set up.
2. In order to avoid getting duplicate updates, recalculate offset after each server response.
Source: https://core.telegram.org/bots/api#getupdates
:param offset: Identifier of the first update to be returned
:type offset: :obj:`typing.Union[base.Integer, None]`
:param limit: Limits the number of updates to be retrieved
:type limit: :obj:`typing.Union[base.Integer, None]`
:param timeout: Timeout in seconds for long polling
:type timeout: :obj:`typing.Union[base.Integer, None]`
:param allowed_updates: List the types of updates you want your bot to receive
:type allowed_updates: :obj:`typing.Union[typing.List[base.String], None]`
:return: An Array of Update objects is returned
:rtype: :obj:`typing.List[types.Update]`
"""
allowed_updates = prepare_arg(allowed_updates)
payload = generate_payload(**locals())
result = await self.request(api.Methods.GET_UPDATES, payload)
return [types.Update(**update) for update in result] | <ast.AsyncFunctionDef object at 0x7da1b17bb8b0> | keyword[async] keyword[def] identifier[get_updates] ( identifier[self] , identifier[offset] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ,
identifier[limit] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ,
identifier[timeout] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ,
identifier[allowed_updates] :
identifier[typing] . identifier[Union] [ identifier[typing] . identifier[List] [ identifier[base] . identifier[String] ], keyword[None] ]= keyword[None] )-> identifier[typing] . identifier[List] [ identifier[types] . identifier[Update] ]:
literal[string]
identifier[allowed_updates] = identifier[prepare_arg] ( identifier[allowed_updates] )
identifier[payload] = identifier[generate_payload] (** identifier[locals] ())
identifier[result] = keyword[await] identifier[self] . identifier[request] ( identifier[api] . identifier[Methods] . identifier[GET_UPDATES] , identifier[payload] )
keyword[return] [ identifier[types] . identifier[Update] (** identifier[update] ) keyword[for] identifier[update] keyword[in] identifier[result] ] | async def get_updates(self, offset: typing.Union[base.Integer, None]=None, limit: typing.Union[base.Integer, None]=None, timeout: typing.Union[base.Integer, None]=None, allowed_updates: typing.Union[typing.List[base.String], None]=None) -> typing.List[types.Update]:
"""
Use this method to receive incoming updates using long polling (wiki).
Notes
1. This method will not work if an outgoing webhook is set up.
2. In order to avoid getting duplicate updates, recalculate offset after each server response.
Source: https://core.telegram.org/bots/api#getupdates
:param offset: Identifier of the first update to be returned
:type offset: :obj:`typing.Union[base.Integer, None]`
:param limit: Limits the number of updates to be retrieved
:type limit: :obj:`typing.Union[base.Integer, None]`
:param timeout: Timeout in seconds for long polling
:type timeout: :obj:`typing.Union[base.Integer, None]`
:param allowed_updates: List the types of updates you want your bot to receive
:type allowed_updates: :obj:`typing.Union[typing.List[base.String], None]`
:return: An Array of Update objects is returned
:rtype: :obj:`typing.List[types.Update]`
"""
allowed_updates = prepare_arg(allowed_updates)
payload = generate_payload(**locals())
result = await self.request(api.Methods.GET_UPDATES, payload)
return [types.Update(**update) for update in result] |
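Note 2 in the docstring (recalculate offset after each response) is the heart of a polling loop. A minimal sketch; bot is assumed to be an object exposing this get_updates, and handler any coroutine:
async def poll(bot, handler):
    offset = None
    while True:
        updates = await bot.get_updates(offset=offset, timeout=30)
        for update in updates:
            await handler(update)
            # Ask only for updates newer than the last one processed,
            # so nothing is delivered twice (note 2 above).
            offset = update.update_id + 1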
def delete(sld, tld, nameserver):
'''
Deletes a nameserver. Returns ``True`` if the nameserver was deleted
successfully
sld
SLD of the domain name
tld
TLD of the domain name
nameserver
Nameserver to delete
CLI Example:
.. code-block:: bash
salt '*' namecheap_domains_ns.delete sld tld nameserver
'''
opts = salt.utils.namecheap.get_opts('namecheap.domains.ns.delete')
opts['SLD'] = sld
opts['TLD'] = tld
opts['Nameserver'] = nameserver
response_xml = salt.utils.namecheap.post_request(opts)
if response_xml is None:
return False
domainnsdeleteresult = response_xml.getElementsByTagName('DomainNSDeleteResult')[0]
return salt.utils.namecheap.string_to_value(domainnsdeleteresult.getAttribute('IsSuccess')) | def function[delete, parameter[sld, tld, nameserver]]:
constant[
Deletes a nameserver. Returns ``True`` if the nameserver was deleted
successfully
sld
SLD of the domain name
tld
TLD of the domain name
nameserver
Nameserver to delete
CLI Example:
.. code-block:: bash
salt '*' namecheap_domains_ns.delete sld tld nameserver
]
variable[opts] assign[=] call[name[salt].utils.namecheap.get_opts, parameter[constant[namecheap.domains.ns.delete]]]
call[name[opts]][constant[SLD]] assign[=] name[sld]
call[name[opts]][constant[TLD]] assign[=] name[tld]
call[name[opts]][constant[Nameserver]] assign[=] name[nameserver]
variable[response_xml] assign[=] call[name[salt].utils.namecheap.post_request, parameter[name[opts]]]
if compare[name[response_xml] is constant[None]] begin[:]
return[constant[False]]
variable[domainnsdeleteresult] assign[=] call[call[name[response_xml].getElementsByTagName, parameter[constant[DomainNSDeleteResult]]]][constant[0]]
return[call[name[salt].utils.namecheap.string_to_value, parameter[call[name[domainnsdeleteresult].getAttribute, parameter[constant[IsSuccess]]]]]] | keyword[def] identifier[delete] ( identifier[sld] , identifier[tld] , identifier[nameserver] ):
literal[string]
identifier[opts] = identifier[salt] . identifier[utils] . identifier[namecheap] . identifier[get_opts] ( literal[string] )
identifier[opts] [ literal[string] ]= identifier[sld]
identifier[opts] [ literal[string] ]= identifier[tld]
identifier[opts] [ literal[string] ]= identifier[nameserver]
identifier[response_xml] = identifier[salt] . identifier[utils] . identifier[namecheap] . identifier[post_request] ( identifier[opts] )
keyword[if] identifier[response_xml] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[domainnsdeleteresult] = identifier[response_xml] . identifier[getElementsByTagName] ( literal[string] )[ literal[int] ]
keyword[return] identifier[salt] . identifier[utils] . identifier[namecheap] . identifier[string_to_value] ( identifier[domainnsdeleteresult] . identifier[getAttribute] ( literal[string] )) | def delete(sld, tld, nameserver):
"""
Deletes a nameserver. Returns ``True`` if the nameserver was deleted
successfully
sld
SLD of the domain name
tld
TLD of the domain name
nameserver
Nameserver to delete
CLI Example:
.. code-block:: bash
salt '*' namecheap_domains_ns.delete sld tld nameserver
"""
opts = salt.utils.namecheap.get_opts('namecheap.domains.ns.delete')
opts['SLD'] = sld
opts['TLD'] = tld
opts['Nameserver'] = nameserver
response_xml = salt.utils.namecheap.post_request(opts)
if response_xml is None:
return False # depends on [control=['if'], data=[]]
domainnsdeleteresult = response_xml.getElementsByTagName('DomainNSDeleteResult')[0]
return salt.utils.namecheap.string_to_value(domainnsdeleteresult.getAttribute('IsSuccess')) |
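string_to_value ultimately reads the IsSuccess attribute off the XML response. A stdlib-only sketch of that parsing step, with a canned response in place of the real API call:
from xml.dom.minidom import parseString

response_xml = parseString(
    '<ApiResponse><CommandResponse>'
    '<DomainNSDeleteResult IsSuccess="true"/>'
    '</CommandResponse></ApiResponse>')
result = response_xml.getElementsByTagName('DomainNSDeleteResult')[0]
# A plain string comparison stands in for salt.utils.namecheap.string_to_value.
print(result.getAttribute('IsSuccess').lower() == 'true')  # True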
def _sasl_authenticate(self, stream, username, authzid):
"""Start SASL authentication process.
[initiating entity only]
:Parameters:
- `username`: user name.
- `authzid`: authorization ID.
- `mechanism`: SASL mechanism to use."""
if not stream.initiator:
raise SASLAuthenticationFailed("Only initiating entity start"
" SASL authentication")
if stream.features is None or not self.peer_sasl_mechanisms:
raise SASLNotAvailable("Peer doesn't support SASL")
props = dict(stream.auth_properties)
if not props.get("service-domain") and (
stream.peer and stream.peer.domain):
props["service-domain"] = stream.peer.domain
if username is not None:
props["username"] = username
if authzid is not None:
props["authzid"] = authzid
if "password" in self.settings:
props["password"] = self.settings["password"]
props["available_mechanisms"] = self.peer_sasl_mechanisms
enabled = sasl.filter_mechanism_list(
self.settings['sasl_mechanisms'], props,
self.settings['insecure_auth'])
if not enabled:
raise SASLNotAvailable(
"None of SASL mechanism selected can be used")
props["enabled_mechanisms"] = enabled
mechanism = None
for mech in enabled:
if mech in self.peer_sasl_mechanisms:
mechanism = mech
break
if not mechanism:
raise SASLMechanismNotAvailable("Peer doesn't support any of"
" our SASL mechanisms")
logger.debug("Our mechanism: {0!r}".format(mechanism))
stream.auth_method_used = mechanism
self.authenticator = sasl.client_authenticator_factory(mechanism)
initial_response = self.authenticator.start(props)
if not isinstance(initial_response, sasl.Response):
raise SASLAuthenticationFailed("SASL initiation failed")
element = ElementTree.Element(AUTH_TAG)
element.set("mechanism", mechanism)
if initial_response.data:
if initial_response.encode:
element.text = initial_response.encode()
else:
element.text = initial_response.data
stream.write_element(element) | def function[_sasl_authenticate, parameter[self, stream, username, authzid]]:
constant[Start SASL authentication process.
[initiating entity only]
:Parameters:
- `username`: user name.
- `authzid`: authorization ID.
- `mechanism`: SASL mechanism to use.]
if <ast.UnaryOp object at 0x7da18eb55e40> begin[:]
<ast.Raise object at 0x7da18eb57c70>
if <ast.BoolOp object at 0x7da18eb56b00> begin[:]
<ast.Raise object at 0x7da18eb54c40>
variable[props] assign[=] call[name[dict], parameter[name[stream].auth_properties]]
if <ast.BoolOp object at 0x7da18eb551b0> begin[:]
call[name[props]][constant[service-domain]] assign[=] name[stream].peer.domain
if compare[name[username] is_not constant[None]] begin[:]
call[name[props]][constant[username]] assign[=] name[username]
if compare[name[authzid] is_not constant[None]] begin[:]
call[name[props]][constant[authzid]] assign[=] name[authzid]
if compare[constant[password] in name[self].settings] begin[:]
call[name[props]][constant[password]] assign[=] call[name[self].settings][constant[password]]
call[name[props]][constant[available_mechanisms]] assign[=] name[self].peer_sasl_mechanisms
variable[enabled] assign[=] call[name[sasl].filter_mechanism_list, parameter[call[name[self].settings][constant[sasl_mechanisms]], name[props], call[name[self].settings][constant[insecure_auth]]]]
if <ast.UnaryOp object at 0x7da18eb54460> begin[:]
<ast.Raise object at 0x7da18eb55570>
call[name[props]][constant[enabled_mechanisms]] assign[=] name[enabled]
variable[mechanism] assign[=] constant[None]
for taget[name[mech]] in starred[name[enabled]] begin[:]
if compare[name[mech] in name[self].peer_sasl_mechanisms] begin[:]
variable[mechanism] assign[=] name[mech]
break
if <ast.UnaryOp object at 0x7da1b021eb60> begin[:]
<ast.Raise object at 0x7da1b021d210>
call[name[logger].debug, parameter[call[constant[Our mechanism: {0!r}].format, parameter[name[mechanism]]]]]
name[stream].auth_method_used assign[=] name[mechanism]
name[self].authenticator assign[=] call[name[sasl].client_authenticator_factory, parameter[name[mechanism]]]
variable[initial_response] assign[=] call[name[self].authenticator.start, parameter[name[props]]]
if <ast.UnaryOp object at 0x7da204963f40> begin[:]
<ast.Raise object at 0x7da1b021c5e0>
variable[element] assign[=] call[name[ElementTree].Element, parameter[name[AUTH_TAG]]]
call[name[element].set, parameter[constant[mechanism], name[mechanism]]]
if name[initial_response].data begin[:]
if name[initial_response].encode begin[:]
name[element].text assign[=] call[name[initial_response].encode, parameter[]]
call[name[stream].write_element, parameter[name[element]]] | keyword[def] identifier[_sasl_authenticate] ( identifier[self] , identifier[stream] , identifier[username] , identifier[authzid] ):
literal[string]
keyword[if] keyword[not] identifier[stream] . identifier[initiator] :
keyword[raise] identifier[SASLAuthenticationFailed] ( literal[string]
literal[string] )
keyword[if] identifier[stream] . identifier[features] keyword[is] keyword[None] keyword[or] keyword[not] identifier[self] . identifier[peer_sasl_mechanisms] :
keyword[raise] identifier[SASLNotAvailable] ( literal[string] )
identifier[props] = identifier[dict] ( identifier[stream] . identifier[auth_properties] )
keyword[if] keyword[not] identifier[props] . identifier[get] ( literal[string] ) keyword[and] (
identifier[stream] . identifier[peer] keyword[and] identifier[stream] . identifier[peer] . identifier[domain] ):
identifier[props] [ literal[string] ]= identifier[stream] . identifier[peer] . identifier[domain]
keyword[if] identifier[username] keyword[is] keyword[not] keyword[None] :
identifier[props] [ literal[string] ]= identifier[username]
keyword[if] identifier[authzid] keyword[is] keyword[not] keyword[None] :
identifier[props] [ literal[string] ]= identifier[authzid]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[settings] :
identifier[props] [ literal[string] ]= identifier[self] . identifier[settings] [ literal[string] ]
identifier[props] [ literal[string] ]= identifier[self] . identifier[peer_sasl_mechanisms]
identifier[enabled] = identifier[sasl] . identifier[filter_mechanism_list] (
identifier[self] . identifier[settings] [ literal[string] ], identifier[props] ,
identifier[self] . identifier[settings] [ literal[string] ])
keyword[if] keyword[not] identifier[enabled] :
keyword[raise] identifier[SASLNotAvailable] (
literal[string] )
identifier[props] [ literal[string] ]= identifier[enabled]
identifier[mechanism] = keyword[None]
keyword[for] identifier[mech] keyword[in] identifier[enabled] :
keyword[if] identifier[mech] keyword[in] identifier[self] . identifier[peer_sasl_mechanisms] :
identifier[mechanism] = identifier[mech]
keyword[break]
keyword[if] keyword[not] identifier[mechanism] :
keyword[raise] identifier[SASLMechanismNotAvailable] ( literal[string]
literal[string] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[mechanism] ))
identifier[stream] . identifier[auth_method_used] = identifier[mechanism]
identifier[self] . identifier[authenticator] = identifier[sasl] . identifier[client_authenticator_factory] ( identifier[mechanism] )
identifier[initial_response] = identifier[self] . identifier[authenticator] . identifier[start] ( identifier[props] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[initial_response] , identifier[sasl] . identifier[Response] ):
keyword[raise] identifier[SASLAuthenticationFailed] ( literal[string] )
identifier[element] = identifier[ElementTree] . identifier[Element] ( identifier[AUTH_TAG] )
identifier[element] . identifier[set] ( literal[string] , identifier[mechanism] )
keyword[if] identifier[initial_response] . identifier[data] :
keyword[if] identifier[initial_response] . identifier[encode] :
identifier[element] . identifier[text] = identifier[initial_response] . identifier[encode] ()
keyword[else] :
identifier[element] . identifier[text] = identifier[initial_response] . identifier[data]
identifier[stream] . identifier[write_element] ( identifier[element] ) | def _sasl_authenticate(self, stream, username, authzid):
"""Start SASL authentication process.
[initiating entity only]
:Parameters:
- `username`: user name.
- `authzid`: authorization ID.
- `mechanism`: SASL mechanism to use."""
if not stream.initiator:
raise SASLAuthenticationFailed('Only the initiating entity can start SASL authentication') # depends on [control=['if'], data=[]]
if stream.features is None or not self.peer_sasl_mechanisms:
raise SASLNotAvailable("Peer doesn't support SASL") # depends on [control=['if'], data=[]]
props = dict(stream.auth_properties)
if not props.get('service-domain') and (stream.peer and stream.peer.domain):
props['service-domain'] = stream.peer.domain # depends on [control=['if'], data=[]]
if username is not None:
props['username'] = username # depends on [control=['if'], data=['username']]
if authzid is not None:
props['authzid'] = authzid # depends on [control=['if'], data=['authzid']]
if 'password' in self.settings:
props['password'] = self.settings['password'] # depends on [control=['if'], data=[]]
props['available_mechanisms'] = self.peer_sasl_mechanisms
enabled = sasl.filter_mechanism_list(self.settings['sasl_mechanisms'], props, self.settings['insecure_auth'])
if not enabled:
raise SASLNotAvailable('None of SASL mechanism selected can be used') # depends on [control=['if'], data=[]]
props['enabled_mechanisms'] = enabled
mechanism = None
for mech in enabled:
if mech in self.peer_sasl_mechanisms:
mechanism = mech
break # depends on [control=['if'], data=['mech']] # depends on [control=['for'], data=['mech']]
if not mechanism:
raise SASLMechanismNotAvailable("Peer doesn't support any of our SASL mechanisms") # depends on [control=['if'], data=[]]
logger.debug('Our mechanism: {0!r}'.format(mechanism))
stream.auth_method_used = mechanism
self.authenticator = sasl.client_authenticator_factory(mechanism)
initial_response = self.authenticator.start(props)
if not isinstance(initial_response, sasl.Response):
raise SASLAuthenticationFailed('SASL initiation failed') # depends on [control=['if'], data=[]]
element = ElementTree.Element(AUTH_TAG)
element.set('mechanism', mechanism)
if initial_response.data:
if initial_response.encode:
element.text = initial_response.encode() # depends on [control=['if'], data=[]]
else:
element.text = initial_response.data # depends on [control=['if'], data=[]]
stream.write_element(element) |
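The negotiation step reduces to: filter our configured mechanisms, then take the first one the peer also advertises, preserving our preference order. Isolated as a sketch:
def pick_mechanism(enabled, peer_mechanisms):
    # `enabled` keeps the local preference order; the peer set only gates it.
    for mech in enabled:
        if mech in peer_mechanisms:
            return mech
    raise RuntimeError("Peer doesn't support any of our SASL mechanisms")

print(pick_mechanism(['SCRAM-SHA-1', 'DIGEST-MD5', 'PLAIN'],
                     {'PLAIN', 'DIGEST-MD5'}))  # DIGEST-MD5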
def get_param_WLS(A, C_D_inv, d, inv_bool=True):
"""
returns the parameter values given
:param A: response matrix Nd x Ns (Nd = # data points, Ns = # parameters)
:param C_D_inv: inverse covariance matrix of the data, Nd x Nd, diagonal form
:param d: data array, 1-d Nd
:param inv_bool: boolean, whether to also return the inverse matrix or just solve the linear system
:return: 1-d array of parameter values
"""
M = A.T.dot(np.multiply(C_D_inv, A.T).T)
if inv_bool:
if np.linalg.cond(M) < 5/sys.float_info.epsilon:
try:
M_inv = np.linalg.inv(M)
except np.linalg.LinAlgError:
M_inv = np.zeros_like(M)
else:
M_inv = np.zeros_like(M)
R = A.T.dot(np.multiply(C_D_inv, d))
B = M_inv.dot(R)
else:
if np.linalg.cond(M) < 5/sys.float_info.epsilon:
R = A.T.dot(np.multiply(C_D_inv, d))
try:
B = np.linalg.solve(M, R).T
except np.linalg.LinAlgError:
B = np.zeros(len(A.T))
else:
B = np.zeros(len(A.T))
M_inv = None
image = A.dot(B)
return B, M_inv, image | def function[get_param_WLS, parameter[A, C_D_inv, d, inv_bool]]:
constant[
returns the parameter values given
:param A: response matrix Nd x Ns (Nd = # data points, Ns = # parameters)
:param C_D_inv: inverse covariance matrix of the data, Nd x Nd, diagonal form
:param d: data array, 1-d Nd
:param inv_bool: boolean, whether to also return the inverse matrix or just solve the linear system
:return: 1-d array of parameter values
]
variable[M] assign[=] call[name[A].T.dot, parameter[call[name[np].multiply, parameter[name[C_D_inv], name[A].T]].T]]
if name[inv_bool] begin[:]
if compare[call[name[np].linalg.cond, parameter[name[M]]] less[<] binary_operation[constant[5] / name[sys].float_info.epsilon]] begin[:]
<ast.Try object at 0x7da18dc9b820>
variable[R] assign[=] call[name[A].T.dot, parameter[call[name[np].multiply, parameter[name[C_D_inv], name[d]]]]]
variable[B] assign[=] call[name[M_inv].dot, parameter[name[R]]]
variable[image] assign[=] call[name[A].dot, parameter[name[B]]]
return[tuple[[<ast.Name object at 0x7da1b26ac4c0>, <ast.Name object at 0x7da1b26af790>, <ast.Name object at 0x7da1b26ae8f0>]]] | keyword[def] identifier[get_param_WLS] ( identifier[A] , identifier[C_D_inv] , identifier[d] , identifier[inv_bool] = keyword[True] ):
literal[string]
identifier[M] = identifier[A] . identifier[T] . identifier[dot] ( identifier[np] . identifier[multiply] ( identifier[C_D_inv] , identifier[A] . identifier[T] ). identifier[T] )
keyword[if] identifier[inv_bool] :
keyword[if] identifier[np] . identifier[linalg] . identifier[cond] ( identifier[M] )< literal[int] / identifier[sys] . identifier[float_info] . identifier[epsilon] :
keyword[try] :
identifier[M_inv] = identifier[np] . identifier[linalg] . identifier[inv] ( identifier[M] )
keyword[except] identifier[np] . identifier[linalg] . identifier[LinAlgError] :
identifier[M_inv] = identifier[np] . identifier[zeros_like] ( identifier[M] )
keyword[else] :
identifier[M_inv] = identifier[np] . identifier[zeros_like] ( identifier[M] )
identifier[R] = identifier[A] . identifier[T] . identifier[dot] ( identifier[np] . identifier[multiply] ( identifier[C_D_inv] , identifier[d] ))
identifier[B] = identifier[M_inv] . identifier[dot] ( identifier[R] )
keyword[else] :
keyword[if] identifier[np] . identifier[linalg] . identifier[cond] ( identifier[M] )< literal[int] / identifier[sys] . identifier[float_info] . identifier[epsilon] :
identifier[R] = identifier[A] . identifier[T] . identifier[dot] ( identifier[np] . identifier[multiply] ( identifier[C_D_inv] , identifier[d] ))
keyword[try] :
identifier[B] = identifier[np] . identifier[linalg] . identifier[solve] ( identifier[M] , identifier[R] ). identifier[T]
keyword[except] identifier[np] . identifier[linalg] . identifier[LinAlgError] :
identifier[B] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[A] . identifier[T] ))
keyword[else] :
identifier[B] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[A] . identifier[T] ))
identifier[M_inv] = keyword[None]
identifier[image] = identifier[A] . identifier[dot] ( identifier[B] )
keyword[return] identifier[B] , identifier[M_inv] , identifier[image] | def get_param_WLS(A, C_D_inv, d, inv_bool=True):
"""
returns the parameter values given
:param A: response matrix Nd x Ns (Nd = # data points, Ns = # parameters)
:param C_D_inv: inverse covariance matrix of the data, Nd x Nd, diagonal form
:param d: data array, 1-d Nd
:param inv_bool: boolean, whether to also return the inverse matrix or just solve the linear system
:return: 1-d array of parameter values
"""
M = A.T.dot(np.multiply(C_D_inv, A.T).T)
if inv_bool:
if np.linalg.cond(M) < 5 / sys.float_info.epsilon:
try:
M_inv = np.linalg.inv(M) # depends on [control=['try'], data=[]]
except np.linalg.LinAlgError:
M_inv = np.zeros_like(M) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
M_inv = np.zeros_like(M)
R = A.T.dot(np.multiply(C_D_inv, d))
B = M_inv.dot(R) # depends on [control=['if'], data=[]]
else:
if np.linalg.cond(M) < 5 / sys.float_info.epsilon:
R = A.T.dot(np.multiply(C_D_inv, d))
try:
B = np.linalg.solve(M, R).T # depends on [control=['try'], data=[]]
except np.linalg.LinAlgError:
B = np.zeros(len(A.T)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
B = np.zeros(len(A.T))
M_inv = None
image = A.dot(B)
return (B, M_inv, image) |
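M and R above are the weighted normal equations, M = A^T C_D^-1 A and R = A^T C_D^-1 d, so B solves M B = R. A quick NumPy check on synthetic noiseless data:
import numpy as np

rng = np.random.default_rng(1)
A = rng.normal(size=(20, 3))       # response matrix, Nd x Ns
C_D_inv = np.full(20, 4.0)         # diagonal inverse covariance
truth = np.array([1.0, -2.0, 0.5])
d = A @ truth                      # noiseless data

M = A.T @ (C_D_inv[:, None] * A)   # A^T C_D^-1 A
R = A.T @ (C_D_inv * d)            # A^T C_D^-1 d
B = np.linalg.solve(M, R)
print(np.allclose(B, truth))       # True: WLS recovers the parameters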
def _w_within_shard(args: Dict[str, Any]):
"""Applies a W gate when the gate acts only within a shard."""
index = args['index']
half_turns = args['half_turns']
axis_half_turns = args['axis_half_turns']
state = _state_shard(args)
pm_vect = _pm_vects(args)[index]
num_shard_qubits = args['num_shard_qubits']
shard_size = 2 ** num_shard_qubits
reshape_tuple = (2 ** (num_shard_qubits - 1 - index), 2, 2 ** index)
perm_state = np.reshape(
np.reshape(state, reshape_tuple)[:, ::-1, :], shard_size)
cos = np.cos(-0.5 * np.pi * half_turns)
sin = np.sin(-0.5 * np.pi * half_turns)
cos_axis = np.cos(np.pi * axis_half_turns)
sin_axis = np.sin(np.pi * axis_half_turns)
new_state = cos * state + 1j * sin * perm_state * (
cos_axis - 1j * sin_axis * pm_vect)
np.copyto(state, new_state) | def function[_w_within_shard, parameter[args]]:
constant[Applies a W gate when the gate acts only within a shard.]
variable[index] assign[=] call[name[args]][constant[index]]
variable[half_turns] assign[=] call[name[args]][constant[half_turns]]
variable[axis_half_turns] assign[=] call[name[args]][constant[axis_half_turns]]
variable[state] assign[=] call[name[_state_shard], parameter[name[args]]]
variable[pm_vect] assign[=] call[call[name[_pm_vects], parameter[name[args]]]][name[index]]
variable[num_shard_qubits] assign[=] call[name[args]][constant[num_shard_qubits]]
variable[shard_size] assign[=] binary_operation[constant[2] ** name[num_shard_qubits]]
variable[reshape_tuple] assign[=] tuple[[<ast.BinOp object at 0x7da1b1c3d300>, <ast.Constant object at 0x7da1b1c3c700>, <ast.BinOp object at 0x7da1b1c3d180>]]
variable[perm_state] assign[=] call[name[np].reshape, parameter[call[call[name[np].reshape, parameter[name[state], name[reshape_tuple]]]][tuple[[<ast.Slice object at 0x7da1b1c3fd60>, <ast.Slice object at 0x7da1b1c3eef0>, <ast.Slice object at 0x7da1b1c3e470>]]], name[shard_size]]]
variable[cos] assign[=] call[name[np].cos, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1c3cb80> * name[np].pi] * name[half_turns]]]]
variable[sin] assign[=] call[name[np].sin, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1c3d780> * name[np].pi] * name[half_turns]]]]
variable[cos_axis] assign[=] call[name[np].cos, parameter[binary_operation[name[np].pi * name[axis_half_turns]]]]
variable[sin_axis] assign[=] call[name[np].sin, parameter[binary_operation[name[np].pi * name[axis_half_turns]]]]
variable[new_state] assign[=] binary_operation[binary_operation[name[cos] * name[state]] + binary_operation[binary_operation[binary_operation[constant[1j] * name[sin]] * name[perm_state]] * binary_operation[name[cos_axis] - binary_operation[binary_operation[constant[1j] * name[sin_axis]] * name[pm_vect]]]]]
call[name[np].copyto, parameter[name[state], name[new_state]]] | keyword[def] identifier[_w_within_shard] ( identifier[args] : identifier[Dict] [ identifier[str] , identifier[Any] ]):
literal[string]
identifier[index] = identifier[args] [ literal[string] ]
identifier[half_turns] = identifier[args] [ literal[string] ]
identifier[axis_half_turns] = identifier[args] [ literal[string] ]
identifier[state] = identifier[_state_shard] ( identifier[args] )
identifier[pm_vect] = identifier[_pm_vects] ( identifier[args] )[ identifier[index] ]
identifier[num_shard_qubits] = identifier[args] [ literal[string] ]
identifier[shard_size] = literal[int] ** identifier[num_shard_qubits]
identifier[reshape_tuple] =( literal[int] **( identifier[num_shard_qubits] - literal[int] - identifier[index] ), literal[int] , literal[int] ** identifier[index] )
identifier[perm_state] = identifier[np] . identifier[reshape] (
identifier[np] . identifier[reshape] ( identifier[state] , identifier[reshape_tuple] )[:,::- literal[int] ,:], identifier[shard_size] )
identifier[cos] = identifier[np] . identifier[cos] (- literal[int] * identifier[np] . identifier[pi] * identifier[half_turns] )
identifier[sin] = identifier[np] . identifier[sin] (- literal[int] * identifier[np] . identifier[pi] * identifier[half_turns] )
identifier[cos_axis] = identifier[np] . identifier[cos] ( identifier[np] . identifier[pi] * identifier[axis_half_turns] )
identifier[sin_axis] = identifier[np] . identifier[sin] ( identifier[np] . identifier[pi] * identifier[axis_half_turns] )
identifier[new_state] = identifier[cos] * identifier[state] + literal[int] * identifier[sin] * identifier[perm_state] *(
identifier[cos_axis] - literal[int] * identifier[sin_axis] * identifier[pm_vect] )
identifier[np] . identifier[copyto] ( identifier[state] , identifier[new_state] ) | def _w_within_shard(args: Dict[str, Any]):
"""Applies a W gate when the gate acts only within a shard."""
index = args['index']
half_turns = args['half_turns']
axis_half_turns = args['axis_half_turns']
state = _state_shard(args)
pm_vect = _pm_vects(args)[index]
num_shard_qubits = args['num_shard_qubits']
shard_size = 2 ** num_shard_qubits
reshape_tuple = (2 ** (num_shard_qubits - 1 - index), 2, 2 ** index)
perm_state = np.reshape(np.reshape(state, reshape_tuple)[:, ::-1, :], shard_size)
cos = np.cos(-0.5 * np.pi * half_turns)
sin = np.sin(-0.5 * np.pi * half_turns)
cos_axis = np.cos(np.pi * axis_half_turns)
sin_axis = np.sin(np.pi * axis_half_turns)
new_state = cos * state + 1j * sin * perm_state * (cos_axis - 1j * sin_axis * pm_vect)
np.copyto(state, new_state) |
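For one qubit, the vectorized update above is multiplication by exp(-i*pi*half_turns/2 * (cos(pi*a) X + sin(pi*a) Y)). A NumPy sketch checking the elementwise form against the explicit 2x2 matrix, assuming _pm_vects yields +1/-1 for target bit 0/1 (an assumption about that helper):
import numpy as np

def w_matrix(half_turns, axis_half_turns):
    # cos(pi*t/2) I - i sin(pi*t/2) (cos(pi*a) X + sin(pi*a) Y), expanded
    # with the same sign conventions as the shard update above.
    c, s = np.cos(-0.5 * np.pi * half_turns), np.sin(-0.5 * np.pi * half_turns)
    ca, sa = np.cos(np.pi * axis_half_turns), np.sin(np.pi * axis_half_turns)
    X = np.array([[0, 1], [1, 0]], dtype=complex)
    Y = np.array([[0, -1j], [1j, 0]], dtype=complex)
    return c * np.eye(2) + 1j * s * (ca * X + sa * Y)

half, axis = 0.3, 0.7
state = np.array([0.6, 0.8], dtype=complex)
perm = state[::-1]             # bit-swapped amplitudes
pm = np.array([1.0, -1.0])     # assumed +1/-1 sign vector
c, s = np.cos(-0.5 * np.pi * half), np.sin(-0.5 * np.pi * half)
ca, sa = np.cos(np.pi * axis), np.sin(np.pi * axis)
vectorized = c * state + 1j * s * perm * (ca - 1j * sa * pm)
print(np.allclose(vectorized, w_matrix(half, axis) @ state))  # True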
def render_with(template=None, json=False, jsonp=False):
"""
Decorator to render the wrapped function with the given template (or dictionary
of mimetype keys to templates, where the template is a string name of a template
file or a callable that returns a Response). The function's return value must be
a dictionary and is passed to the template as parameters. Callable templates get
a single parameter with the function's return value. Usage::
@app.route('/myview')
@render_with('myview.html')
def myview():
return {'data': 'value'}
@app.route('/myview_with_json')
@render_with('myview.html', json=True)
def myview_with_json():
return {'data': 'value'}
@app.route('/otherview')
@render_with({
'text/html': 'otherview.html',
'text/xml': 'otherview.xml'})
def otherview():
return {'data': 'value'}
@app.route('/404view')
@render_with('myview.html')
def myview():
return {'error': '404 Not Found'}, 404
@app.route('/headerview')
@render_with('myview.html')
def myview():
return {'data': 'value'}, 200, {'X-Header': 'Header value'}
When a mimetype is specified and the template is not a callable, the response is
returned with the same mimetype. Callable templates must return Response objects
to ensure the correct mimetype is set.
If a dictionary of templates is provided and does not include a handler for ``*/*``,
render_with will attempt to use the handler for (in order) ``text/html``, ``text/plain``
and the various JSON types, falling back to rendering the value into a unicode string.
If the method is called outside a request context, the wrapped method's original
return value is returned. This is meant to facilitate testing and should not be
used to call the method from within another view handler as the presence of a
request context will trigger template rendering.
Rendering may also be suspended by calling the view handler with ``_render=False``.
render_with provides JSON and JSONP handlers for the ``application/json``,
``text/json`` and ``text/x-json`` mimetypes if ``json`` or ``jsonp`` is True
(default is False).
:param template: Single template, or dictionary of MIME type to templates. If the
template is a callable, it is called with the output of the wrapped function
:param json: Helper to add a JSON handler (default is False)
:param jsonp: Helper to add a JSONP handler (if True, also provides JSON, default is False)
"""
if jsonp:
templates = {
'application/json': dict_jsonp,
'application/javascript': dict_jsonp,
}
elif json:
templates = {
'application/json': dict_jsonify,
}
else:
templates = {}
if isinstance(template, six.string_types):
templates['text/html'] = template
elif isinstance(template, dict):
templates.update(template)
elif template is None and (json or jsonp):
pass
else: # pragma: no cover
raise ValueError("Expected string or dict for template")
default_mimetype = '*/*'
if '*/*' not in templates:
templates['*/*'] = six.text_type
default_mimetype = 'text/plain'
for mimetype in ('text/html', 'text/plain', 'application/json'):
if mimetype in templates:
templates['*/*'] = templates[mimetype]
default_mimetype = mimetype # Remember which mimetype's handler is serving for */*
break
template_mimetypes = list(templates.keys())
template_mimetypes.remove('*/*') # */* messes up matching, so supply it only as last resort
def inner(f):
@wraps(f)
def decorated_function(*args, **kwargs):
# Check if we need to bypass rendering
render = kwargs.pop('_render', True)
# Get the result
result = f(*args, **kwargs)
# Is the result a Response object? Don't attempt rendering
if isinstance(result, (Response, WerkzeugResponse, current_app.response_class)):
return result
# Did the result include status code and headers?
if isinstance(result, tuple):
resultset = result
result = resultset[0]
if len(resultset) > 1:
status_code = resultset[1]
else:
status_code = None
if len(resultset) > 2:
headers = Headers(resultset[2])
else:
headers = Headers()
else:
status_code = None
headers = Headers()
if len(templates) > 1: # If we have more than one template handler
if 'Vary' in headers:
vary_values = [item.strip() for item in headers['Vary'].split(',')]
if 'Accept' not in vary_values:
vary_values.append('Accept')
headers['Vary'] = ', '.join(vary_values)
else:
headers['Vary'] = 'Accept'
# Find a matching mimetype between Accept headers and available templates
use_mimetype = None
if render and request:
# We do not use request.accept_mimetypes.best_match because it turns out to
# be buggy: it returns the least match instead of the best match.
# use_mimetype = request.accept_mimetypes.best_match(template_mimetypes, '*/*')
use_mimetype = _best_mimetype_match(template_mimetypes, request.accept_mimetypes, '*/*')
# Now render the result with the template for the mimetype
if use_mimetype is not None:
if callable(templates[use_mimetype]):
rendered = templates[use_mimetype](result)
if isinstance(rendered, Response):
if status_code is not None:
rendered.status_code = status_code
if headers is not None:
rendered.headers.extend(headers)
else:
rendered = current_app.response_class(
rendered,
status=status_code,
headers=headers,
mimetype=default_mimetype if use_mimetype == '*/*' else use_mimetype)
else: # Not a callable mimetype. Render as a jinja2 template
rendered = current_app.response_class(
render_template(templates[use_mimetype], **result),
status=status_code or 200, headers=headers,
mimetype=default_mimetype if use_mimetype == '*/*' else use_mimetype)
return rendered
else:
return result
return decorated_function
return inner | def function[render_with, parameter[template, json, jsonp]]:
constant[
Decorator to render the wrapped function with the given template (or dictionary
of mimetype keys to templates, where the template is a string name of a template
file or a callable that returns a Response). The function's return value must be
a dictionary and is passed to the template as parameters. Callable templates get
a single parameter with the function's return value. Usage::
@app.route('/myview')
@render_with('myview.html')
def myview():
return {'data': 'value'}
@app.route('/myview_with_json')
@render_with('myview.html', json=True)
def myview_with_json():
return {'data': 'value'}
@app.route('/otherview')
@render_with({
'text/html': 'otherview.html',
'text/xml': 'otherview.xml'})
def otherview():
return {'data': 'value'}
@app.route('/404view')
@render_with('myview.html')
def myview():
return {'error': '404 Not Found'}, 404
@app.route('/headerview')
@render_with('myview.html')
def myview():
return {'data': 'value'}, 200, {'X-Header': 'Header value'}
When a mimetype is specified and the template is not a callable, the response is
returned with the same mimetype. Callable templates must return Response objects
to ensure the correct mimetype is set.
If a dictionary of templates is provided and does not include a handler for ``*/*``,
render_with will attempt to use the handler for (in order) ``text/html``, ``text/plain``
and the various JSON types, falling back to rendering the value into a unicode string.
If the method is called outside a request context, the wrapped method's original
return value is returned. This is meant to facilitate testing and should not be
used to call the method from within another view handler as the presence of a
request context will trigger template rendering.
Rendering may also be suspended by calling the view handler with ``_render=False``.
render_with provides JSON and JSONP handlers for the ``application/json``,
``text/json`` and ``text/x-json`` mimetypes if ``json`` or ``jsonp`` is True
(default is False).
:param template: Single template, or dictionary of MIME type to templates. If the
template is a callable, it is called with the output of the wrapped function
:param json: Helper to add a JSON handler (default is False)
:param jsonp: Helper to add a JSONP handler (if True, also provides JSON, default is False)
]
if name[jsonp] begin[:]
variable[templates] assign[=] dictionary[[<ast.Constant object at 0x7da18f720bb0>, <ast.Constant object at 0x7da18f7222c0>], [<ast.Name object at 0x7da18f7211e0>, <ast.Name object at 0x7da18f720d60>]]
if call[name[isinstance], parameter[name[template], name[six].string_types]] begin[:]
call[name[templates]][constant[text/html]] assign[=] name[template]
variable[default_mimetype] assign[=] constant[*/*]
if compare[constant[*/*] <ast.NotIn object at 0x7da2590d7190> name[templates]] begin[:]
call[name[templates]][constant[*/*]] assign[=] name[six].text_type
variable[default_mimetype] assign[=] constant[text/plain]
for taget[name[mimetype]] in starred[tuple[[<ast.Constant object at 0x7da18f721330>, <ast.Constant object at 0x7da18f723df0>, <ast.Constant object at 0x7da18f7231f0>]]] begin[:]
if compare[name[mimetype] in name[templates]] begin[:]
call[name[templates]][constant[*/*]] assign[=] call[name[templates]][name[mimetype]]
variable[default_mimetype] assign[=] name[mimetype]
break
variable[template_mimetypes] assign[=] call[name[list], parameter[call[name[templates].keys, parameter[]]]]
call[name[template_mimetypes].remove, parameter[constant[*/*]]]
def function[inner, parameter[f]]:
def function[decorated_function, parameter[]]:
variable[render] assign[=] call[name[kwargs].pop, parameter[constant[_render], constant[True]]]
variable[result] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da18f720700>]]
if call[name[isinstance], parameter[name[result], tuple[[<ast.Name object at 0x7da18f721150>, <ast.Name object at 0x7da18f723ac0>, <ast.Attribute object at 0x7da18f723d60>]]]] begin[:]
return[name[result]]
if call[name[isinstance], parameter[name[result], name[tuple]]] begin[:]
variable[resultset] assign[=] name[result]
variable[result] assign[=] call[name[resultset]][constant[0]]
if compare[call[name[len], parameter[name[resultset]]] greater[>] constant[1]] begin[:]
variable[status_code] assign[=] call[name[resultset]][constant[1]]
if compare[call[name[len], parameter[name[resultset]]] greater[>] constant[2]] begin[:]
variable[headers] assign[=] call[name[Headers], parameter[call[name[resultset]][constant[2]]]]
if compare[call[name[len], parameter[name[templates]]] greater[>] constant[1]] begin[:]
if compare[constant[Vary] in name[headers]] begin[:]
variable[vary_values] assign[=] <ast.ListComp object at 0x7da18f00f820>
if compare[constant[Accept] <ast.NotIn object at 0x7da2590d7190> name[vary_values]] begin[:]
call[name[vary_values].append, parameter[constant[Accept]]]
call[name[headers]][constant[Vary]] assign[=] call[constant[, ].join, parameter[name[vary_values]]]
variable[use_mimetype] assign[=] constant[None]
if <ast.BoolOp object at 0x7da18f00fbb0> begin[:]
variable[use_mimetype] assign[=] call[name[_best_mimetype_match], parameter[name[template_mimetypes], name[request].accept_mimetypes, constant[*/*]]]
if compare[name[use_mimetype] is_not constant[None]] begin[:]
if call[name[callable], parameter[call[name[templates]][name[use_mimetype]]]] begin[:]
variable[rendered] assign[=] call[call[name[templates]][name[use_mimetype]], parameter[name[result]]]
if call[name[isinstance], parameter[name[rendered], name[Response]]] begin[:]
if compare[name[status_code] is_not constant[None]] begin[:]
name[rendered].status_code assign[=] name[status_code]
if compare[name[headers] is_not constant[None]] begin[:]
call[name[rendered].headers.extend, parameter[name[headers]]]
return[name[rendered]]
return[name[decorated_function]]
return[name[inner]] | keyword[def] identifier[render_with] ( identifier[template] = keyword[None] , identifier[json] = keyword[False] , identifier[jsonp] = keyword[False] ):
literal[string]
keyword[if] identifier[jsonp] :
identifier[templates] ={
literal[string] : identifier[dict_jsonp] ,
literal[string] : identifier[dict_jsonp] ,
}
keyword[elif] identifier[json] :
identifier[templates] ={
literal[string] : identifier[dict_jsonify] ,
}
keyword[else] :
identifier[templates] ={}
keyword[if] identifier[isinstance] ( identifier[template] , identifier[six] . identifier[string_types] ):
identifier[templates] [ literal[string] ]= identifier[template]
keyword[elif] identifier[isinstance] ( identifier[template] , identifier[dict] ):
identifier[templates] . identifier[update] ( identifier[template] )
keyword[elif] identifier[template] keyword[is] keyword[None] keyword[and] ( identifier[json] keyword[or] identifier[jsonp] ):
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[default_mimetype] = literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[templates] :
identifier[templates] [ literal[string] ]= identifier[six] . identifier[text_type]
identifier[default_mimetype] = literal[string]
keyword[for] identifier[mimetype] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[mimetype] keyword[in] identifier[templates] :
identifier[templates] [ literal[string] ]= identifier[templates] [ identifier[mimetype] ]
identifier[default_mimetype] = identifier[mimetype]
keyword[break]
identifier[template_mimetypes] = identifier[list] ( identifier[templates] . identifier[keys] ())
identifier[template_mimetypes] . identifier[remove] ( literal[string] )
keyword[def] identifier[inner] ( identifier[f] ):
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[decorated_function] (* identifier[args] ,** identifier[kwargs] ):
identifier[render] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
identifier[result] = identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[result] ,( identifier[Response] , identifier[WerkzeugResponse] , identifier[current_app] . identifier[response_class] )):
keyword[return] identifier[result]
keyword[if] identifier[isinstance] ( identifier[result] , identifier[tuple] ):
identifier[resultset] = identifier[result]
identifier[result] = identifier[resultset] [ literal[int] ]
keyword[if] identifier[len] ( identifier[resultset] )> literal[int] :
identifier[status_code] = identifier[resultset] [ literal[int] ]
keyword[else] :
identifier[status_code] = keyword[None]
keyword[if] identifier[len] ( identifier[resultset] )> literal[int] :
identifier[headers] = identifier[Headers] ( identifier[resultset] [ literal[int] ])
keyword[else] :
identifier[headers] = identifier[Headers] ()
keyword[else] :
identifier[status_code] = keyword[None]
identifier[headers] = identifier[Headers] ()
keyword[if] identifier[len] ( identifier[templates] )> literal[int] :
keyword[if] literal[string] keyword[in] identifier[headers] :
identifier[vary_values] =[ identifier[item] . identifier[strip] () keyword[for] identifier[item] keyword[in] identifier[headers] [ literal[string] ]. identifier[split] ( literal[string] )]
keyword[if] literal[string] keyword[not] keyword[in] identifier[vary_values] :
identifier[vary_values] . identifier[append] ( literal[string] )
identifier[headers] [ literal[string] ]= literal[string] . identifier[join] ( identifier[vary_values] )
keyword[else] :
identifier[headers] [ literal[string] ]= literal[string]
identifier[use_mimetype] = keyword[None]
keyword[if] identifier[render] keyword[and] identifier[request] :
identifier[use_mimetype] = identifier[_best_mimetype_match] ( identifier[template_mimetypes] , identifier[request] . identifier[accept_mimetypes] , literal[string] )
keyword[if] identifier[use_mimetype] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[callable] ( identifier[templates] [ identifier[use_mimetype] ]):
identifier[rendered] = identifier[templates] [ identifier[use_mimetype] ]( identifier[result] )
keyword[if] identifier[isinstance] ( identifier[rendered] , identifier[Response] ):
keyword[if] identifier[status_code] keyword[is] keyword[not] keyword[None] :
identifier[rendered] . identifier[status_code] = identifier[status_code]
keyword[if] identifier[headers] keyword[is] keyword[not] keyword[None] :
identifier[rendered] . identifier[headers] . identifier[extend] ( identifier[headers] )
keyword[else] :
identifier[rendered] = identifier[current_app] . identifier[response_class] (
identifier[rendered] ,
identifier[status] = identifier[status_code] ,
identifier[headers] = identifier[headers] ,
identifier[mimetype] = identifier[default_mimetype] keyword[if] identifier[use_mimetype] == literal[string] keyword[else] identifier[use_mimetype] )
keyword[else] :
identifier[rendered] = identifier[current_app] . identifier[response_class] (
identifier[render_template] ( identifier[templates] [ identifier[use_mimetype] ],** identifier[result] ),
identifier[status] = identifier[status_code] keyword[or] literal[int] , identifier[headers] = identifier[headers] ,
identifier[mimetype] = identifier[default_mimetype] keyword[if] identifier[use_mimetype] == literal[string] keyword[else] identifier[use_mimetype] )
keyword[return] identifier[rendered]
keyword[else] :
keyword[return] identifier[result]
keyword[return] identifier[decorated_function]
keyword[return] identifier[inner] | def render_with(template=None, json=False, jsonp=False):
"""
Decorator to render the wrapped function with the given template (or dictionary
of mimetype keys to templates, where the template is a string name of a template
file or a callable that returns a Response). The function's return value must be
a dictionary and is passed to the template as parameters. Callable templates get
a single parameter with the function's return value. Usage::
@app.route('/myview')
@render_with('myview.html')
def myview():
return {'data': 'value'}
@app.route('/myview_with_json')
@render_with('myview.html', json=True)
def myview_no_json():
return {'data': 'value'}
@app.route('/otherview')
@render_with({
'text/html': 'otherview.html',
'text/xml': 'otherview.xml'})
def otherview():
return {'data': 'value'}
@app.route('/404view')
@render_with('myview.html')
def myview():
return {'error': '404 Not Found'}, 404
@app.route('/headerview')
@render_with('myview.html')
def myview():
return {'data': 'value'}, 200, {'X-Header': 'Header value'}
When a mimetype is specified and the template is not a callable, the response is
returned with the same mimetype. Callable templates must return Response objects
to ensure the correct mimetype is set.
If a dictionary of templates is provided and does not include a handler for ``*/*``,
render_with will attempt to use the handler for (in order) ``text/html``, ``text/plain``
and the various JSON types, falling back to rendering the value into a unicode string.
If the method is called outside a request context, the wrapped method's original
return value is returned. This is meant to facilitate testing and should not be
used to call the method from within another view handler as the presence of a
request context will trigger template rendering.
Rendering may also be suspended by calling the view handler with ``_render=False``.
render_with provides JSON and JSONP handlers for the ``application/json``,
``text/json`` and ``text/x-json`` mimetypes if ``json`` or ``jsonp`` is True
(default is False).
:param template: Single template, or dictionary of MIME type to templates. If the
template is a callable, it is called with the output of the wrapped function
:param json: Helper to add a JSON handler (default is False)
:param jsonp: Helper to add a JSONP handler (if True, also provides JSON, default is False)
"""
if jsonp:
templates = {'application/json': dict_jsonp, 'application/javascript': dict_jsonp} # depends on [control=['if'], data=[]]
elif json:
templates = {'application/json': dict_jsonify} # depends on [control=['if'], data=[]]
else:
templates = {}
if isinstance(template, six.string_types):
templates['text/html'] = template # depends on [control=['if'], data=[]]
elif isinstance(template, dict):
templates.update(template) # depends on [control=['if'], data=[]]
elif template is None and (json or jsonp):
pass # depends on [control=['if'], data=[]]
else: # pragma: no cover
raise ValueError('Expected string or dict for template')
default_mimetype = '*/*'
if '*/*' not in templates:
templates['*/*'] = six.text_type
default_mimetype = 'text/plain'
for mimetype in ('text/html', 'text/plain', 'application/json'):
if mimetype in templates:
templates['*/*'] = templates[mimetype]
default_mimetype = mimetype # Remember which mimetype's handler is serving for */*
break # depends on [control=['if'], data=['mimetype', 'templates']] # depends on [control=['for'], data=['mimetype']] # depends on [control=['if'], data=['templates']]
template_mimetypes = list(templates.keys())
template_mimetypes.remove('*/*') # */* messes up matching, so supply it only as last resort
def inner(f):
@wraps(f)
def decorated_function(*args, **kwargs):
# Check if we need to bypass rendering
render = kwargs.pop('_render', True)
# Get the result
result = f(*args, **kwargs)
# Is the result a Response object? Don't attempt rendering
if isinstance(result, (Response, WerkzeugResponse, current_app.response_class)):
return result # depends on [control=['if'], data=[]]
# Did the result include status code and headers?
if isinstance(result, tuple):
resultset = result
result = resultset[0]
if len(resultset) > 1:
status_code = resultset[1] # depends on [control=['if'], data=[]]
else:
status_code = None
if len(resultset) > 2:
headers = Headers(resultset[2]) # depends on [control=['if'], data=[]]
else:
headers = Headers() # depends on [control=['if'], data=[]]
else:
status_code = None
headers = Headers()
if len(templates) > 1: # If we have more than one template handler
if 'Vary' in headers:
vary_values = [item.strip() for item in headers['Vary'].split(',')]
if 'Accept' not in vary_values:
vary_values.append('Accept') # depends on [control=['if'], data=['vary_values']]
headers['Vary'] = ', '.join(vary_values) # depends on [control=['if'], data=['headers']]
else:
headers['Vary'] = 'Accept' # depends on [control=['if'], data=[]]
# Find a matching mimetype between Accept headers and available templates
use_mimetype = None
if render and request:
# We do not use request.accept_mimetypes.best_match because it turns out to
# be buggy: it returns the least match instead of the best match.
# use_mimetype = request.accept_mimetypes.best_match(template_mimetypes, '*/*')
use_mimetype = _best_mimetype_match(template_mimetypes, request.accept_mimetypes, '*/*') # depends on [control=['if'], data=[]]
# Now render the result with the template for the mimetype
if use_mimetype is not None:
if callable(templates[use_mimetype]):
rendered = templates[use_mimetype](result)
if isinstance(rendered, Response):
if status_code is not None:
rendered.status_code = status_code # depends on [control=['if'], data=['status_code']]
if headers is not None:
rendered.headers.extend(headers) # depends on [control=['if'], data=['headers']] # depends on [control=['if'], data=[]]
else:
rendered = current_app.response_class(rendered, status=status_code, headers=headers, mimetype=default_mimetype if use_mimetype == '*/*' else use_mimetype) # depends on [control=['if'], data=[]]
else: # Not a callable mimetype. Render as a jinja2 template
rendered = current_app.response_class(render_template(templates[use_mimetype], **result), status=status_code or 200, headers=headers, mimetype=default_mimetype if use_mimetype == '*/*' else use_mimetype)
return rendered # depends on [control=['if'], data=['use_mimetype']]
else:
return result
return decorated_function
return inner |
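A minimal sketch of exercising render_with through Flask's test client. The import path (coaster.views) is an assumption about where this decorator lives, and 'doc.html' is a placeholder template that would need to exist.

from flask import Flask
from coaster.views import render_with  # assumed import path

app = Flask(__name__)

@app.route('/doc')
@render_with({'text/html': 'doc.html'}, json=True)
def doc():
    return {'title': 'Hello'}, 200, {'X-Source': 'example'}

with app.test_client() as c:
    # The JSON handler is a callable, so it returns a Response itself
    js = c.get('/doc', headers={'Accept': 'application/json'})
    assert js.mimetype == 'application/json'
    # With more than one handler registered, a 'Vary: Accept' header is added
    html = c.get('/doc', headers={'Accept': 'text/html'})
    assert 'Accept' in html.headers.get('Vary', '')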
def _parallel_predict_proba(estimators, estimators_features, X, n_classes, combination, estimators_weight):
"""Private function used to compute (proba-)predictions within a job."""
n_samples = X.shape[0]
proba = np.zeros((n_samples, n_classes))
for estimator, features, weight in zip(estimators, estimators_features, estimators_weight):
proba_estimator = estimator.predict_proba(X[:, features])
if combination in ['weighted_voting', 'weighted_bmr']:
proba += proba_estimator * weight
else:
proba += proba_estimator
return proba | def function[_parallel_predict_proba, parameter[estimators, estimators_features, X, n_classes, combination, estimators_weight]]:
constant[Private function used to compute (proba-)predictions within a job.]
variable[n_samples] assign[=] call[name[X].shape][constant[0]]
variable[proba] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da18f00f700>, <ast.Name object at 0x7da18f00d840>]]]]
for taget[tuple[[<ast.Name object at 0x7da18f00f4f0>, <ast.Name object at 0x7da18f00c9d0>, <ast.Name object at 0x7da18f00e260>]]] in starred[call[name[zip], parameter[name[estimators], name[estimators_features], name[estimators_weight]]]] begin[:]
variable[proba_estimator] assign[=] call[name[estimator].predict_proba, parameter[call[name[X]][tuple[[<ast.Slice object at 0x7da1b138c040>, <ast.Name object at 0x7da1b138dff0>]]]]]
if compare[name[combination] in list[[<ast.Constant object at 0x7da1b138e140>, <ast.Constant object at 0x7da1b138d690>]]] begin[:]
<ast.AugAssign object at 0x7da1b138d6c0>
return[name[proba]] | keyword[def] identifier[_parallel_predict_proba] ( identifier[estimators] , identifier[estimators_features] , identifier[X] , identifier[n_classes] , identifier[combination] , identifier[estimators_weight] ):
literal[string]
identifier[n_samples] = identifier[X] . identifier[shape] [ literal[int] ]
identifier[proba] = identifier[np] . identifier[zeros] (( identifier[n_samples] , identifier[n_classes] ))
keyword[for] identifier[estimator] , identifier[features] , identifier[weight] keyword[in] identifier[zip] ( identifier[estimators] , identifier[estimators_features] , identifier[estimators_weight] ):
identifier[proba_estimator] = identifier[estimator] . identifier[predict_proba] ( identifier[X] [:, identifier[features] ])
keyword[if] identifier[combination] keyword[in] [ literal[string] , literal[string] ]:
identifier[proba] += identifier[proba_estimator] * identifier[weight]
keyword[else] :
identifier[proba] += identifier[proba_estimator]
keyword[return] identifier[proba] | def _parallel_predict_proba(estimators, estimators_features, X, n_classes, combination, estimators_weight):
"""Private function used to compute (proba-)predictions within a job."""
n_samples = X.shape[0]
proba = np.zeros((n_samples, n_classes))
for (estimator, features, weight) in zip(estimators, estimators_features, estimators_weight):
proba_estimator = estimator.predict_proba(X[:, features])
if combination in ['weighted_voting', 'weighted_bmr']:
proba += proba_estimator * weight # depends on [control=['if'], data=[]]
else:
proba += proba_estimator # depends on [control=['for'], data=[]]
return proba |
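A small sketch of the aggregation above with two stub estimators; the stub class, feature splits, and weights are illustrative values, and _parallel_predict_proba is assumed to be in scope.

import numpy as np

class StubEstimator:
    def __init__(self, proba):
        self._proba = np.asarray(proba, dtype=float)

    def predict_proba(self, X):
        # Same class distribution for every sample in X
        return np.tile(self._proba, (X.shape[0], 1))

X = np.zeros((3, 4))
estimators = [StubEstimator([0.9, 0.1]), StubEstimator([0.2, 0.8])]
features = [np.array([0, 1]), np.array([2, 3])]  # per-estimator feature subsets
weights = [0.75, 0.25]

proba = _parallel_predict_proba(estimators, features, X, n_classes=2,
                                combination='weighted_voting',
                                estimators_weight=weights)
# Each row is 0.75 * [0.9, 0.1] + 0.25 * [0.2, 0.8] = [0.725, 0.275]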
def get(self, id, seq, intf): # pylint: disable=invalid-name,redefined-builtin
"""Get a capture.
:param id: Result ID as an int.
:param seq: TestResult sequence ID as an int.
:param intf: Interface name as string.
:return: :class:`captures.Capture <captures.Capture>` object
:rtype: captures.Capture
"""
schema = CaptureSchema()
resp = self.service.get_id(self._base(id, seq), intf)
return self.service.decode(schema, resp) | def function[get, parameter[self, id, seq, intf]]:
constant[Get a capture.
:param id: Result ID as an int.
:param seq: TestResult sequence ID as an int.
:param intf: Interface name as string.
:return: :class:`captures.Capture <captures.Capture>` object
:rtype: captures.Capture
]
variable[schema] assign[=] call[name[CaptureSchema], parameter[]]
variable[resp] assign[=] call[name[self].service.get_id, parameter[call[name[self]._base, parameter[name[id], name[seq]]], name[intf]]]
return[call[name[self].service.decode, parameter[name[schema], name[resp]]]] | keyword[def] identifier[get] ( identifier[self] , identifier[id] , identifier[seq] , identifier[intf] ):
literal[string]
identifier[schema] = identifier[CaptureSchema] ()
identifier[resp] = identifier[self] . identifier[service] . identifier[get_id] ( identifier[self] . identifier[_base] ( identifier[id] , identifier[seq] ), identifier[intf] )
keyword[return] identifier[self] . identifier[service] . identifier[decode] ( identifier[schema] , identifier[resp] ) | def get(self, id, seq, intf): # pylint: disable=invalid-name,redefined-builtin
'Get a capture.\n\n :param id: Result ID as an int.\n :param seq: TestResult sequence ID as an int.\n :param intf: Interface name as string.\n :return: :class:`captures.Capture <captures.Capture>` object\n :rtype: captures.Capture\n '
schema = CaptureSchema()
resp = self.service.get_id(self._base(id, seq), intf)
return self.service.decode(schema, resp) |
def reader(path):
"""
Turns a path to a dump file into a file-like object of (decompressed)
XML data assuming that '7z' is installed and will know what to do.
:Parameters:
path : `str`
the path to the dump file to read
"""
p = subprocess.Popen(
['7z', 'e', '-so', path],
stdout=subprocess.PIPE,
stderr=file_open(os.devnull, "w")
)
return io.TextIOWrapper(p.stdout, encoding='utf-8',
errors='replace') | def function[reader, parameter[path]]:
constant[
Turns a path to a dump file into a file-like object of (decompressed)
XML data assuming that '7z' is installed and will know what to do.
:Parameters:
path : `str`
the path to the dump file to read
]
variable[p] assign[=] call[name[subprocess].Popen, parameter[list[[<ast.Constant object at 0x7da1b0ae3ca0>, <ast.Constant object at 0x7da1b0ae0b50>, <ast.Constant object at 0x7da1b0ae1450>, <ast.Name object at 0x7da1b0ae3fd0>]]]]
return[call[name[io].TextIOWrapper, parameter[name[p].stdout]]] | keyword[def] identifier[reader] ( identifier[path] ):
literal[string]
identifier[p] = identifier[subprocess] . identifier[Popen] (
[ literal[string] , literal[string] , literal[string] , identifier[path] ],
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[file_open] ( identifier[os] . identifier[devnull] , literal[string] )
)
keyword[return] identifier[io] . identifier[TextIOWrapper] ( identifier[p] . identifier[stdout] , identifier[encoding] = literal[string] ,
identifier[errors] = literal[string] ) | def reader(path):
"""
Turns a path to a dump file into a file-like object of (decompressed)
XML data assuming that '7z' is installed and will know what to do.
:Parameters:
path : `str`
the path to the dump file to read
"""
p = subprocess.Popen(['7z', 'e', '-so', path], stdout=subprocess.PIPE, stderr=file_open(os.devnull, 'w'))
return io.TextIOWrapper(p.stdout, encoding='utf-8', errors='replace') |
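Usage sketch for the streaming reader above; it assumes the 7z binary is on PATH and 'dump.xml.7z' is a placeholder filename.

for line in reader('dump.xml.7z'):
    if '<page>' in line:
        print('found the first page element')
        break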
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
"""Initializes a urllib3 PoolManager.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
block=block, strict=True, **pool_kwargs) | def function[init_poolmanager, parameter[self, connections, maxsize, block]]:
constant[Initializes a urllib3 PoolManager.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
]
name[self]._pool_connections assign[=] name[connections]
name[self]._pool_maxsize assign[=] name[maxsize]
name[self]._pool_block assign[=] name[block]
name[self].poolmanager assign[=] call[name[PoolManager], parameter[]] | keyword[def] identifier[init_poolmanager] ( identifier[self] , identifier[connections] , identifier[maxsize] , identifier[block] = identifier[DEFAULT_POOLBLOCK] ,** identifier[pool_kwargs] ):
literal[string]
identifier[self] . identifier[_pool_connections] = identifier[connections]
identifier[self] . identifier[_pool_maxsize] = identifier[maxsize]
identifier[self] . identifier[_pool_block] = identifier[block]
identifier[self] . identifier[poolmanager] = identifier[PoolManager] ( identifier[num_pools] = identifier[connections] , identifier[maxsize] = identifier[maxsize] ,
identifier[block] = identifier[block] , identifier[strict] = keyword[True] ,** identifier[pool_kwargs] ) | def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
"""Initializes a urllib3 PoolManager.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs) |
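This hook is normally customised by subclassing; the sketch below uses only the public requests API (HTTPAdapter, Session.mount) to enlarge the per-host connection pool.

import requests
from requests.adapters import HTTPAdapter

class LargePoolAdapter(HTTPAdapter):
    def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
        # Keep the defaults but force a larger per-host pool
        super(LargePoolAdapter, self).init_poolmanager(
            connections, maxsize=50, block=block, **pool_kwargs)

session = requests.Session()
session.mount('https://', LargePoolAdapter())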
def geneinfo(args):
"""
%prog geneinfo pineapple.20141004.bed liftover.bed pineapple.20150413.bed \
note.txt interproscan.txt
Build gene info table from various sources. The three beds contain
information on the original scaffolds, linkage groups, and final selected
loci (after removal of TEs and split loci). The final two text files contain
AHRD and domain data.
"""
p = OptionParser(geneinfo.__doc__)
opts, args = p.parse_args(args)
if len(args) != 5:
sys.exit(not p.print_help())
scfbed, liftoverbed, lgbed, note, ipr = args
note = DictFile(note, delimiter="\t")
scfbed = Bed(scfbed)
lgorder = Bed(lgbed).order
liftover = Bed(liftoverbed).order
header = "Accession Scaffold-position LG-position "\
"Description Interpro-domain Interpro-description "\
"GO-term KEGG".split()
ipr = read_interpro(ipr)
fw_clean = must_open("master.txt", "w")
fw_removed = must_open("master-removed.txt", "w")
for fw in (fw_clean, fw_removed):
print("\t".join(header), file=fw)
for b in scfbed:
accession = b.accn
scaffold_position = b.tag
if accession in liftover:
lg_position = liftover[accession][-1].tag
else:
lg_position = "split"
fw = fw_clean if accession in lgorder else fw_removed
description = note[accession]
interpro = interpro_description = go = kegg = ""
if accession in ipr:
interpro, interpro_description, go, kegg = ipr[accession]
print("\t".join((accession, scaffold_position, lg_position,
description, interpro, interpro_description, go, kegg)), file=fw)
    fw_clean.close()
    fw_removed.close()
constant[
%prog geneinfo pineapple.20141004.bed liftover.bed pineapple.20150413.bed note.txt interproscan.txt
Build gene info table from various sources. The three beds contain
information on the original scaffolds, linkage groups, and final selected
loci (after removal of TEs and split loci). The final two text files contain
AHRD and domain data.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[geneinfo].__doc__]]
<ast.Tuple object at 0x7da1b08eb910> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[5]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b08e89d0>]]
<ast.Tuple object at 0x7da1b08eb7f0> assign[=] name[args]
variable[note] assign[=] call[name[DictFile], parameter[name[note]]]
variable[scfbed] assign[=] call[name[Bed], parameter[name[scfbed]]]
variable[lgorder] assign[=] call[name[Bed], parameter[name[lgbed]]].order
variable[liftover] assign[=] call[name[Bed], parameter[name[liftoverbed]]].order
variable[header] assign[=] call[constant[Accession Scaffold-position LG-position Description Interpro-domain Interpro-description GO-term KEGG].split, parameter[]]
variable[ipr] assign[=] call[name[read_interpro], parameter[name[ipr]]]
variable[fw_clean] assign[=] call[name[must_open], parameter[constant[master.txt], constant[w]]]
variable[fw_removed] assign[=] call[name[must_open], parameter[constant[master-removed.txt], constant[w]]]
for taget[name[fw]] in starred[tuple[[<ast.Name object at 0x7da1b08c91b0>, <ast.Name object at 0x7da1b08c9c00>]]] begin[:]
call[name[print], parameter[call[constant[ ].join, parameter[name[header]]]]]
for taget[name[b]] in starred[name[scfbed]] begin[:]
variable[accession] assign[=] name[b].accn
variable[scaffold_position] assign[=] name[b].tag
if compare[name[accession] in name[liftover]] begin[:]
variable[lg_position] assign[=] call[call[name[liftover]][name[accession]]][<ast.UnaryOp object at 0x7da1b08cb370>].tag
variable[fw] assign[=] <ast.IfExp object at 0x7da1b08c9fc0>
variable[description] assign[=] call[name[note]][name[accession]]
variable[interpro] assign[=] constant[]
if compare[name[accession] in name[ipr]] begin[:]
<ast.Tuple object at 0x7da1b08c8550> assign[=] call[name[ipr]][name[accession]]
call[name[print], parameter[call[constant[ ].join, parameter[tuple[[<ast.Name object at 0x7da1b08c8fa0>, <ast.Name object at 0x7da1b08c9600>, <ast.Name object at 0x7da1b0810e20>, <ast.Name object at 0x7da1b0812350>, <ast.Name object at 0x7da1b08111e0>, <ast.Name object at 0x7da1b0811600>, <ast.Name object at 0x7da1b0811a80>, <ast.Name object at 0x7da1b0812290>]]]]]]
call[name[fw_clean].close, parameter[]]
call[name[fw_removed].close, parameter[]]
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[geneinfo] . identifier[__doc__] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[scfbed] , identifier[liftoverbed] , identifier[lgbed] , identifier[note] , identifier[ipr] = identifier[args]
identifier[note] = identifier[DictFile] ( identifier[note] , identifier[delimiter] = literal[string] )
identifier[scfbed] = identifier[Bed] ( identifier[scfbed] )
identifier[lgorder] = identifier[Bed] ( identifier[lgbed] ). identifier[order]
identifier[liftover] = identifier[Bed] ( identifier[liftoverbed] ). identifier[order]
identifier[header] = literal[string] literal[string] literal[string] . identifier[split] ()
identifier[ipr] = identifier[read_interpro] ( identifier[ipr] )
identifier[fw_clean] = identifier[must_open] ( literal[string] , literal[string] )
identifier[fw_removed] = identifier[must_open] ( literal[string] , literal[string] )
keyword[for] identifier[fw] keyword[in] ( identifier[fw_clean] , identifier[fw_removed] ):
identifier[print] ( literal[string] . identifier[join] ( identifier[header] ), identifier[file] = identifier[fw] )
keyword[for] identifier[b] keyword[in] identifier[scfbed] :
identifier[accession] = identifier[b] . identifier[accn]
identifier[scaffold_position] = identifier[b] . identifier[tag]
keyword[if] identifier[accession] keyword[in] identifier[liftover] :
identifier[lg_position] = identifier[liftover] [ identifier[accession] ][- literal[int] ]. identifier[tag]
keyword[else] :
identifier[lg_position] = literal[string]
identifier[fw] = identifier[fw_clean] keyword[if] identifier[accession] keyword[in] identifier[lgorder] keyword[else] identifier[fw_removed]
identifier[description] = identifier[note] [ identifier[accession] ]
identifier[interpro] = identifier[interpro_description] = identifier[go] = identifier[kegg] = literal[string]
keyword[if] identifier[accession] keyword[in] identifier[ipr] :
identifier[interpro] , identifier[interpro_description] , identifier[go] , identifier[kegg] = identifier[ipr] [ identifier[accession] ]
identifier[print] ( literal[string] . identifier[join] (( identifier[accession] , identifier[scaffold_position] , identifier[lg_position] ,
identifier[description] , identifier[interpro] , identifier[interpro_description] , identifier[go] , identifier[kegg] )), identifier[file] = identifier[fw] )
identifier[fw_clean] . identifier[close] ()
identifier[fw_removed] . identifier[close] ()
"""
%prog geneinfo pineapple.20141004.bed liftover.bed pineapple.20150413.bed note.txt interproscan.txt
Build gene info table from various sources. The three beds contain
information on the original scaffolds, linkage groups, and final selected
loci (after removal of TEs and split loci). The final two text files contain
AHRD and domain data.
"""
p = OptionParser(geneinfo.__doc__)
(opts, args) = p.parse_args(args)
if len(args) != 5:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(scfbed, liftoverbed, lgbed, note, ipr) = args
note = DictFile(note, delimiter='\t')
scfbed = Bed(scfbed)
lgorder = Bed(lgbed).order
liftover = Bed(liftoverbed).order
header = 'Accession Scaffold-position LG-position Description Interpro-domain Interpro-description GO-term KEGG'.split()
ipr = read_interpro(ipr)
fw_clean = must_open('master.txt', 'w')
fw_removed = must_open('master-removed.txt', 'w')
for fw in (fw_clean, fw_removed):
print('\t'.join(header), file=fw) # depends on [control=['for'], data=['fw']]
for b in scfbed:
accession = b.accn
scaffold_position = b.tag
if accession in liftover:
lg_position = liftover[accession][-1].tag # depends on [control=['if'], data=['accession', 'liftover']]
else:
lg_position = 'split'
fw = fw_clean if accession in lgorder else fw_removed
description = note[accession]
interpro = interpro_description = go = kegg = ''
if accession in ipr:
(interpro, interpro_description, go, kegg) = ipr[accession] # depends on [control=['if'], data=['accession', 'ipr']]
print('\t'.join((accession, scaffold_position, lg_position, description, interpro, interpro_description, go, kegg)), file=fw) # depends on [control=['for'], data=['b']]
    fw_clean.close()
    fw_removed.close()
def banner(cls, content_="Well Come"):
"""生成占3行的字符串"""
    # banner border character
sp_char = "#"
    # measure the effective content length
itsays = content_.strip()
effective_length = int(len(itsays))
    # build the padded content line and matching border
side_space = ' ' * int(effective_length * ((1 - 0.618) / 0.618) / 2)
content_line = sp_char + side_space + itsays + side_space + sp_char
content_line_length = len(content_line)
banner_border = sp_char * content_line_length
return banner_border + '\n' + content_line + '\n' + banner_border | def function[banner, parameter[cls, content_]]:
    constant[Generate a banner string that spans three lines]
variable[sp_char] assign[=] constant[#]
variable[itsays] assign[=] call[name[content_].strip, parameter[]]
variable[effective_length] assign[=] call[name[int], parameter[call[name[len], parameter[name[itsays]]]]]
variable[side_space] assign[=] binary_operation[constant[ ] * call[name[int], parameter[binary_operation[binary_operation[name[effective_length] * binary_operation[binary_operation[constant[1] - constant[0.618]] / constant[0.618]]] / constant[2]]]]]
variable[content_line] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[sp_char] + name[side_space]] + name[itsays]] + name[side_space]] + name[sp_char]]
variable[content_line_length] assign[=] call[name[len], parameter[name[content_line]]]
variable[banner_border] assign[=] binary_operation[name[sp_char] * name[content_line_length]]
return[binary_operation[binary_operation[binary_operation[binary_operation[name[banner_border] + constant[
]] + name[content_line]] + constant[
]] + name[banner_border]]] | keyword[def] identifier[banner] ( identifier[cls] , identifier[content_] = literal[string] ):
literal[string]
identifier[sp_char] = literal[string]
identifier[itsays] = identifier[content_] . identifier[strip] ()
identifier[effective_length] = identifier[int] ( identifier[len] ( identifier[itsays] ))
identifier[side_space] = literal[string] * identifier[int] ( identifier[effective_length] *(( literal[int] - literal[int] )/ literal[int] )/ literal[int] )
identifier[content_line] = identifier[sp_char] + identifier[side_space] + identifier[itsays] + identifier[side_space] + identifier[sp_char]
identifier[content_line_length] = identifier[len] ( identifier[content_line] )
identifier[banner_border] = identifier[sp_char] * identifier[content_line_length]
keyword[return] identifier[banner_border] + literal[string] + identifier[content_line] + literal[string] + identifier[banner_border] | def banner(cls, content_='Well Come'):
"""生成占3行的字符串"""
    # banner border character
sp_char = '#'
    # measure the effective content length
itsays = content_.strip()
effective_length = int(len(itsays))
    # build the padded content line and matching border
side_space = ' ' * int(effective_length * ((1 - 0.618) / 0.618) / 2)
content_line = sp_char + side_space + itsays + side_space + sp_char
content_line_length = len(content_line)
banner_border = sp_char * content_line_length
return banner_border + '\n' + content_line + '\n' + banner_border |
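Worked example of the padding arithmetic above, where C stands in for whichever class defines banner(). For 'Deploy' (length 6) the side padding is int(6 * (1 - 0.618) / 0.618 / 2) = 1 space per side, giving a 10-character-wide block.

print(C.banner('Deploy'))
# ##########
# # Deploy #
# ##########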
def get_instance(self, payload):
"""
Build an instance of ReservationInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
"""
return ReservationInstance(
self._version,
payload,
workspace_sid=self._solution['workspace_sid'],
task_sid=self._solution['task_sid'],
) | def function[get_instance, parameter[self, payload]]:
constant[
Build an instance of ReservationInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
]
return[call[name[ReservationInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ):
literal[string]
keyword[return] identifier[ReservationInstance] (
identifier[self] . identifier[_version] ,
identifier[payload] ,
identifier[workspace_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[task_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
) | def get_instance(self, payload):
"""
Build an instance of ReservationInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
"""
return ReservationInstance(self._version, payload, workspace_sid=self._solution['workspace_sid'], task_sid=self._solution['task_sid']) |
def coerce_to_array(values, dtype, mask=None, copy=False):
"""
Coerce the input values array to numpy arrays with a mask
Parameters
----------
values : 1D list-like
dtype : integer dtype
mask : boolean 1D array, optional
copy : boolean, default False
if True, copy the input
Returns
-------
tuple of (values, mask)
"""
    # if values is an integer numpy array, preserve its dtype
if dtype is None and hasattr(values, 'dtype'):
if is_integer_dtype(values.dtype):
dtype = values.dtype
if dtype is not None:
if (isinstance(dtype, str) and
(dtype.startswith("Int") or dtype.startswith("UInt"))):
# Avoid DeprecationWarning from NumPy about np.dtype("Int64")
# https://github.com/numpy/numpy/pull/7476
dtype = dtype.lower()
if not issubclass(type(dtype), _IntegerDtype):
try:
dtype = _dtypes[str(np.dtype(dtype))]
except KeyError:
raise ValueError("invalid dtype specified {}".format(dtype))
if isinstance(values, IntegerArray):
values, mask = values._data, values._mask
if dtype is not None:
values = values.astype(dtype.numpy_dtype, copy=False)
if copy:
values = values.copy()
mask = mask.copy()
return values, mask
values = np.array(values, copy=copy)
if is_object_dtype(values):
inferred_type = lib.infer_dtype(values, skipna=True)
if inferred_type == 'empty':
values = np.empty(len(values))
values.fill(np.nan)
elif inferred_type not in ['floating', 'integer',
'mixed-integer', 'mixed-integer-float']:
raise TypeError("{} cannot be converted to an IntegerDtype".format(
values.dtype))
elif not (is_integer_dtype(values) or is_float_dtype(values)):
raise TypeError("{} cannot be converted to an IntegerDtype".format(
values.dtype))
if mask is None:
mask = isna(values)
else:
assert len(mask) == len(values)
if not values.ndim == 1:
raise TypeError("values must be a 1D list-like")
if not mask.ndim == 1:
raise TypeError("mask must be a 1D list-like")
# infer dtype if needed
if dtype is None:
dtype = np.dtype('int64')
else:
dtype = dtype.type
# if we are float, let's make sure that we can
# safely cast
# we copy as need to coerce here
if mask.any():
values = values.copy()
values[mask] = 1
values = safe_cast(values, dtype, copy=False)
else:
values = safe_cast(values, dtype, copy=False)
return values, mask | def function[coerce_to_array, parameter[values, dtype, mask, copy]]:
constant[
Coerce the input values array to numpy arrays with a mask
Parameters
----------
values : 1D list-like
dtype : integer dtype
mask : boolean 1D array, optional
copy : boolean, default False
if True, copy the input
Returns
-------
tuple of (values, mask)
]
if <ast.BoolOp object at 0x7da18f00d2a0> begin[:]
if call[name[is_integer_dtype], parameter[name[values].dtype]] begin[:]
variable[dtype] assign[=] name[values].dtype
if compare[name[dtype] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da18f00cfa0> begin[:]
variable[dtype] assign[=] call[name[dtype].lower, parameter[]]
if <ast.UnaryOp object at 0x7da18f00e530> begin[:]
<ast.Try object at 0x7da18f00d8a0>
if call[name[isinstance], parameter[name[values], name[IntegerArray]]] begin[:]
<ast.Tuple object at 0x7da18f00e8f0> assign[=] tuple[[<ast.Attribute object at 0x7da18f00c8b0>, <ast.Attribute object at 0x7da18f00d5d0>]]
if compare[name[dtype] is_not constant[None]] begin[:]
variable[values] assign[=] call[name[values].astype, parameter[name[dtype].numpy_dtype]]
if name[copy] begin[:]
variable[values] assign[=] call[name[values].copy, parameter[]]
variable[mask] assign[=] call[name[mask].copy, parameter[]]
return[tuple[[<ast.Name object at 0x7da18f00ce20>, <ast.Name object at 0x7da18f00e320>]]]
variable[values] assign[=] call[name[np].array, parameter[name[values]]]
if call[name[is_object_dtype], parameter[name[values]]] begin[:]
variable[inferred_type] assign[=] call[name[lib].infer_dtype, parameter[name[values]]]
if compare[name[inferred_type] equal[==] constant[empty]] begin[:]
variable[values] assign[=] call[name[np].empty, parameter[call[name[len], parameter[name[values]]]]]
call[name[values].fill, parameter[name[np].nan]]
if compare[name[mask] is constant[None]] begin[:]
variable[mask] assign[=] call[name[isna], parameter[name[values]]]
if <ast.UnaryOp object at 0x7da18f00d930> begin[:]
<ast.Raise object at 0x7da18f00da80>
if <ast.UnaryOp object at 0x7da20ed9b820> begin[:]
<ast.Raise object at 0x7da207f02b00>
if compare[name[dtype] is constant[None]] begin[:]
variable[dtype] assign[=] call[name[np].dtype, parameter[constant[int64]]]
if call[name[mask].any, parameter[]] begin[:]
variable[values] assign[=] call[name[values].copy, parameter[]]
call[name[values]][name[mask]] assign[=] constant[1]
variable[values] assign[=] call[name[safe_cast], parameter[name[values], name[dtype]]]
return[tuple[[<ast.Name object at 0x7da207f00c70>, <ast.Name object at 0x7da207f03010>]]] | keyword[def] identifier[coerce_to_array] ( identifier[values] , identifier[dtype] , identifier[mask] = keyword[None] , identifier[copy] = keyword[False] ):
literal[string]
keyword[if] identifier[dtype] keyword[is] keyword[None] keyword[and] identifier[hasattr] ( identifier[values] , literal[string] ):
keyword[if] identifier[is_integer_dtype] ( identifier[values] . identifier[dtype] ):
identifier[dtype] = identifier[values] . identifier[dtype]
keyword[if] identifier[dtype] keyword[is] keyword[not] keyword[None] :
keyword[if] ( identifier[isinstance] ( identifier[dtype] , identifier[str] ) keyword[and]
( identifier[dtype] . identifier[startswith] ( literal[string] ) keyword[or] identifier[dtype] . identifier[startswith] ( literal[string] ))):
identifier[dtype] = identifier[dtype] . identifier[lower] ()
keyword[if] keyword[not] identifier[issubclass] ( identifier[type] ( identifier[dtype] ), identifier[_IntegerDtype] ):
keyword[try] :
identifier[dtype] = identifier[_dtypes] [ identifier[str] ( identifier[np] . identifier[dtype] ( identifier[dtype] ))]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[dtype] ))
keyword[if] identifier[isinstance] ( identifier[values] , identifier[IntegerArray] ):
identifier[values] , identifier[mask] = identifier[values] . identifier[_data] , identifier[values] . identifier[_mask]
keyword[if] identifier[dtype] keyword[is] keyword[not] keyword[None] :
identifier[values] = identifier[values] . identifier[astype] ( identifier[dtype] . identifier[numpy_dtype] , identifier[copy] = keyword[False] )
keyword[if] identifier[copy] :
identifier[values] = identifier[values] . identifier[copy] ()
identifier[mask] = identifier[mask] . identifier[copy] ()
keyword[return] identifier[values] , identifier[mask]
identifier[values] = identifier[np] . identifier[array] ( identifier[values] , identifier[copy] = identifier[copy] )
keyword[if] identifier[is_object_dtype] ( identifier[values] ):
identifier[inferred_type] = identifier[lib] . identifier[infer_dtype] ( identifier[values] , identifier[skipna] = keyword[True] )
keyword[if] identifier[inferred_type] == literal[string] :
identifier[values] = identifier[np] . identifier[empty] ( identifier[len] ( identifier[values] ))
identifier[values] . identifier[fill] ( identifier[np] . identifier[nan] )
keyword[elif] identifier[inferred_type] keyword[not] keyword[in] [ literal[string] , literal[string] ,
literal[string] , literal[string] ]:
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[values] . identifier[dtype] ))
keyword[elif] keyword[not] ( identifier[is_integer_dtype] ( identifier[values] ) keyword[or] identifier[is_float_dtype] ( identifier[values] )):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[values] . identifier[dtype] ))
keyword[if] identifier[mask] keyword[is] keyword[None] :
identifier[mask] = identifier[isna] ( identifier[values] )
keyword[else] :
keyword[assert] identifier[len] ( identifier[mask] )== identifier[len] ( identifier[values] )
keyword[if] keyword[not] identifier[values] . identifier[ndim] == literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[mask] . identifier[ndim] == literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[dtype] keyword[is] keyword[None] :
identifier[dtype] = identifier[np] . identifier[dtype] ( literal[string] )
keyword[else] :
identifier[dtype] = identifier[dtype] . identifier[type]
keyword[if] identifier[mask] . identifier[any] ():
identifier[values] = identifier[values] . identifier[copy] ()
identifier[values] [ identifier[mask] ]= literal[int]
identifier[values] = identifier[safe_cast] ( identifier[values] , identifier[dtype] , identifier[copy] = keyword[False] )
keyword[else] :
identifier[values] = identifier[safe_cast] ( identifier[values] , identifier[dtype] , identifier[copy] = keyword[False] )
keyword[return] identifier[values] , identifier[mask] | def coerce_to_array(values, dtype, mask=None, copy=False):
"""
Coerce the input values array to numpy arrays with a mask
Parameters
----------
values : 1D list-like
dtype : integer dtype
mask : boolean 1D array, optional
copy : boolean, default False
if True, copy the input
Returns
-------
tuple of (values, mask)
"""
    # if values is an integer numpy array, preserve its dtype
if dtype is None and hasattr(values, 'dtype'):
if is_integer_dtype(values.dtype):
dtype = values.dtype # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if dtype is not None:
if isinstance(dtype, str) and (dtype.startswith('Int') or dtype.startswith('UInt')):
# Avoid DeprecationWarning from NumPy about np.dtype("Int64")
# https://github.com/numpy/numpy/pull/7476
dtype = dtype.lower() # depends on [control=['if'], data=[]]
if not issubclass(type(dtype), _IntegerDtype):
try:
dtype = _dtypes[str(np.dtype(dtype))] # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError('invalid dtype specified {}'.format(dtype)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dtype']]
if isinstance(values, IntegerArray):
(values, mask) = (values._data, values._mask)
if dtype is not None:
values = values.astype(dtype.numpy_dtype, copy=False) # depends on [control=['if'], data=['dtype']]
if copy:
values = values.copy()
mask = mask.copy() # depends on [control=['if'], data=[]]
return (values, mask) # depends on [control=['if'], data=[]]
values = np.array(values, copy=copy)
if is_object_dtype(values):
inferred_type = lib.infer_dtype(values, skipna=True)
if inferred_type == 'empty':
values = np.empty(len(values))
values.fill(np.nan) # depends on [control=['if'], data=[]]
elif inferred_type not in ['floating', 'integer', 'mixed-integer', 'mixed-integer-float']:
raise TypeError('{} cannot be converted to an IntegerDtype'.format(values.dtype)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not (is_integer_dtype(values) or is_float_dtype(values)):
raise TypeError('{} cannot be converted to an IntegerDtype'.format(values.dtype)) # depends on [control=['if'], data=[]]
if mask is None:
mask = isna(values) # depends on [control=['if'], data=['mask']]
else:
assert len(mask) == len(values)
if not values.ndim == 1:
raise TypeError('values must be a 1D list-like') # depends on [control=['if'], data=[]]
if not mask.ndim == 1:
raise TypeError('mask must be a 1D list-like') # depends on [control=['if'], data=[]]
# infer dtype if needed
if dtype is None:
dtype = np.dtype('int64') # depends on [control=['if'], data=['dtype']]
else:
dtype = dtype.type
# if we are float, let's make sure that we can
# safely cast
# we copy as need to coerce here
if mask.any():
values = values.copy()
values[mask] = 1
values = safe_cast(values, dtype, copy=False) # depends on [control=['if'], data=[]]
else:
values = safe_cast(values, dtype, copy=False)
return (values, mask) |
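The public entry point to this coercion in pandas is the nullable integer extension array. The sketch below peeks at the private _data/_mask pair; the exact filler stored under the mask (here 1, per values[mask] = 1 above) is an implementation detail.

import pandas as pd

arr = pd.array([1, 2, None], dtype='Int64')  # object input with one missing slot
print(arr._mask)  # [False False  True]
print(arr._data)  # masked position holds the filler value, e.g. [1 2 1]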
def multiple_replace(d: Dict[str, str], text: str) -> str:
""" Performs string replacement from dict in a single pass. Taken from
https://www.oreilly.com/library/view/python-cookbook/0596001673/ch03s15.html
"""
# Create a regular expression from all of the dictionary keys
regex = re.compile("|".join(map(re.escape, d.keys())))
# For each match, look up the corresponding value in the dictionary
return regex.sub(lambda match: d[match.group(0)], text) | def function[multiple_replace, parameter[d, text]]:
constant[ Performs string replacement from dict in a single pass. Taken from
https://www.oreilly.com/library/view/python-cookbook/0596001673/ch03s15.html
]
variable[regex] assign[=] call[name[re].compile, parameter[call[constant[|].join, parameter[call[name[map], parameter[name[re].escape, call[name[d].keys, parameter[]]]]]]]]
return[call[name[regex].sub, parameter[<ast.Lambda object at 0x7da20c6ab700>, name[text]]]] | keyword[def] identifier[multiple_replace] ( identifier[d] : identifier[Dict] [ identifier[str] , identifier[str] ], identifier[text] : identifier[str] )-> identifier[str] :
literal[string]
identifier[regex] = identifier[re] . identifier[compile] ( literal[string] . identifier[join] ( identifier[map] ( identifier[re] . identifier[escape] , identifier[d] . identifier[keys] ())))
keyword[return] identifier[regex] . identifier[sub] ( keyword[lambda] identifier[match] : identifier[d] [ identifier[match] . identifier[group] ( literal[int] )], identifier[text] ) | def multiple_replace(d: Dict[str, str], text: str) -> str:
""" Performs string replacement from dict in a single pass. Taken from
https://www.oreilly.com/library/view/python-cookbook/0596001673/ch03s15.html
"""
# Create a regular expression from all of the dictionary keys
regex = re.compile('|'.join(map(re.escape, d.keys())))
# For each match, look up the corresponding value in the dictionary
return regex.sub(lambda match: d[match.group(0)], text) |
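Why the single pass matters: swapping two tokens cannot be done with chained str.replace calls, but works here because the regex walks the text once. The sample strings are illustrative.

swapped = multiple_replace({'cat': 'dog', 'dog': 'cat'}, 'cat chases dog')
print(swapped)  # 'dog chases cat'
# Chained replaces would give 'dog chases dog' and then 'cat chases cat'.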
def _get_inputs(self, old_inputs):
"""Converts command line args into a list of template inputs
"""
# Convert inputs to dict to facilitate overriding by channel name
# Also, drop DataNode ID and keep only contents.
input_dict = {}
for input in old_inputs:
# Strip out DataNode UUID and URL
input['data'] = {'contents': input['data']['contents']}
input_dict[input['channel']] = input
file_inputs = self._get_file_inputs()
try:
jsonschema.validate(file_inputs, file_input_schema)
except jsonschema.ValidationError:
raise SystemExit("ERROR! User inputs file is not valid")
for (channel, input_id) in file_inputs.iteritems():
input_dict[channel] = {
'channel': channel,
'data': {'contents': input_id}
}
# Override with cli user inputs if specified
if self.args.inputs:
for kv_pair in self.args.inputs:
(channel, input_id) = kv_pair.split('=')
input_dict[channel] = {
'channel': channel,
'data': {
'contents':
self._parse_string_to_nested_lists(input_id)}
}
return input_dict.values() | def function[_get_inputs, parameter[self, old_inputs]]:
constant[Converts command line args into a list of template inputs
]
variable[input_dict] assign[=] dictionary[[], []]
for taget[name[input]] in starred[name[old_inputs]] begin[:]
call[name[input]][constant[data]] assign[=] dictionary[[<ast.Constant object at 0x7da1b10c2380>], [<ast.Subscript object at 0x7da1b10c1660>]]
call[name[input_dict]][call[name[input]][constant[channel]]] assign[=] name[input]
variable[file_inputs] assign[=] call[name[self]._get_file_inputs, parameter[]]
<ast.Try object at 0x7da1b10c0c40>
for taget[tuple[[<ast.Name object at 0x7da1b10c0be0>, <ast.Name object at 0x7da1b10c0400>]]] in starred[call[name[file_inputs].iteritems, parameter[]]] begin[:]
call[name[input_dict]][name[channel]] assign[=] dictionary[[<ast.Constant object at 0x7da1b10c0b20>, <ast.Constant object at 0x7da1b10c0130>], [<ast.Name object at 0x7da1b10c1ba0>, <ast.Dict object at 0x7da1b10c0c10>]]
if name[self].args.inputs begin[:]
for taget[name[kv_pair]] in starred[name[self].args.inputs] begin[:]
<ast.Tuple object at 0x7da1b1024670> assign[=] call[name[kv_pair].split, parameter[constant[=]]]
call[name[input_dict]][name[channel]] assign[=] dictionary[[<ast.Constant object at 0x7da1b10242e0>, <ast.Constant object at 0x7da1b10241f0>], [<ast.Name object at 0x7da1b1024130>, <ast.Dict object at 0x7da1b1024190>]]
return[call[name[input_dict].values, parameter[]]] | keyword[def] identifier[_get_inputs] ( identifier[self] , identifier[old_inputs] ):
literal[string]
identifier[input_dict] ={}
keyword[for] identifier[input] keyword[in] identifier[old_inputs] :
identifier[input] [ literal[string] ]={ literal[string] : identifier[input] [ literal[string] ][ literal[string] ]}
identifier[input_dict] [ identifier[input] [ literal[string] ]]= identifier[input]
identifier[file_inputs] = identifier[self] . identifier[_get_file_inputs] ()
keyword[try] :
identifier[jsonschema] . identifier[validate] ( identifier[file_inputs] , identifier[file_input_schema] )
keyword[except] identifier[jsonschema] . identifier[ValidationError] :
keyword[raise] identifier[SystemExit] ( literal[string] )
keyword[for] ( identifier[channel] , identifier[input_id] ) keyword[in] identifier[file_inputs] . identifier[iteritems] ():
identifier[input_dict] [ identifier[channel] ]={
literal[string] : identifier[channel] ,
literal[string] :{ literal[string] : identifier[input_id] }
}
keyword[if] identifier[self] . identifier[args] . identifier[inputs] :
keyword[for] identifier[kv_pair] keyword[in] identifier[self] . identifier[args] . identifier[inputs] :
( identifier[channel] , identifier[input_id] )= identifier[kv_pair] . identifier[split] ( literal[string] )
identifier[input_dict] [ identifier[channel] ]={
literal[string] : identifier[channel] ,
literal[string] :{
literal[string] :
identifier[self] . identifier[_parse_string_to_nested_lists] ( identifier[input_id] )}
}
keyword[return] identifier[input_dict] . identifier[values] () | def _get_inputs(self, old_inputs):
"""Converts command line args into a list of template inputs
"""
# Convert inputs to dict to facilitate overriding by channel name
# Also, drop DataNode ID and keep only contents.
input_dict = {}
for input in old_inputs:
# Strip out DataNode UUID and URL
input['data'] = {'contents': input['data']['contents']}
input_dict[input['channel']] = input # depends on [control=['for'], data=['input']]
file_inputs = self._get_file_inputs()
try:
jsonschema.validate(file_inputs, file_input_schema) # depends on [control=['try'], data=[]]
except jsonschema.ValidationError:
raise SystemExit('ERROR! User inputs file is not valid') # depends on [control=['except'], data=[]]
for (channel, input_id) in file_inputs.iteritems():
input_dict[channel] = {'channel': channel, 'data': {'contents': input_id}} # depends on [control=['for'], data=[]]
# Override with cli user inputs if specified
if self.args.inputs:
for kv_pair in self.args.inputs:
(channel, input_id) = kv_pair.split('=')
input_dict[channel] = {'channel': channel, 'data': {'contents': self._parse_string_to_nested_lists(input_id)}} # depends on [control=['for'], data=['kv_pair']] # depends on [control=['if'], data=[]]
return input_dict.values() |
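A minimal sketch of the precedence this merge implements: file inputs override the template's old inputs, and CLI channel=value pairs override both. All names and values below are invented for illustration.

old_inputs = [{'channel': 'a', 'data': {'contents': '1', 'uuid': 'x'}}]
file_inputs = {'a': '2', 'b': '3'}          # from the user inputs file
cli_inputs = ['a=4']                        # from --inputs on the command line

# Template inputs first, stripped down to channel + contents.
merged = {i['channel']: {'channel': i['channel'],
                         'data': {'contents': i['data']['contents']}}
          for i in old_inputs}
# File inputs override by channel name.
merged.update({c: {'channel': c, 'data': {'contents': v}}
               for c, v in file_inputs.items()})
# CLI pairs win last.
for pair in cli_inputs:
    c, v = pair.split('=')
    merged[c] = {'channel': c, 'data': {'contents': v}}

assert merged['a']['data']['contents'] == '4'   # CLI beats file and template
assert merged['b']['data']['contents'] == '3'   # file input fills the gap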
def data(self, column, role):
"""Return the data for the specified column and role
The column addresses one attribute of the data.
:param column: the data column
:type column: int
:param role: the data role
:type role: QtCore.Qt.ItemDataRole
:returns: data depending on the role
:rtype:
:raises: None
"""
return self.columns[column](self._Department, role) | def function[data, parameter[self, column, role]]:
constant[Return the data for the specified column and role
The column addresses one attribute of the data.
:param column: the data column
:type column: int
:param role: the data role
:type role: QtCore.Qt.ItemDataRole
:returns: data depending on the role
:rtype:
:raises: None
]
return[call[call[name[self].columns][name[column]], parameter[name[self]._Department, name[role]]]] | keyword[def] identifier[data] ( identifier[self] , identifier[column] , identifier[role] ):
literal[string]
keyword[return] identifier[self] . identifier[columns] [ identifier[column] ]( identifier[self] . identifier[_Department] , identifier[role] ) | def data(self, column, role):
"""Return the data for the specified column and role
The column addresses one attribute of the data.
:param column: the data column
:type column: int
:param role: the data role
:type role: QtCore.Qt.ItemDataRole
:returns: data depending on the role
:rtype:
:raises: None
"""
return self.columns[column](self._Department, role) |
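The column dispatch above assumes `self.columns` is a list of callables, one per column, each taking the wrapped object and a role. A hedged, Qt-free sketch of that pattern (plain ints stand in for QtCore.Qt.ItemDataRole; all names here are illustrative):

class DeptItem:
    def __init__(self, dept):
        self._dept = dept
        # one callable per column: (data_object, role) -> value
        self.columns = [lambda d, role: d['name'],
                        lambda d, role: d['head']]

    def data(self, column, role):
        return self.columns[column](self._dept, role)

item = DeptItem({'name': 'R&D', 'head': 'Ada'})
assert item.data(0, 0) == 'R&D'
assert item.data(1, 0) == 'Ada'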
def options(self, context, module_options):
'''
PATH Path to the file containing raw shellcode to inject
PROCID Process ID to inject into (default: current powershell process)
'''
if not 'PATH' in module_options:
context.log.error('PATH option is required!')
exit(1)
self.shellcode_path = os.path.expanduser(module_options['PATH'])
if not os.path.exists(self.shellcode_path):
context.log.error('Invalid path to shellcode!')
exit(1)
self.procid = None
if 'PROCID' in module_options.keys():
self.procid = module_options['PROCID']
self.ps_script = obfs_ps_script('powersploit/CodeExecution/Invoke-Shellcode.ps1') | def function[options, parameter[self, context, module_options]]:
constant[
PATH Path to the file containing raw shellcode to inject
PROCID Process ID to inject into (default: current powershell process)
]
if <ast.UnaryOp object at 0x7da1b1c351b0> begin[:]
call[name[context].log.error, parameter[constant[PATH option is required!]]]
call[name[exit], parameter[constant[1]]]
name[self].shellcode_path assign[=] call[name[os].path.expanduser, parameter[call[name[module_options]][constant[PATH]]]]
if <ast.UnaryOp object at 0x7da1b1c35db0> begin[:]
call[name[context].log.error, parameter[constant[Invalid path to shellcode!]]]
call[name[exit], parameter[constant[1]]]
name[self].procid assign[=] constant[None]
if compare[constant[PROCID] in call[name[module_options].keys, parameter[]]] begin[:]
name[self].procid assign[=] call[name[module_options]][constant[PROCID]]
name[self].ps_script assign[=] call[name[obfs_ps_script], parameter[constant[powersploit/CodeExecution/Invoke-Shellcode.ps1]]] | keyword[def] identifier[options] ( identifier[self] , identifier[context] , identifier[module_options] ):
literal[string]
keyword[if] keyword[not] literal[string] keyword[in] identifier[module_options] :
identifier[context] . identifier[log] . identifier[error] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[self] . identifier[shellcode_path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[module_options] [ literal[string] ])
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[shellcode_path] ):
identifier[context] . identifier[log] . identifier[error] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[self] . identifier[procid] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[module_options] . identifier[keys] ():
identifier[self] . identifier[procid] = identifier[module_options] [ literal[string] ]
identifier[self] . identifier[ps_script] = identifier[obfs_ps_script] ( literal[string] ) | def options(self, context, module_options):
"""
PATH Path to the file containing raw shellcode to inject
PROCID Process ID to inject into (default: current powershell process)
"""
if not 'PATH' in module_options:
context.log.error('PATH option is required!')
exit(1) # depends on [control=['if'], data=[]]
self.shellcode_path = os.path.expanduser(module_options['PATH'])
if not os.path.exists(self.shellcode_path):
context.log.error('Invalid path to shellcode!')
exit(1) # depends on [control=['if'], data=[]]
self.procid = None
if 'PROCID' in module_options.keys():
self.procid = module_options['PROCID'] # depends on [control=['if'], data=[]]
self.ps_script = obfs_ps_script('powersploit/CodeExecution/Invoke-Shellcode.ps1') |
def getOrCreate(cls, conf=None):
"""
Get or instantiate a SparkContext and register it as a singleton object.
:param conf: SparkConf (optional)
"""
with SparkContext._lock:
if SparkContext._active_spark_context is None:
SparkContext(conf=conf or SparkConf())
return SparkContext._active_spark_context | def function[getOrCreate, parameter[cls, conf]]:
constant[
Get or instantiate a SparkContext and register it as a singleton object.
:param conf: SparkConf (optional)
]
with name[SparkContext]._lock begin[:]
if compare[name[SparkContext]._active_spark_context is constant[None]] begin[:]
call[name[SparkContext], parameter[]]
return[name[SparkContext]._active_spark_context] | keyword[def] identifier[getOrCreate] ( identifier[cls] , identifier[conf] = keyword[None] ):
literal[string]
keyword[with] identifier[SparkContext] . identifier[_lock] :
keyword[if] identifier[SparkContext] . identifier[_active_spark_context] keyword[is] keyword[None] :
identifier[SparkContext] ( identifier[conf] = identifier[conf] keyword[or] identifier[SparkConf] ())
keyword[return] identifier[SparkContext] . identifier[_active_spark_context] | def getOrCreate(cls, conf=None):
"""
Get or instantiate a SparkContext and register it as a singleton object.
:param conf: SparkConf (optional)
"""
with SparkContext._lock:
if SparkContext._active_spark_context is None:
SparkContext(conf=conf or SparkConf()) # depends on [control=['if'], data=[]]
return SparkContext._active_spark_context # depends on [control=['with'], data=[]] |
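The same double-checked, lock-guarded get-or-create pattern works outside Spark, under the assumption that the constructor registers itself as the active instance (as SparkContext's does). A self-contained sketch:

import threading

class Context:
    _lock = threading.Lock()
    _active = None

    def __init__(self, conf=None):
        self.conf = conf or {}
        Context._active = self          # constructor registers the singleton

    @classmethod
    def get_or_create(cls, conf=None):
        with cls._lock:
            if cls._active is None:
                cls(conf)
        return cls._active

a = Context.get_or_create({'app': 'demo'})
b = Context.get_or_create()
assert a is b                            # second call reuses the first context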
def from_array(array):
"""
Deserialize a new OrderInfo from a given dictionary.
:return: new OrderInfo instance.
:rtype: OrderInfo
"""
if array is None or not array:
return None
# end if
assert_type_or_raise(array, dict, parameter_name="array")
from pytgbot.api_types.receivable.payments import ShippingAddress
data = {}
data['name'] = u(array.get('name')) if array.get('name') is not None else None
data['phone_number'] = u(array.get('phone_number')) if array.get('phone_number') is not None else None
data['email'] = u(array.get('email')) if array.get('email') is not None else None
data['shipping_address'] = ShippingAddress.from_array(array.get('shipping_address')) if array.get('shipping_address') is not None else None
data['_raw'] = array
return OrderInfo(**data) | def function[from_array, parameter[array]]:
constant[
Deserialize a new OrderInfo from a given dictionary.
:return: new OrderInfo instance.
:rtype: OrderInfo
]
if <ast.BoolOp object at 0x7da1b0400ac0> begin[:]
return[constant[None]]
call[name[assert_type_or_raise], parameter[name[array], name[dict]]]
from relative_module[pytgbot.api_types.receivable.payments] import module[ShippingAddress]
variable[data] assign[=] dictionary[[], []]
call[name[data]][constant[name]] assign[=] <ast.IfExp object at 0x7da18bc73100>
call[name[data]][constant[phone_number]] assign[=] <ast.IfExp object at 0x7da18bc739d0>
call[name[data]][constant[email]] assign[=] <ast.IfExp object at 0x7da18f58ca00>
call[name[data]][constant[shipping_address]] assign[=] <ast.IfExp object at 0x7da18f58fa00>
call[name[data]][constant[_raw]] assign[=] name[array]
return[call[name[OrderInfo], parameter[]]] | keyword[def] identifier[from_array] ( identifier[array] ):
literal[string]
keyword[if] identifier[array] keyword[is] keyword[None] keyword[or] keyword[not] identifier[array] :
keyword[return] keyword[None]
identifier[assert_type_or_raise] ( identifier[array] , identifier[dict] , identifier[parameter_name] = literal[string] )
keyword[from] identifier[pytgbot] . identifier[api_types] . identifier[receivable] . identifier[payments] keyword[import] identifier[ShippingAddress]
identifier[data] ={}
identifier[data] [ literal[string] ]= identifier[u] ( identifier[array] . identifier[get] ( literal[string] )) keyword[if] identifier[array] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
identifier[data] [ literal[string] ]= identifier[u] ( identifier[array] . identifier[get] ( literal[string] )) keyword[if] identifier[array] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
identifier[data] [ literal[string] ]= identifier[u] ( identifier[array] . identifier[get] ( literal[string] )) keyword[if] identifier[array] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
identifier[data] [ literal[string] ]= identifier[ShippingAddress] . identifier[from_array] ( identifier[array] . identifier[get] ( literal[string] )) keyword[if] identifier[array] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] keyword[else] keyword[None]
identifier[data] [ literal[string] ]= identifier[array]
keyword[return] identifier[OrderInfo] (** identifier[data] ) | def from_array(array):
"""
Deserialize a new OrderInfo from a given dictionary.
:return: new OrderInfo instance.
:rtype: OrderInfo
"""
if array is None or not array:
return None # depends on [control=['if'], data=[]]
# end if
assert_type_or_raise(array, dict, parameter_name='array')
from pytgbot.api_types.receivable.payments import ShippingAddress
data = {}
data['name'] = u(array.get('name')) if array.get('name') is not None else None
data['phone_number'] = u(array.get('phone_number')) if array.get('phone_number') is not None else None
data['email'] = u(array.get('email')) if array.get('email') is not None else None
data['shipping_address'] = ShippingAddress.from_array(array.get('shipping_address')) if array.get('shipping_address') is not None else None
data['_raw'] = array
return OrderInfo(**data) |
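A self-contained sketch of the defensive deserialization pattern above: reject None/empty input, then copy each optional field only when present. The field subset below is trimmed for illustration and is not the full OrderInfo schema.

def from_array(array):
    if array is None or not array:       # treat None and {} the same way
        return None
    data = {}
    data['name'] = str(array.get('name')) if array.get('name') is not None else None
    data['email'] = str(array.get('email')) if array.get('email') is not None else None
    return data

assert from_array({}) is None
assert from_array({'name': 'Jane'})['name'] == 'Jane'
assert from_array({'name': 'Jane'})['email'] is None    # absent field -> None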
def _authenticate_client(self, client):
"""Authenticate the client if necessary."""
if self.login and not self.restart_required:
try:
db = client[self.auth_source]
if self.x509_extra_user:
db.authenticate(
DEFAULT_SUBJECT,
mechanism='MONGODB-X509'
)
else:
db.authenticate(
self.login, self.password)
except Exception:
logger.exception(
"Could not authenticate to %r as %s/%s"
% (client, self.login, self.password))
raise | def function[_authenticate_client, parameter[self, client]]:
constant[Authenticate the client if necessary.]
if <ast.BoolOp object at 0x7da20c6e6bf0> begin[:]
<ast.Try object at 0x7da20c6e4ac0> | keyword[def] identifier[_authenticate_client] ( identifier[self] , identifier[client] ):
literal[string]
keyword[if] identifier[self] . identifier[login] keyword[and] keyword[not] identifier[self] . identifier[restart_required] :
keyword[try] :
identifier[db] = identifier[client] [ identifier[self] . identifier[auth_source] ]
keyword[if] identifier[self] . identifier[x509_extra_user] :
identifier[db] . identifier[authenticate] (
identifier[DEFAULT_SUBJECT] ,
identifier[mechanism] = literal[string]
)
keyword[else] :
identifier[db] . identifier[authenticate] (
identifier[self] . identifier[login] , identifier[self] . identifier[password] )
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] (
literal[string]
%( identifier[client] , identifier[self] . identifier[login] , identifier[self] . identifier[password] ))
keyword[raise] | def _authenticate_client(self, client):
"""Authenticate the client if necessary."""
if self.login and (not self.restart_required):
try:
db = client[self.auth_source]
if self.x509_extra_user:
db.authenticate(DEFAULT_SUBJECT, mechanism='MONGODB-X509') # depends on [control=['if'], data=[]]
else:
db.authenticate(self.login, self.password) # depends on [control=['try'], data=[]]
except Exception:
logger.exception('Could not authenticate to %r as %s/%s' % (client, self.login, self.password))
raise # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def add_section(self):
"""Add a section, a sub-CodeBuilder."""
sect = CodeBuilder(self.indent_amount)
self.code.append(sect)
return sect | def function[add_section, parameter[self]]:
constant[Add a section, a sub-CodeBuilder.]
variable[sect] assign[=] call[name[CodeBuilder], parameter[name[self].indent_amount]]
call[name[self].code.append, parameter[name[sect]]]
return[name[sect]] | keyword[def] identifier[add_section] ( identifier[self] ):
literal[string]
identifier[sect] = identifier[CodeBuilder] ( identifier[self] . identifier[indent_amount] )
identifier[self] . identifier[code] . identifier[append] ( identifier[sect] )
keyword[return] identifier[sect] | def add_section(self):
"""Add a section, a sub-CodeBuilder."""
sect = CodeBuilder(self.indent_amount)
self.code.append(sect)
return sect |
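Returning a sub-builder is what makes this useful: a slot can be reserved early and filled after later code is known. A hedged sketch; only add_section itself is taken from the original, the rest is assumed scaffolding:

class CodeBuilder:
    def __init__(self, indent=0):
        self.code = []
        self.indent_amount = indent

    def add_line(self, line):
        self.code.append(' ' * self.indent_amount + line + '\n')

    def add_section(self):
        sect = CodeBuilder(self.indent_amount)
        self.code.append(sect)           # placeholder, filled in later
        return sect

    def __str__(self):
        return ''.join(str(c) for c in self.code)

b = CodeBuilder()
b.add_line('def f():')
decls = b.add_section()                  # reserve a slot for declarations
b.add_line('    return x')
decls.add_line('    x = 1')              # fill the slot afterwards
assert str(b) == 'def f():\n    x = 1\n    return x\n'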
def get_plot(self, width=8, height=8):
"""
Returns a plot object.
Args:
width: Width of the plot. Defaults to 8 in.
            height: Height of the plot. Defaults to 8 in.
Returns:
A matplotlib plot object.
"""
plt = pretty_plot(width, height)
for label, electrode in self._electrodes.items():
(x, y) = self.get_plot_data(electrode)
plt.plot(x, y, '-', linewidth=2, label=label)
plt.legend()
if self.xaxis == "capacity":
plt.xlabel('Capacity (mAh/g)')
else:
plt.xlabel('Fraction')
plt.ylabel('Voltage (V)')
plt.tight_layout()
return plt | def function[get_plot, parameter[self, width, height]]:
constant[
Returns a plot object.
Args:
width: Width of the plot. Defaults to 8 in.
        height: Height of the plot. Defaults to 8 in.
Returns:
A matplotlib plot object.
]
variable[plt] assign[=] call[name[pretty_plot], parameter[name[width], name[height]]]
for taget[tuple[[<ast.Name object at 0x7da20c76fa90>, <ast.Name object at 0x7da20c76e050>]]] in starred[call[name[self]._electrodes.items, parameter[]]] begin[:]
<ast.Tuple object at 0x7da20c76e770> assign[=] call[name[self].get_plot_data, parameter[name[electrode]]]
call[name[plt].plot, parameter[name[x], name[y], constant[-]]]
call[name[plt].legend, parameter[]]
if compare[name[self].xaxis equal[==] constant[capacity]] begin[:]
call[name[plt].xlabel, parameter[constant[Capacity (mAh/g)]]]
call[name[plt].ylabel, parameter[constant[Voltage (V)]]]
call[name[plt].tight_layout, parameter[]]
return[name[plt]] | keyword[def] identifier[get_plot] ( identifier[self] , identifier[width] = literal[int] , identifier[height] = literal[int] ):
literal[string]
identifier[plt] = identifier[pretty_plot] ( identifier[width] , identifier[height] )
keyword[for] identifier[label] , identifier[electrode] keyword[in] identifier[self] . identifier[_electrodes] . identifier[items] ():
( identifier[x] , identifier[y] )= identifier[self] . identifier[get_plot_data] ( identifier[electrode] )
identifier[plt] . identifier[plot] ( identifier[x] , identifier[y] , literal[string] , identifier[linewidth] = literal[int] , identifier[label] = identifier[label] )
identifier[plt] . identifier[legend] ()
keyword[if] identifier[self] . identifier[xaxis] == literal[string] :
identifier[plt] . identifier[xlabel] ( literal[string] )
keyword[else] :
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( literal[string] )
identifier[plt] . identifier[tight_layout] ()
keyword[return] identifier[plt] | def get_plot(self, width=8, height=8):
"""
Returns a plot object.
Args:
width: Width of the plot. Defaults to 8 in.
height: Height of the plot. Defaults to 6 in.
Returns:
A matplotlib plot object.
"""
plt = pretty_plot(width, height)
for (label, electrode) in self._electrodes.items():
(x, y) = self.get_plot_data(electrode)
plt.plot(x, y, '-', linewidth=2, label=label) # depends on [control=['for'], data=[]]
plt.legend()
if self.xaxis == 'capacity':
plt.xlabel('Capacity (mAh/g)') # depends on [control=['if'], data=[]]
else:
plt.xlabel('Fraction')
plt.ylabel('Voltage (V)')
plt.tight_layout()
return plt |
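A plain-matplotlib sketch of the label -> series loop above, with invented capacity/voltage data (pretty_plot appears to be a styled pyplot wrapper, so vanilla pyplot is substituted here):

import matplotlib.pyplot as plt

electrodes = {'LiCoO2': ([0, 70, 140], [4.2, 3.9, 3.6]),
              'LiFePO4': ([0, 80, 160], [3.5, 3.4, 3.3])}   # invented data
for label, (x, y) in electrodes.items():
    plt.plot(x, y, '-', linewidth=2, label=label)
plt.legend()
plt.xlabel('Capacity (mAh/g)')
plt.ylabel('Voltage (V)')
plt.tight_layout()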
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
"""
values_dict = {}
if registry_key.number_of_values == 0:
values_dict['Value'] = 'No values stored in key.'
else:
for registry_value in registry_key.GetValues():
value_name = registry_value.name or '(default)'
if registry_value.data is None:
value_string = '[{0:s}] Empty'.format(
registry_value.data_type_string)
elif registry_value.DataIsString():
value_string = registry_value.GetDataAsObject()
value_string = '[{0:s}] {1:s}'.format(
registry_value.data_type_string, value_string)
elif registry_value.DataIsInteger():
value_integer = registry_value.GetDataAsObject()
value_string = '[{0:s}] {1:d}'.format(
registry_value.data_type_string, value_integer)
elif registry_value.DataIsMultiString():
multi_string = registry_value.GetDataAsObject()
if not isinstance(multi_string, (list, tuple)):
value_string = '[{0:s}]'.format(registry_value.data_type_string)
# TODO: Add a flag or some sort of an anomaly alert.
else:
value_string = '[{0:s}] {1:s}'.format(
registry_value.data_type_string, ''.join(multi_string))
else:
value_string = '[{0:s}]'.format(registry_value.data_type_string)
values_dict[value_name] = value_string
event_data = windows_events.WindowsRegistryEventData()
event_data.key_path = registry_key.path
event_data.offset = registry_key.offset
event_data.regvalue = values_dict
event = time_events.DateTimeValuesEvent(
registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
parser_mediator.ProduceEventWithEventData(event, event_data) | def function[ExtractEvents, parameter[self, parser_mediator, registry_key]]:
constant[Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
]
variable[values_dict] assign[=] dictionary[[], []]
if compare[name[registry_key].number_of_values equal[==] constant[0]] begin[:]
call[name[values_dict]][constant[Value]] assign[=] constant[No values stored in key.]
variable[event_data] assign[=] call[name[windows_events].WindowsRegistryEventData, parameter[]]
name[event_data].key_path assign[=] name[registry_key].path
name[event_data].offset assign[=] name[registry_key].offset
name[event_data].regvalue assign[=] name[values_dict]
variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[registry_key].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]]
call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] | keyword[def] identifier[ExtractEvents] ( identifier[self] , identifier[parser_mediator] , identifier[registry_key] ,** identifier[kwargs] ):
literal[string]
identifier[values_dict] ={}
keyword[if] identifier[registry_key] . identifier[number_of_values] == literal[int] :
identifier[values_dict] [ literal[string] ]= literal[string]
keyword[else] :
keyword[for] identifier[registry_value] keyword[in] identifier[registry_key] . identifier[GetValues] ():
identifier[value_name] = identifier[registry_value] . identifier[name] keyword[or] literal[string]
keyword[if] identifier[registry_value] . identifier[data] keyword[is] keyword[None] :
identifier[value_string] = literal[string] . identifier[format] (
identifier[registry_value] . identifier[data_type_string] )
keyword[elif] identifier[registry_value] . identifier[DataIsString] ():
identifier[value_string] = identifier[registry_value] . identifier[GetDataAsObject] ()
identifier[value_string] = literal[string] . identifier[format] (
identifier[registry_value] . identifier[data_type_string] , identifier[value_string] )
keyword[elif] identifier[registry_value] . identifier[DataIsInteger] ():
identifier[value_integer] = identifier[registry_value] . identifier[GetDataAsObject] ()
identifier[value_string] = literal[string] . identifier[format] (
identifier[registry_value] . identifier[data_type_string] , identifier[value_integer] )
keyword[elif] identifier[registry_value] . identifier[DataIsMultiString] ():
identifier[multi_string] = identifier[registry_value] . identifier[GetDataAsObject] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[multi_string] ,( identifier[list] , identifier[tuple] )):
identifier[value_string] = literal[string] . identifier[format] ( identifier[registry_value] . identifier[data_type_string] )
keyword[else] :
identifier[value_string] = literal[string] . identifier[format] (
identifier[registry_value] . identifier[data_type_string] , literal[string] . identifier[join] ( identifier[multi_string] ))
keyword[else] :
identifier[value_string] = literal[string] . identifier[format] ( identifier[registry_value] . identifier[data_type_string] )
identifier[values_dict] [ identifier[value_name] ]= identifier[value_string]
identifier[event_data] = identifier[windows_events] . identifier[WindowsRegistryEventData] ()
identifier[event_data] . identifier[key_path] = identifier[registry_key] . identifier[path]
identifier[event_data] . identifier[offset] = identifier[registry_key] . identifier[offset]
identifier[event_data] . identifier[regvalue] = identifier[values_dict]
identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] (
identifier[registry_key] . identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] )
identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) | def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
"""
values_dict = {}
if registry_key.number_of_values == 0:
values_dict['Value'] = 'No values stored in key.' # depends on [control=['if'], data=[]]
else:
for registry_value in registry_key.GetValues():
value_name = registry_value.name or '(default)'
if registry_value.data is None:
value_string = '[{0:s}] Empty'.format(registry_value.data_type_string) # depends on [control=['if'], data=[]]
elif registry_value.DataIsString():
value_string = registry_value.GetDataAsObject()
value_string = '[{0:s}] {1:s}'.format(registry_value.data_type_string, value_string) # depends on [control=['if'], data=[]]
elif registry_value.DataIsInteger():
value_integer = registry_value.GetDataAsObject()
value_string = '[{0:s}] {1:d}'.format(registry_value.data_type_string, value_integer) # depends on [control=['if'], data=[]]
elif registry_value.DataIsMultiString():
multi_string = registry_value.GetDataAsObject()
if not isinstance(multi_string, (list, tuple)):
value_string = '[{0:s}]'.format(registry_value.data_type_string) # depends on [control=['if'], data=[]]
else:
# TODO: Add a flag or some sort of an anomaly alert.
value_string = '[{0:s}] {1:s}'.format(registry_value.data_type_string, ''.join(multi_string)) # depends on [control=['if'], data=[]]
else:
value_string = '[{0:s}]'.format(registry_value.data_type_string)
values_dict[value_name] = value_string # depends on [control=['for'], data=['registry_value']]
event_data = windows_events.WindowsRegistryEventData()
event_data.key_path = registry_key.path
event_data.offset = registry_key.offset
event_data.regvalue = values_dict
event = time_events.DateTimeValuesEvent(registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
parser_mediator.ProduceEventWithEventData(event, event_data) |
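The per-type branches above all reduce to a '[data_type] value' string convention. That formatting logic in isolation, with plain Python types standing in for the dfwinreg value objects:

def format_value(type_string, data):
    if data is None:
        return '[{0:s}] Empty'.format(type_string)
    if isinstance(data, str):
        return '[{0:s}] {1:s}'.format(type_string, data)
    if isinstance(data, int):
        return '[{0:s}] {1:d}'.format(type_string, data)
    if isinstance(data, (list, tuple)):                  # multi-string case
        return '[{0:s}] {1:s}'.format(type_string, ''.join(data))
    return '[{0:s}]'.format(type_string)

assert format_value('REG_SZ', 'hello') == '[REG_SZ] hello'
assert format_value('REG_DWORD', 42) == '[REG_DWORD] 42'
assert format_value('REG_BINARY', None) == '[REG_BINARY] Empty'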
def register_monitors(self, *monitors):
"""
        Register monitors; each should be a tuple of (name, Theano variable).
"""
for key, node in monitors:
if key not in self._registered_monitors:
node *= 1.0 # Avoid CudaNdarray
self.training_monitors.append((key, node))
self.testing_monitors.append((key, node))
self._registered_monitors.add(key) | def function[register_monitors, parameter[self]]:
constant[
        Register monitors; each should be a tuple of (name, Theano variable).
]
for taget[tuple[[<ast.Name object at 0x7da1b05c4250>, <ast.Name object at 0x7da1b05c66e0>]]] in starred[name[monitors]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]._registered_monitors] begin[:]
<ast.AugAssign object at 0x7da1b05c6020>
call[name[self].training_monitors.append, parameter[tuple[[<ast.Name object at 0x7da1b05c46d0>, <ast.Name object at 0x7da1b05c76a0>]]]]
call[name[self].testing_monitors.append, parameter[tuple[[<ast.Name object at 0x7da1b05c57b0>, <ast.Name object at 0x7da1b05c4ee0>]]]]
call[name[self]._registered_monitors.add, parameter[name[key]]] | keyword[def] identifier[register_monitors] ( identifier[self] ,* identifier[monitors] ):
literal[string]
keyword[for] identifier[key] , identifier[node] keyword[in] identifier[monitors] :
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_registered_monitors] :
identifier[node] *= literal[int]
identifier[self] . identifier[training_monitors] . identifier[append] (( identifier[key] , identifier[node] ))
identifier[self] . identifier[testing_monitors] . identifier[append] (( identifier[key] , identifier[node] ))
identifier[self] . identifier[_registered_monitors] . identifier[add] ( identifier[key] ) | def register_monitors(self, *monitors):
"""
        Register monitors; each should be a tuple of (name, Theano variable).
"""
for (key, node) in monitors:
if key not in self._registered_monitors:
node *= 1.0 # Avoid CudaNdarray
self.training_monitors.append((key, node))
self.testing_monitors.append((key, node))
self._registered_monitors.add(key) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] |
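Usage sketch of the dedup-by-name behaviour, with floats standing in for Theano variables (any object that supports *= 1.0 works; the class shell is assumed):

class Net:
    def __init__(self):
        self.training_monitors = []
        self.testing_monitors = []
        self._registered_monitors = set()

    def register_monitors(self, *monitors):
        for key, node in monitors:
            if key not in self._registered_monitors:
                node *= 1.0              # the CudaNdarray workaround above
                self.training_monitors.append((key, node))
                self.testing_monitors.append((key, node))
                self._registered_monitors.add(key)

net = Net()
net.register_monitors(('loss', 0.5), ('loss', 0.9), ('acc', 0.1))
assert [k for k, _ in net.training_monitors] == ['loss', 'acc']   # duplicate key dropped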
def get_qc_tools(data):
"""Retrieve a list of QC tools to use based on configuration and analysis type.
    Uses the previously configured list if one is already set; otherwise builds it from the analysis type and configuration.
"""
if dd.get_algorithm_qc(data):
return dd.get_algorithm_qc(data)
analysis = data["analysis"].lower()
to_run = []
if tz.get_in(["config", "algorithm", "kraken"], data):
to_run.append("kraken")
if "fastqc" not in dd.get_tools_off(data):
to_run.append("fastqc")
if any([tool in dd.get_tools_on(data)
for tool in ["qualimap", "qualimap_full"]]):
to_run.append("qualimap")
if analysis.startswith("rna-seq") or analysis == "smallrna-seq":
if "qualimap" not in dd.get_tools_off(data):
if gtf.is_qualimap_compatible(dd.get_gtf_file(data)):
to_run.append("qualimap_rnaseq")
else:
logger.debug("GTF not compatible with Qualimap, skipping.")
if analysis.startswith("chip-seq"):
to_run.append("chipqc")
if analysis.startswith("smallrna-seq"):
to_run.append("small-rna")
to_run.append("atropos")
if "coverage_qc" not in dd.get_tools_off(data):
to_run.append("samtools")
if dd.has_variantcalls(data):
if "coverage_qc" not in dd.get_tools_off(data):
to_run += ["coverage", "picard"]
to_run += ["qsignature", "variants"]
if vcfanno.is_human(data):
to_run += ["contamination", "peddy"]
if vcfutils.get_paired_phenotype(data):
to_run += ["viral"]
if damage.should_filter([data]):
to_run += ["damage"]
if dd.get_umi_consensus(data):
to_run += ["umi"]
if tz.get_in(["config", "algorithm", "preseq"], data):
to_run.append("preseq")
to_run = [tool for tool in to_run if tool not in dd.get_tools_off(data)]
to_run.sort()
return to_run | def function[get_qc_tools, parameter[data]]:
constant[Retrieve a list of QC tools to use based on configuration and analysis type.
    Uses the previously configured list if one is already set; otherwise builds it from the analysis type and configuration.
]
if call[name[dd].get_algorithm_qc, parameter[name[data]]] begin[:]
return[call[name[dd].get_algorithm_qc, parameter[name[data]]]]
variable[analysis] assign[=] call[call[name[data]][constant[analysis]].lower, parameter[]]
variable[to_run] assign[=] list[[]]
if call[name[tz].get_in, parameter[list[[<ast.Constant object at 0x7da18f09e5c0>, <ast.Constant object at 0x7da18f09dde0>, <ast.Constant object at 0x7da18f09d2a0>]], name[data]]] begin[:]
call[name[to_run].append, parameter[constant[kraken]]]
if compare[constant[fastqc] <ast.NotIn object at 0x7da2590d7190> call[name[dd].get_tools_off, parameter[name[data]]]] begin[:]
call[name[to_run].append, parameter[constant[fastqc]]]
if call[name[any], parameter[<ast.ListComp object at 0x7da18f09c970>]] begin[:]
call[name[to_run].append, parameter[constant[qualimap]]]
if <ast.BoolOp object at 0x7da1b18d26e0> begin[:]
if compare[constant[qualimap] <ast.NotIn object at 0x7da2590d7190> call[name[dd].get_tools_off, parameter[name[data]]]] begin[:]
if call[name[gtf].is_qualimap_compatible, parameter[call[name[dd].get_gtf_file, parameter[name[data]]]]] begin[:]
call[name[to_run].append, parameter[constant[qualimap_rnaseq]]]
if call[name[analysis].startswith, parameter[constant[chip-seq]]] begin[:]
call[name[to_run].append, parameter[constant[chipqc]]]
if call[name[analysis].startswith, parameter[constant[smallrna-seq]]] begin[:]
call[name[to_run].append, parameter[constant[small-rna]]]
call[name[to_run].append, parameter[constant[atropos]]]
if compare[constant[coverage_qc] <ast.NotIn object at 0x7da2590d7190> call[name[dd].get_tools_off, parameter[name[data]]]] begin[:]
call[name[to_run].append, parameter[constant[samtools]]]
if call[name[dd].has_variantcalls, parameter[name[data]]] begin[:]
if compare[constant[coverage_qc] <ast.NotIn object at 0x7da2590d7190> call[name[dd].get_tools_off, parameter[name[data]]]] begin[:]
<ast.AugAssign object at 0x7da1b1848a00>
<ast.AugAssign object at 0x7da1b184af80>
if call[name[vcfanno].is_human, parameter[name[data]]] begin[:]
<ast.AugAssign object at 0x7da1b1848df0>
if call[name[vcfutils].get_paired_phenotype, parameter[name[data]]] begin[:]
<ast.AugAssign object at 0x7da1b184a380>
if call[name[damage].should_filter, parameter[list[[<ast.Name object at 0x7da1b1884400>]]]] begin[:]
<ast.AugAssign object at 0x7da1b1887640>
if call[name[dd].get_umi_consensus, parameter[name[data]]] begin[:]
<ast.AugAssign object at 0x7da1b1884f40>
if call[name[tz].get_in, parameter[list[[<ast.Constant object at 0x7da1b1887e20>, <ast.Constant object at 0x7da1b1887be0>, <ast.Constant object at 0x7da1b1884cd0>]], name[data]]] begin[:]
call[name[to_run].append, parameter[constant[preseq]]]
variable[to_run] assign[=] <ast.ListComp object at 0x7da1b1887ca0>
call[name[to_run].sort, parameter[]]
return[name[to_run]] | keyword[def] identifier[get_qc_tools] ( identifier[data] ):
literal[string]
keyword[if] identifier[dd] . identifier[get_algorithm_qc] ( identifier[data] ):
keyword[return] identifier[dd] . identifier[get_algorithm_qc] ( identifier[data] )
identifier[analysis] = identifier[data] [ literal[string] ]. identifier[lower] ()
identifier[to_run] =[]
keyword[if] identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] , literal[string] ], identifier[data] ):
identifier[to_run] . identifier[append] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[dd] . identifier[get_tools_off] ( identifier[data] ):
identifier[to_run] . identifier[append] ( literal[string] )
keyword[if] identifier[any] ([ identifier[tool] keyword[in] identifier[dd] . identifier[get_tools_on] ( identifier[data] )
keyword[for] identifier[tool] keyword[in] [ literal[string] , literal[string] ]]):
identifier[to_run] . identifier[append] ( literal[string] )
keyword[if] identifier[analysis] . identifier[startswith] ( literal[string] ) keyword[or] identifier[analysis] == literal[string] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[dd] . identifier[get_tools_off] ( identifier[data] ):
keyword[if] identifier[gtf] . identifier[is_qualimap_compatible] ( identifier[dd] . identifier[get_gtf_file] ( identifier[data] )):
identifier[to_run] . identifier[append] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[analysis] . identifier[startswith] ( literal[string] ):
identifier[to_run] . identifier[append] ( literal[string] )
keyword[if] identifier[analysis] . identifier[startswith] ( literal[string] ):
identifier[to_run] . identifier[append] ( literal[string] )
identifier[to_run] . identifier[append] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[dd] . identifier[get_tools_off] ( identifier[data] ):
identifier[to_run] . identifier[append] ( literal[string] )
keyword[if] identifier[dd] . identifier[has_variantcalls] ( identifier[data] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[dd] . identifier[get_tools_off] ( identifier[data] ):
identifier[to_run] +=[ literal[string] , literal[string] ]
identifier[to_run] +=[ literal[string] , literal[string] ]
keyword[if] identifier[vcfanno] . identifier[is_human] ( identifier[data] ):
identifier[to_run] +=[ literal[string] , literal[string] ]
keyword[if] identifier[vcfutils] . identifier[get_paired_phenotype] ( identifier[data] ):
identifier[to_run] +=[ literal[string] ]
keyword[if] identifier[damage] . identifier[should_filter] ([ identifier[data] ]):
identifier[to_run] +=[ literal[string] ]
keyword[if] identifier[dd] . identifier[get_umi_consensus] ( identifier[data] ):
identifier[to_run] +=[ literal[string] ]
keyword[if] identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] , literal[string] ], identifier[data] ):
identifier[to_run] . identifier[append] ( literal[string] )
identifier[to_run] =[ identifier[tool] keyword[for] identifier[tool] keyword[in] identifier[to_run] keyword[if] identifier[tool] keyword[not] keyword[in] identifier[dd] . identifier[get_tools_off] ( identifier[data] )]
identifier[to_run] . identifier[sort] ()
keyword[return] identifier[to_run] | def get_qc_tools(data):
"""Retrieve a list of QC tools to use based on configuration and analysis type.
    Uses the previously configured list if one is already set; otherwise builds it from the analysis type and configuration.
"""
if dd.get_algorithm_qc(data):
return dd.get_algorithm_qc(data) # depends on [control=['if'], data=[]]
analysis = data['analysis'].lower()
to_run = []
if tz.get_in(['config', 'algorithm', 'kraken'], data):
to_run.append('kraken') # depends on [control=['if'], data=[]]
if 'fastqc' not in dd.get_tools_off(data):
to_run.append('fastqc') # depends on [control=['if'], data=[]]
if any([tool in dd.get_tools_on(data) for tool in ['qualimap', 'qualimap_full']]):
to_run.append('qualimap') # depends on [control=['if'], data=[]]
if analysis.startswith('rna-seq') or analysis == 'smallrna-seq':
if 'qualimap' not in dd.get_tools_off(data):
if gtf.is_qualimap_compatible(dd.get_gtf_file(data)):
to_run.append('qualimap_rnaseq') # depends on [control=['if'], data=[]]
else:
logger.debug('GTF not compatible with Qualimap, skipping.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if analysis.startswith('chip-seq'):
to_run.append('chipqc') # depends on [control=['if'], data=[]]
if analysis.startswith('smallrna-seq'):
to_run.append('small-rna')
to_run.append('atropos') # depends on [control=['if'], data=[]]
if 'coverage_qc' not in dd.get_tools_off(data):
to_run.append('samtools') # depends on [control=['if'], data=[]]
if dd.has_variantcalls(data):
if 'coverage_qc' not in dd.get_tools_off(data):
to_run += ['coverage', 'picard'] # depends on [control=['if'], data=[]]
to_run += ['qsignature', 'variants']
if vcfanno.is_human(data):
to_run += ['contamination', 'peddy'] # depends on [control=['if'], data=[]]
if vcfutils.get_paired_phenotype(data):
to_run += ['viral'] # depends on [control=['if'], data=[]]
if damage.should_filter([data]):
to_run += ['damage'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if dd.get_umi_consensus(data):
to_run += ['umi'] # depends on [control=['if'], data=[]]
if tz.get_in(['config', 'algorithm', 'preseq'], data):
to_run.append('preseq') # depends on [control=['if'], data=[]]
to_run = [tool for tool in to_run if tool not in dd.get_tools_off(data)]
to_run.sort()
return to_run |
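The closing filter-and-sort step means the order of the append calls never leaks into the result: anything listed in tools_off is dropped, then the list is sorted for determinism. In isolation, with an invented configuration:

to_run = ['samtools', 'fastqc', 'coverage', 'picard']
tools_off = {'picard'}                   # invented config value
to_run = [tool for tool in to_run if tool not in tools_off]
to_run.sort()
assert to_run == ['coverage', 'fastqc', 'samtools']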
def add_edges(self, edges, src_field=None, dst_field=None):
"""
Add edges to the SGraph. Edges should be input as a list of
:class:`~turicreate.Edge` objects, an :class:`~turicreate.SFrame`, or a
Pandas DataFrame. If the new edges are in an SFrame or DataFrame, then
``src_field`` and ``dst_field`` are required to specify the columns that
contain the source and destination vertex IDs; additional columns are
treated as edge attributes. If these attributes are not already present
in the graph's edge data, they are added, with existing edges acquiring
the value ``None``.
Parameters
----------
edges : Edge | list [Edge] | pandas.DataFrame | SFrame
Edge data. If the edges are in an SFrame or DataFrame, then
``src_field`` and ``dst_field`` are required to specify the columns
that contain the source and destination vertex IDs. Additional
columns are treated as edge attributes.
src_field : string, optional
Column in the SFrame or DataFrame to use as source vertex IDs. Not
required if ``edges`` is a list.
dst_field : string, optional
Column in the SFrame or Pandas DataFrame to use as destination
vertex IDs. Not required if ``edges`` is a list.
Returns
-------
out : SGraph
A new SGraph with `edges` added.
See Also
--------
edges, SFrame, add_vertices
Notes
-----
- If an edge is added whose source and destination IDs match edges that
already exist in the graph, a new edge is added to the graph. This
contrasts with :py:func:`add_vertices`, which overwrites existing
vertices.
Examples
--------
>>> from turicreate import SGraph, Vertex, Edge, SFrame
>>> g = SGraph()
>>> verts = [Vertex(0, attr={'breed': 'labrador'}),
Vertex(1, attr={'breed': 'labrador'}),
Vertex(2, attr={'breed': 'vizsla'})]
>>> g = g.add_vertices(verts)
Add a single edge.
>>> g = g.add_edges(Edge(1, 2))
Add a list of edges.
>>> g = g.add_edges([Edge(0, 2), Edge(1, 2)])
Add edges from an SFrame.
>>> sf_edge = SFrame({'source': [0, 1], 'dest': [2, 2]})
>>> g = g.add_edges(sf_edge, src_field='source', dst_field='dest')
"""
sf = _edge_data_to_sframe(edges, src_field, dst_field)
with cython_context():
proxy = self.__proxy__.add_edges(sf.__proxy__, _SRC_VID_COLUMN, _DST_VID_COLUMN)
return SGraph(_proxy=proxy) | def function[add_edges, parameter[self, edges, src_field, dst_field]]:
constant[
Add edges to the SGraph. Edges should be input as a list of
:class:`~turicreate.Edge` objects, an :class:`~turicreate.SFrame`, or a
Pandas DataFrame. If the new edges are in an SFrame or DataFrame, then
``src_field`` and ``dst_field`` are required to specify the columns that
contain the source and destination vertex IDs; additional columns are
treated as edge attributes. If these attributes are not already present
in the graph's edge data, they are added, with existing edges acquiring
the value ``None``.
Parameters
----------
edges : Edge | list [Edge] | pandas.DataFrame | SFrame
Edge data. If the edges are in an SFrame or DataFrame, then
``src_field`` and ``dst_field`` are required to specify the columns
that contain the source and destination vertex IDs. Additional
columns are treated as edge attributes.
src_field : string, optional
Column in the SFrame or DataFrame to use as source vertex IDs. Not
required if ``edges`` is a list.
dst_field : string, optional
Column in the SFrame or Pandas DataFrame to use as destination
vertex IDs. Not required if ``edges`` is a list.
Returns
-------
out : SGraph
A new SGraph with `edges` added.
See Also
--------
edges, SFrame, add_vertices
Notes
-----
- If an edge is added whose source and destination IDs match edges that
already exist in the graph, a new edge is added to the graph. This
contrasts with :py:func:`add_vertices`, which overwrites existing
vertices.
Examples
--------
>>> from turicreate import SGraph, Vertex, Edge, SFrame
>>> g = SGraph()
>>> verts = [Vertex(0, attr={'breed': 'labrador'}),
Vertex(1, attr={'breed': 'labrador'}),
Vertex(2, attr={'breed': 'vizsla'})]
>>> g = g.add_vertices(verts)
Add a single edge.
>>> g = g.add_edges(Edge(1, 2))
Add a list of edges.
>>> g = g.add_edges([Edge(0, 2), Edge(1, 2)])
Add edges from an SFrame.
>>> sf_edge = SFrame({'source': [0, 1], 'dest': [2, 2]})
>>> g = g.add_edges(sf_edge, src_field='source', dst_field='dest')
]
variable[sf] assign[=] call[name[_edge_data_to_sframe], parameter[name[edges], name[src_field], name[dst_field]]]
with call[name[cython_context], parameter[]] begin[:]
variable[proxy] assign[=] call[name[self].__proxy__.add_edges, parameter[name[sf].__proxy__, name[_SRC_VID_COLUMN], name[_DST_VID_COLUMN]]]
return[call[name[SGraph], parameter[]]] | keyword[def] identifier[add_edges] ( identifier[self] , identifier[edges] , identifier[src_field] = keyword[None] , identifier[dst_field] = keyword[None] ):
literal[string]
identifier[sf] = identifier[_edge_data_to_sframe] ( identifier[edges] , identifier[src_field] , identifier[dst_field] )
keyword[with] identifier[cython_context] ():
identifier[proxy] = identifier[self] . identifier[__proxy__] . identifier[add_edges] ( identifier[sf] . identifier[__proxy__] , identifier[_SRC_VID_COLUMN] , identifier[_DST_VID_COLUMN] )
keyword[return] identifier[SGraph] ( identifier[_proxy] = identifier[proxy] ) | def add_edges(self, edges, src_field=None, dst_field=None):
"""
Add edges to the SGraph. Edges should be input as a list of
:class:`~turicreate.Edge` objects, an :class:`~turicreate.SFrame`, or a
Pandas DataFrame. If the new edges are in an SFrame or DataFrame, then
``src_field`` and ``dst_field`` are required to specify the columns that
contain the source and destination vertex IDs; additional columns are
treated as edge attributes. If these attributes are not already present
in the graph's edge data, they are added, with existing edges acquiring
the value ``None``.
Parameters
----------
edges : Edge | list [Edge] | pandas.DataFrame | SFrame
Edge data. If the edges are in an SFrame or DataFrame, then
``src_field`` and ``dst_field`` are required to specify the columns
that contain the source and destination vertex IDs. Additional
columns are treated as edge attributes.
src_field : string, optional
Column in the SFrame or DataFrame to use as source vertex IDs. Not
required if ``edges`` is a list.
dst_field : string, optional
Column in the SFrame or Pandas DataFrame to use as destination
vertex IDs. Not required if ``edges`` is a list.
Returns
-------
out : SGraph
A new SGraph with `edges` added.
See Also
--------
edges, SFrame, add_vertices
Notes
-----
- If an edge is added whose source and destination IDs match edges that
already exist in the graph, a new edge is added to the graph. This
contrasts with :py:func:`add_vertices`, which overwrites existing
vertices.
Examples
--------
>>> from turicreate import SGraph, Vertex, Edge, SFrame
>>> g = SGraph()
>>> verts = [Vertex(0, attr={'breed': 'labrador'}),
Vertex(1, attr={'breed': 'labrador'}),
Vertex(2, attr={'breed': 'vizsla'})]
>>> g = g.add_vertices(verts)
Add a single edge.
>>> g = g.add_edges(Edge(1, 2))
Add a list of edges.
>>> g = g.add_edges([Edge(0, 2), Edge(1, 2)])
Add edges from an SFrame.
>>> sf_edge = SFrame({'source': [0, 1], 'dest': [2, 2]})
>>> g = g.add_edges(sf_edge, src_field='source', dst_field='dest')
"""
sf = _edge_data_to_sframe(edges, src_field, dst_field)
with cython_context():
proxy = self.__proxy__.add_edges(sf.__proxy__, _SRC_VID_COLUMN, _DST_VID_COLUMN)
return SGraph(_proxy=proxy) # depends on [control=['with'], data=[]] |
def get_version(path=VERSION_PATH):
"""
    Executes the Python file at VERSION_PATH in a fresh namespace and calls the
    get_version function defined there. Keeping the version in its own file
    ensures that reading it runs no other package code.
"""
namespace = {}
exec(read(path), namespace)
return namespace['get_version'](short=True) | def function[get_version, parameter[path]]:
constant[
    Executes the Python file at VERSION_PATH in a fresh namespace and calls the
    get_version function defined there. Keeping the version in its own file
    ensures that reading it runs no other package code.
]
variable[namespace] assign[=] dictionary[[], []]
call[name[exec], parameter[call[name[read], parameter[name[path]]], name[namespace]]]
return[call[call[name[namespace]][constant[get_version]], parameter[]]] | keyword[def] identifier[get_version] ( identifier[path] = identifier[VERSION_PATH] ):
literal[string]
identifier[namespace] ={}
identifier[exec] ( identifier[read] ( identifier[path] ), identifier[namespace] )
keyword[return] identifier[namespace] [ literal[string] ]( identifier[short] = keyword[True] ) | def get_version(path=VERSION_PATH):
"""
    Executes the Python file at VERSION_PATH in a fresh namespace and calls the
    get_version function defined there. Keeping the version in its own file
    ensures that reading it runs no other package code.
"""
namespace = {}
exec(read(path), namespace)
return namespace['get_version'](short=True) |
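A self-contained sketch of the exec-into-a-namespace trick, with the version file's contents inlined as a string (the real code reads them from VERSION_PATH):

VERSION_SOURCE = (
    "__version__ = (1, 2, 3)\n"
    "def get_version(short=False):\n"
    "    parts = __version__[:2] if short else __version__\n"
    "    return '.'.join(map(str, parts))\n"
)
namespace = {}
exec(VERSION_SOURCE, namespace)          # defines get_version in the dict
assert namespace['get_version'](short=True) == '1.2'
assert namespace['get_version']() == '1.2.3'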
def __callback (self, img):
'''
Callback function to receive and save Images.
@param img: ROS Image received
@type img: sensor_msgs.msg.Image
'''
image = imageMsg2Image(img, self.bridge)
self.lock.acquire()
self.data = image
self.lock.release() | def function[__callback, parameter[self, img]]:
constant[
Callback function to receive and save Images.
@param img: ROS Image received
@type img: sensor_msgs.msg.Image
]
variable[image] assign[=] call[name[imageMsg2Image], parameter[name[img], name[self].bridge]]
call[name[self].lock.acquire, parameter[]]
name[self].data assign[=] name[image]
call[name[self].lock.release, parameter[]] | keyword[def] identifier[__callback] ( identifier[self] , identifier[img] ):
literal[string]
identifier[image] = identifier[imageMsg2Image] ( identifier[img] , identifier[self] . identifier[bridge] )
identifier[self] . identifier[lock] . identifier[acquire] ()
identifier[self] . identifier[data] = identifier[image]
identifier[self] . identifier[lock] . identifier[release] () | def __callback(self, img):
"""
Callback function to receive and save Images.
@param img: ROS Image received
@type img: sensor_msgs.msg.Image
"""
image = imageMsg2Image(img, self.bridge)
self.lock.acquire()
self.data = image
self.lock.release() |
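The callback is a lock-guarded keep-only-the-latest-value pattern. A ROS-free sketch with the message conversion stubbed out (imageMsg2Image and the bridge are replaced by the identity):

import threading

class LatestImage:
    def __init__(self):
        self.lock = threading.Lock()
        self.data = None

    def callback(self, msg):
        image = msg                      # imageMsg2Image(msg, bridge) in the original
        with self.lock:                  # equivalent to acquire()/release()
            self.data = image

    def get(self):
        with self.lock:
            return self.data

holder = LatestImage()
holder.callback('frame-1')
assert holder.get() == 'frame-1'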
def update(self):
"""Cache the list into the data section of the record"""
from ambry.orm.exc import NotFoundError
from requests.exceptions import ConnectionError, HTTPError
from boto.exception import S3ResponseError
d = {}
try:
for k, v in self.list(full=True):
if not v:
continue
d[v['vid']] = {
'vid': v['vid'],
'vname': v.get('vname'),
'id': v.get('id'),
'name': v.get('name')
}
self.data['list'] = d
except (NotFoundError, ConnectionError, S3ResponseError, HTTPError) as e:
raise RemoteAccessError("Failed to update {}: {}".format(self.short_name, e)) | def function[update, parameter[self]]:
constant[Cache the list into the data section of the record]
from relative_module[ambry.orm.exc] import module[NotFoundError]
from relative_module[requests.exceptions] import module[ConnectionError], module[HTTPError]
from relative_module[boto.exception] import module[S3ResponseError]
variable[d] assign[=] dictionary[[], []]
<ast.Try object at 0x7da1b2345390> | keyword[def] identifier[update] ( identifier[self] ):
literal[string]
keyword[from] identifier[ambry] . identifier[orm] . identifier[exc] keyword[import] identifier[NotFoundError]
keyword[from] identifier[requests] . identifier[exceptions] keyword[import] identifier[ConnectionError] , identifier[HTTPError]
keyword[from] identifier[boto] . identifier[exception] keyword[import] identifier[S3ResponseError]
identifier[d] ={}
keyword[try] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[list] ( identifier[full] = keyword[True] ):
keyword[if] keyword[not] identifier[v] :
keyword[continue]
identifier[d] [ identifier[v] [ literal[string] ]]={
literal[string] : identifier[v] [ literal[string] ],
literal[string] : identifier[v] . identifier[get] ( literal[string] ),
literal[string] : identifier[v] . identifier[get] ( literal[string] ),
literal[string] : identifier[v] . identifier[get] ( literal[string] )
}
identifier[self] . identifier[data] [ literal[string] ]= identifier[d]
keyword[except] ( identifier[NotFoundError] , identifier[ConnectionError] , identifier[S3ResponseError] , identifier[HTTPError] ) keyword[as] identifier[e] :
keyword[raise] identifier[RemoteAccessError] ( literal[string] . identifier[format] ( identifier[self] . identifier[short_name] , identifier[e] )) | def update(self):
"""Cache the list into the data section of the record"""
from ambry.orm.exc import NotFoundError
from requests.exceptions import ConnectionError, HTTPError
from boto.exception import S3ResponseError
d = {}
try:
for (k, v) in self.list(full=True):
if not v:
continue # depends on [control=['if'], data=[]]
d[v['vid']] = {'vid': v['vid'], 'vname': v.get('vname'), 'id': v.get('id'), 'name': v.get('name')} # depends on [control=['for'], data=[]]
self.data['list'] = d # depends on [control=['try'], data=[]]
except (NotFoundError, ConnectionError, S3ResponseError, HTTPError) as e:
raise RemoteAccessError('Failed to update {}: {}'.format(self.short_name, e)) # depends on [control=['except'], data=['e']] |
def order_by_json_path(self, json_path, language_code=None, order='asc'):
"""
Makes the method available through the manager (i.e. `Model.objects`).
Usage example:
MyModel.objects.order_by_json_path('title', order='desc')
MyModel.objects.order_by_json_path('title', language_code='en_us', order='desc')
"""
return self.get_queryset(language_code).order_by_json_path(
json_path, language_code=language_code, order=order) | def function[order_by_json_path, parameter[self, json_path, language_code, order]]:
constant[
Makes the method available through the manager (i.e. `Model.objects`).
Usage example:
MyModel.objects.order_by_json_path('title', order='desc')
MyModel.objects.order_by_json_path('title', language_code='en_us', order='desc')
]
return[call[call[name[self].get_queryset, parameter[name[language_code]]].order_by_json_path, parameter[name[json_path]]]] | keyword[def] identifier[order_by_json_path] ( identifier[self] , identifier[json_path] , identifier[language_code] = keyword[None] , identifier[order] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[get_queryset] ( identifier[language_code] ). identifier[order_by_json_path] (
identifier[json_path] , identifier[language_code] = identifier[language_code] , identifier[order] = identifier[order] ) | def order_by_json_path(self, json_path, language_code=None, order='asc'):
"""
Makes the method available through the manager (i.e. `Model.objects`).
Usage example:
MyModel.objects.order_by_json_path('title', order='desc')
MyModel.objects.order_by_json_path('title', language_code='en_us', order='desc')
"""
return self.get_queryset(language_code).order_by_json_path(json_path, language_code=language_code, order=order) |
def RegisterIntKey(cls, key, atomid, min_value=0, max_value=(2 ** 16) - 1):
"""Register a scalar integer key.
"""
def getter(tags, key):
return list(map(text_type, tags[atomid]))
def setter(tags, key, value):
clamp = lambda x: int(min(max(min_value, x), max_value))
tags[atomid] = [clamp(v) for v in map(int, value)]
def deleter(tags, key):
del(tags[atomid])
cls.RegisterKey(key, getter, setter, deleter) | def function[RegisterIntKey, parameter[cls, key, atomid, min_value, max_value]]:
constant[Register a scalar integer key.
]
def function[getter, parameter[tags, key]]:
return[call[name[list], parameter[call[name[map], parameter[name[text_type], call[name[tags]][name[atomid]]]]]]]
def function[setter, parameter[tags, key, value]]:
variable[clamp] assign[=] <ast.Lambda object at 0x7da1b1e46020>
call[name[tags]][name[atomid]] assign[=] <ast.ListComp object at 0x7da1b1e46da0>
def function[deleter, parameter[tags, key]]:
<ast.Delete object at 0x7da1b20fb790>
call[name[cls].RegisterKey, parameter[name[key], name[getter], name[setter], name[deleter]]] | keyword[def] identifier[RegisterIntKey] ( identifier[cls] , identifier[key] , identifier[atomid] , identifier[min_value] = literal[int] , identifier[max_value] =( literal[int] ** literal[int] )- literal[int] ):
literal[string]
keyword[def] identifier[getter] ( identifier[tags] , identifier[key] ):
keyword[return] identifier[list] ( identifier[map] ( identifier[text_type] , identifier[tags] [ identifier[atomid] ]))
keyword[def] identifier[setter] ( identifier[tags] , identifier[key] , identifier[value] ):
identifier[clamp] = keyword[lambda] identifier[x] : identifier[int] ( identifier[min] ( identifier[max] ( identifier[min_value] , identifier[x] ), identifier[max_value] ))
identifier[tags] [ identifier[atomid] ]=[ identifier[clamp] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[map] ( identifier[int] , identifier[value] )]
keyword[def] identifier[deleter] ( identifier[tags] , identifier[key] ):
keyword[del] ( identifier[tags] [ identifier[atomid] ])
identifier[cls] . identifier[RegisterKey] ( identifier[key] , identifier[getter] , identifier[setter] , identifier[deleter] ) | def RegisterIntKey(cls, key, atomid, min_value=0, max_value=2 ** 16 - 1):
"""Register a scalar integer key.
"""
def getter(tags, key):
return list(map(text_type, tags[atomid]))
def setter(tags, key, value):
clamp = lambda x: int(min(max(min_value, x), max_value))
tags[atomid] = [clamp(v) for v in map(int, value)]
def deleter(tags, key):
del tags[atomid]
cls.RegisterKey(key, getter, setter, deleter) |
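The clamp closure is the interesting part: every value is pinned into [min_value, max_value] before being written back. Checked in isolation with the defaults above:

min_value, max_value = 0, 2 ** 16 - 1
clamp = lambda x: int(min(max(min_value, x), max_value))
assert clamp(-5) == 0            # below the range -> min_value
assert clamp(70000) == 65535     # above the range -> max_value
assert clamp(300) == 300         # in range -> unchanged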
def predict(self, eval_data, num_batch=None, merge_batches=True, reset=True,
always_output_list=False, sparse_row_id_fn=None):
"""Runs prediction and collects the outputs.
When `merge_batches` is ``True`` (by default), the return value will be a list
``[out1, out2, out3]``, where each element is formed by concatenating the outputs for
        all the mini-batches. When `always_output_list` is ``False`` (the default),
then in the case of a single output, `out1` is returned instead of ``[out1]``.
When `merge_batches` is ``False``, the return value will be a nested list like
``[[out1_batch1, out2_batch1], [out1_batch2], ...]``. This mode is useful because
in some cases (e.g. bucketing), the module does not necessarily produce the same
number of outputs.
The objects in the results have type `NDArray`. If you need to work with a numpy array,
just call ``.asnumpy()`` on each `NDArray`.
Parameters
----------
eval_data : DataIter or NDArray or numpy array
Evaluation data to run prediction on.
num_batch : int
Defaults to ``None``, indicates running all the batches in the data iterator.
merge_batches : bool
Defaults to ``True``, see above for return values.
reset : bool
Defaults to ``True``, indicates whether we should reset the data iter before
doing prediction.
always_output_list : bool
Defaults to ``False``, see above for return values.
sparse_row_id_fn : A callback function
The function takes `data_batch` as an input and returns a dict of
str -> NDArray. The resulting dict is used for pulling row_sparse
parameters from the kvstore, where the str key is the name of the param,
and the value is the row id of the param to pull.
Returns
-------
list of NDArray or list of list of NDArray
Prediction results.
Examples
--------
>>> # An example of using `predict` for prediction.
>>> # Predict on the first 10 batches of val_dataiter
>>> mod.predict(eval_data=val_dataiter, num_batch=10)
"""
assert self.binded and self.params_initialized
if isinstance(eval_data, (ndarray.NDArray, np.ndarray)):
if isinstance(eval_data, np.ndarray):
eval_data = ndarray.array(eval_data)
self.forward(DataBatch([eval_data]))
return self.get_outputs()[0]
if not isinstance(eval_data, DataIter):
raise ValueError('eval_data must be of type NDArray or DataIter')
if reset:
eval_data.reset()
output_list = []
for nbatch, eval_batch in enumerate(eval_data):
if num_batch is not None and nbatch == num_batch:
break
self.prepare(eval_batch, sparse_row_id_fn=sparse_row_id_fn)
self.forward(eval_batch, is_train=False)
pad = eval_batch.pad
outputs = [out[0:out.shape[0]-pad].copy() for out in self.get_outputs()]
output_list.append(outputs)
if len(output_list) == 0:
return output_list
if merge_batches:
num_outputs = len(output_list[0])
for out in output_list:
assert len(out) == num_outputs, \
'Cannot merge batches, as num of outputs is not the same ' + \
'in mini-batches. Maybe bucketing is used?'
output_list2 = [ndarray.concatenate([out[i] for out in output_list])
for i in range(num_outputs)]
if num_outputs == 1 and not always_output_list:
return output_list2[0]
return output_list2
return output_list | def function[predict, parameter[self, eval_data, num_batch, merge_batches, reset, always_output_list, sparse_row_id_fn]]:
constant[Runs prediction and collects the outputs.
When `merge_batches` is ``True`` (by default), the return value will be a list
``[out1, out2, out3]``, where each element is formed by concatenating the outputs for
all the mini-batches. When `always_output_list` is ``False`` (as by default),
then in the case of a single output, `out1` is returned instead of ``[out1]``.
When `merge_batches` is ``False``, the return value will be a nested list like
``[[out1_batch1, out2_batch1], [out1_batch2], ...]``. This mode is useful because
in some cases (e.g. bucketing), the module does not necessarily produce the same
number of outputs.
The objects in the results have type `NDArray`. If you need to work with a numpy array,
just call ``.asnumpy()`` on each `NDArray`.
Parameters
----------
eval_data : DataIter or NDArray or numpy array
Evaluation data to run prediction on.
num_batch : int
Defaults to ``None``, indicates running all the batches in the data iterator.
merge_batches : bool
Defaults to ``True``, see above for return values.
reset : bool
Defaults to ``True``, indicates whether we should reset the data iter before
doing prediction.
always_output_list : bool
Defaults to ``False``, see above for return values.
sparse_row_id_fn : A callback function
The function takes `data_batch` as an input and returns a dict of
str -> NDArray. The resulting dict is used for pulling row_sparse
parameters from the kvstore, where the str key is the name of the param,
and the value is the row id of the param to pull.
Returns
-------
list of NDArray or list of list of NDArray
Prediction results.
Examples
--------
>>> # An example of using `predict` for prediction.
>>> # Predict on the first 10 batches of val_dataiter
>>> mod.predict(eval_data=val_dataiter, num_batch=10)
]
assert[<ast.BoolOp object at 0x7da1b20facb0>]
if call[name[isinstance], parameter[name[eval_data], tuple[[<ast.Attribute object at 0x7da1b20f9750>, <ast.Attribute object at 0x7da1b20f8c70>]]]] begin[:]
if call[name[isinstance], parameter[name[eval_data], name[np].ndarray]] begin[:]
variable[eval_data] assign[=] call[name[ndarray].array, parameter[name[eval_data]]]
call[name[self].forward, parameter[call[name[DataBatch], parameter[list[[<ast.Name object at 0x7da1b204f3d0>]]]]]]
return[call[call[name[self].get_outputs, parameter[]]][constant[0]]]
if <ast.UnaryOp object at 0x7da1b204d750> begin[:]
<ast.Raise object at 0x7da1b204c940>
if name[reset] begin[:]
call[name[eval_data].reset, parameter[]]
variable[output_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b204fb20>, <ast.Name object at 0x7da1b204c3d0>]]] in starred[call[name[enumerate], parameter[name[eval_data]]]] begin[:]
if <ast.BoolOp object at 0x7da1b204d330> begin[:]
break
call[name[self].prepare, parameter[name[eval_batch]]]
call[name[self].forward, parameter[name[eval_batch]]]
variable[pad] assign[=] name[eval_batch].pad
variable[outputs] assign[=] <ast.ListComp object at 0x7da1b2029960>
call[name[output_list].append, parameter[name[outputs]]]
if compare[call[name[len], parameter[name[output_list]]] equal[==] constant[0]] begin[:]
return[name[output_list]]
if name[merge_batches] begin[:]
variable[num_outputs] assign[=] call[name[len], parameter[call[name[output_list]][constant[0]]]]
for taget[name[out]] in starred[name[output_list]] begin[:]
assert[compare[call[name[len], parameter[name[out]]] equal[==] name[num_outputs]]]
variable[output_list2] assign[=] <ast.ListComp object at 0x7da1b202bdf0>
if <ast.BoolOp object at 0x7da1b202b250> begin[:]
return[call[name[output_list2]][constant[0]]]
return[name[output_list2]]
return[name[output_list]] | keyword[def] identifier[predict] ( identifier[self] , identifier[eval_data] , identifier[num_batch] = keyword[None] , identifier[merge_batches] = keyword[True] , identifier[reset] = keyword[True] ,
identifier[always_output_list] = keyword[False] , identifier[sparse_row_id_fn] = keyword[None] ):
literal[string]
keyword[assert] identifier[self] . identifier[binded] keyword[and] identifier[self] . identifier[params_initialized]
keyword[if] identifier[isinstance] ( identifier[eval_data] ,( identifier[ndarray] . identifier[NDArray] , identifier[np] . identifier[ndarray] )):
keyword[if] identifier[isinstance] ( identifier[eval_data] , identifier[np] . identifier[ndarray] ):
identifier[eval_data] = identifier[ndarray] . identifier[array] ( identifier[eval_data] )
identifier[self] . identifier[forward] ( identifier[DataBatch] ([ identifier[eval_data] ]))
keyword[return] identifier[self] . identifier[get_outputs] ()[ literal[int] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[eval_data] , identifier[DataIter] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[reset] :
identifier[eval_data] . identifier[reset] ()
identifier[output_list] =[]
keyword[for] identifier[nbatch] , identifier[eval_batch] keyword[in] identifier[enumerate] ( identifier[eval_data] ):
keyword[if] identifier[num_batch] keyword[is] keyword[not] keyword[None] keyword[and] identifier[nbatch] == identifier[num_batch] :
keyword[break]
identifier[self] . identifier[prepare] ( identifier[eval_batch] , identifier[sparse_row_id_fn] = identifier[sparse_row_id_fn] )
identifier[self] . identifier[forward] ( identifier[eval_batch] , identifier[is_train] = keyword[False] )
identifier[pad] = identifier[eval_batch] . identifier[pad]
identifier[outputs] =[ identifier[out] [ literal[int] : identifier[out] . identifier[shape] [ literal[int] ]- identifier[pad] ]. identifier[copy] () keyword[for] identifier[out] keyword[in] identifier[self] . identifier[get_outputs] ()]
identifier[output_list] . identifier[append] ( identifier[outputs] )
keyword[if] identifier[len] ( identifier[output_list] )== literal[int] :
keyword[return] identifier[output_list]
keyword[if] identifier[merge_batches] :
identifier[num_outputs] = identifier[len] ( identifier[output_list] [ literal[int] ])
keyword[for] identifier[out] keyword[in] identifier[output_list] :
keyword[assert] identifier[len] ( identifier[out] )== identifier[num_outputs] , literal[string] + literal[string]
identifier[output_list2] =[ identifier[ndarray] . identifier[concatenate] ([ identifier[out] [ identifier[i] ] keyword[for] identifier[out] keyword[in] identifier[output_list] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_outputs] )]
keyword[if] identifier[num_outputs] == literal[int] keyword[and] keyword[not] identifier[always_output_list] :
keyword[return] identifier[output_list2] [ literal[int] ]
keyword[return] identifier[output_list2]
keyword[return] identifier[output_list] | def predict(self, eval_data, num_batch=None, merge_batches=True, reset=True, always_output_list=False, sparse_row_id_fn=None):
"""Runs prediction and collects the outputs.
When `merge_batches` is ``True`` (by default), the return value will be a list
``[out1, out2, out3]``, where each element is formed by concatenating the outputs for
all the mini-batches. When `always_output_list` is ``False`` (as by default),
then in the case of a single output, `out1` is returned instead of ``[out1]``.
When `merge_batches` is ``False``, the return value will be a nested list like
``[[out1_batch1, out2_batch1], [out1_batch2], ...]``. This mode is useful because
in some cases (e.g. bucketing), the module does not necessarily produce the same
number of outputs.
The objects in the results have type `NDArray`. If you need to work with a numpy array,
just call ``.asnumpy()`` on each `NDArray`.
Parameters
----------
eval_data : DataIter or NDArray or numpy array
Evaluation data to run prediction on.
num_batch : int
Defaults to ``None``, indicates running all the batches in the data iterator.
merge_batches : bool
Defaults to ``True``, see above for return values.
reset : bool
Defaults to ``True``, indicates whether we should reset the data iter before
doing prediction.
always_output_list : bool
Defaults to ``False``, see above for return values.
sparse_row_id_fn : A callback function
The function takes `data_batch` as an input and returns a dict of
str -> NDArray. The resulting dict is used for pulling row_sparse
parameters from the kvstore, where the str key is the name of the param,
and the value is the row id of the param to pull.
Returns
-------
list of NDArray or list of list of NDArray
Prediction results.
Examples
--------
>>> # An example of using `predict` for prediction.
>>> # Predict on the first 10 batches of val_dataiter
>>> mod.predict(eval_data=val_dataiter, num_batch=10)
"""
assert self.binded and self.params_initialized
if isinstance(eval_data, (ndarray.NDArray, np.ndarray)):
if isinstance(eval_data, np.ndarray):
eval_data = ndarray.array(eval_data) # depends on [control=['if'], data=[]]
self.forward(DataBatch([eval_data]))
return self.get_outputs()[0] # depends on [control=['if'], data=[]]
if not isinstance(eval_data, DataIter):
raise ValueError('eval_data must be of type NDArray or DataIter') # depends on [control=['if'], data=[]]
if reset:
eval_data.reset() # depends on [control=['if'], data=[]]
output_list = []
for (nbatch, eval_batch) in enumerate(eval_data):
if num_batch is not None and nbatch == num_batch:
break # depends on [control=['if'], data=[]]
self.prepare(eval_batch, sparse_row_id_fn=sparse_row_id_fn)
self.forward(eval_batch, is_train=False)
pad = eval_batch.pad
outputs = [out[0:out.shape[0] - pad].copy() for out in self.get_outputs()]
output_list.append(outputs) # depends on [control=['for'], data=[]]
if len(output_list) == 0:
return output_list # depends on [control=['if'], data=[]]
if merge_batches:
num_outputs = len(output_list[0])
for out in output_list:
assert len(out) == num_outputs, 'Cannot merge batches, as num of outputs is not the same ' + 'in mini-batches. Maybe bucketing is used?' # depends on [control=['for'], data=['out']]
output_list2 = [ndarray.concatenate([out[i] for out in output_list]) for i in range(num_outputs)]
if num_outputs == 1 and (not always_output_list):
return output_list2[0] # depends on [control=['if'], data=[]]
return output_list2 # depends on [control=['if'], data=[]]
return output_list |
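A small sketch of the merge_batches step, with numpy arrays standing in for NDArray: outputs are concatenated slot by slot across mini-batches. The shapes here are made up.
import numpy as np
output_list = [[np.ones((2, 3)), np.zeros((2, 1))],   # batch 1: two output slots
               [np.ones((2, 3)), np.zeros((2, 1))]]   # batch 2: same slots
num_outputs = len(output_list[0])
# one concatenation per output slot, exactly as in the entry above
merged = [np.concatenate([out[i] for out in output_list]) for i in range(num_outputs)]
print([m.shape for m in merged])  # [(4, 3), (4, 1)]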
def to_dict(self):
"""Return this Context as a dict suitable for json encoding."""
import copy
options = copy.deepcopy(self._options)
if self._insert_tasks:
options['insert_tasks'] = reference_to_path(self._insert_tasks)
if self._persistence_engine:
options['persistence_engine'] = reference_to_path(
self._persistence_engine)
options.update({
'_tasks_inserted': self._tasks_inserted,
})
callbacks = self._options.get('callbacks')
if callbacks:
options['callbacks'] = encode_callbacks(callbacks)
return options | def function[to_dict, parameter[self]]:
constant[Return this Context as a dict suitable for json encoding.]
import module[copy]
variable[options] assign[=] call[name[copy].deepcopy, parameter[name[self]._options]]
if name[self]._insert_tasks begin[:]
call[name[options]][constant[insert_tasks]] assign[=] call[name[reference_to_path], parameter[name[self]._insert_tasks]]
if name[self]._persistence_engine begin[:]
call[name[options]][constant[persistence_engine]] assign[=] call[name[reference_to_path], parameter[name[self]._persistence_engine]]
call[name[options].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1800c40>], [<ast.Attribute object at 0x7da1b18012a0>]]]]
variable[callbacks] assign[=] call[name[self]._options.get, parameter[constant[callbacks]]]
if name[callbacks] begin[:]
call[name[options]][constant[callbacks]] assign[=] call[name[encode_callbacks], parameter[name[callbacks]]]
return[name[options]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
keyword[import] identifier[copy]
identifier[options] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[_options] )
keyword[if] identifier[self] . identifier[_insert_tasks] :
identifier[options] [ literal[string] ]= identifier[reference_to_path] ( identifier[self] . identifier[_insert_tasks] )
keyword[if] identifier[self] . identifier[_persistence_engine] :
identifier[options] [ literal[string] ]= identifier[reference_to_path] (
identifier[self] . identifier[_persistence_engine] )
identifier[options] . identifier[update] ({
literal[string] : identifier[self] . identifier[_tasks_inserted] ,
})
identifier[callbacks] = identifier[self] . identifier[_options] . identifier[get] ( literal[string] )
keyword[if] identifier[callbacks] :
identifier[options] [ literal[string] ]= identifier[encode_callbacks] ( identifier[callbacks] )
keyword[return] identifier[options] | def to_dict(self):
"""Return this Context as a dict suitable for json encoding."""
import copy
options = copy.deepcopy(self._options)
if self._insert_tasks:
options['insert_tasks'] = reference_to_path(self._insert_tasks) # depends on [control=['if'], data=[]]
if self._persistence_engine:
options['persistence_engine'] = reference_to_path(self._persistence_engine) # depends on [control=['if'], data=[]]
options.update({'_tasks_inserted': self._tasks_inserted})
callbacks = self._options.get('callbacks')
if callbacks:
options['callbacks'] = encode_callbacks(callbacks) # depends on [control=['if'], data=[]]
return options |
def empty(cls: Type[BoardT], *, chess960: bool = False) -> BoardT:
"""Creates a new empty board. Also see :func:`~chess.Board.clear()`."""
return cls(None, chess960=chess960) | def function[empty, parameter[cls]]:
constant[Creates a new empty board. Also see :func:`~chess.Board.clear()`.]
return[call[name[cls], parameter[constant[None]]]] | keyword[def] identifier[empty] ( identifier[cls] : identifier[Type] [ identifier[BoardT] ],*, identifier[chess960] : identifier[bool] = keyword[False] )-> identifier[BoardT] :
literal[string]
keyword[return] identifier[cls] ( keyword[None] , identifier[chess960] = identifier[chess960] ) | def empty(cls: Type[BoardT], *, chess960: bool=False) -> BoardT:
"""Creates a new empty board. Also see :func:`~chess.Board.clear()`."""
return cls(None, chess960=chess960) |
def rpc_spec_table(app):
"""Collects methods which are speced as RPC."""
table = {}
for attr, value in inspect.getmembers(app):
rpc_spec = get_rpc_spec(value, default=None)
if rpc_spec is None:
continue
table[rpc_spec.name] = (value, rpc_spec)
return table | def function[rpc_spec_table, parameter[app]]:
constant[Collects methods which are speced as RPC.]
variable[table] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0290310>, <ast.Name object at 0x7da1b0291270>]]] in starred[call[name[inspect].getmembers, parameter[name[app]]]] begin[:]
variable[rpc_spec] assign[=] call[name[get_rpc_spec], parameter[name[value]]]
if compare[name[rpc_spec] is constant[None]] begin[:]
continue
call[name[table]][name[rpc_spec].name] assign[=] tuple[[<ast.Name object at 0x7da1b0293640>, <ast.Name object at 0x7da1b0290ee0>]]
return[name[table]] | keyword[def] identifier[rpc_spec_table] ( identifier[app] ):
literal[string]
identifier[table] ={}
keyword[for] identifier[attr] , identifier[value] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[app] ):
identifier[rpc_spec] = identifier[get_rpc_spec] ( identifier[value] , identifier[default] = keyword[None] )
keyword[if] identifier[rpc_spec] keyword[is] keyword[None] :
keyword[continue]
identifier[table] [ identifier[rpc_spec] . identifier[name] ]=( identifier[value] , identifier[rpc_spec] )
keyword[return] identifier[table] | def rpc_spec_table(app):
"""Collects methods which are speced as RPC."""
table = {}
for (attr, value) in inspect.getmembers(app):
rpc_spec = get_rpc_spec(value, default=None)
if rpc_spec is None:
continue # depends on [control=['if'], data=[]]
table[rpc_spec.name] = (value, rpc_spec) # depends on [control=['for'], data=[]]
return table |
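A self-contained sketch of the same collection pattern; the _rpc_spec marker and the Spec stand-in below are hypothetical, mirroring what get_rpc_spec would read off a decorated method.
import inspect

class App:
    def ping(self):
        return 'pong'
    ping._rpc_spec = type('Spec', (), {'name': 'ping'})()  # hypothetical spec marker

table = {}
for attr, value in inspect.getmembers(App()):
    spec = getattr(value, '_rpc_spec', None)  # stands in for get_rpc_spec(value, default=None)
    if spec is None:
        continue
    table[spec.name] = (value, spec)
print(sorted(table))  # ['ping']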
def userKicked(self, kickee, channel, kicker, message):
"""Called when I see another user get kicked."""
self.dispatch('population', 'userKicked', kickee, channel, kicker,
message) | def function[userKicked, parameter[self, kickee, channel, kicker, message]]:
constant[Called when I see another user get kicked.]
call[name[self].dispatch, parameter[constant[population], constant[userKicked], name[kickee], name[channel], name[kicker], name[message]]] | keyword[def] identifier[userKicked] ( identifier[self] , identifier[kickee] , identifier[channel] , identifier[kicker] , identifier[message] ):
literal[string]
identifier[self] . identifier[dispatch] ( literal[string] , literal[string] , identifier[kickee] , identifier[channel] , identifier[kicker] ,
identifier[message] ) | def userKicked(self, kickee, channel, kicker, message):
"""Called when I see another user get kicked."""
self.dispatch('population', 'userKicked', kickee, channel, kicker, message) |
def get_next_step(self):
"""Find the proper step when user clicks the Next button.
:returns: The step to be switched to.
:rtype: WizardStep instance or None
"""
if self.layer_purpose != layer_purpose_aggregation:
subcategory = self.parent.step_kw_subcategory.\
selected_subcategory()
else:
subcategory = {'key': None}
if is_raster_layer(self.parent.layer):
return self.parent.step_kw_source
# Check if it can go to inasafe field step
inasafe_fields = get_non_compulsory_fields(
self.layer_purpose['key'], subcategory['key'])
if not skip_inasafe_field(self.parent.layer, inasafe_fields):
return self.parent.step_kw_inasafe_fields
# Check if it can go to inasafe default field step
default_inasafe_fields = get_fields(
self.layer_purpose['key'],
subcategory['key'],
replace_null=True,
in_group=False
)
if default_inasafe_fields:
return self.parent.step_kw_default_inasafe_fields
# Any other case
return self.parent.step_kw_source | def function[get_next_step, parameter[self]]:
constant[Find the proper step when user clicks the Next button.
:returns: The step to be switched to.
:rtype: WizardStep instance or None
]
if compare[name[self].layer_purpose not_equal[!=] name[layer_purpose_aggregation]] begin[:]
variable[subcategory] assign[=] call[name[self].parent.step_kw_subcategory.selected_subcategory, parameter[]]
if call[name[is_raster_layer], parameter[name[self].parent.layer]] begin[:]
return[name[self].parent.step_kw_source]
variable[inasafe_fields] assign[=] call[name[get_non_compulsory_fields], parameter[call[name[self].layer_purpose][constant[key]], call[name[subcategory]][constant[key]]]]
if <ast.UnaryOp object at 0x7da2044c01f0> begin[:]
return[name[self].parent.step_kw_inasafe_fields]
variable[default_inasafe_fields] assign[=] call[name[get_fields], parameter[call[name[self].layer_purpose][constant[key]], call[name[subcategory]][constant[key]]]]
if name[default_inasafe_fields] begin[:]
return[name[self].parent.step_kw_default_inasafe_fields]
return[name[self].parent.step_kw_source] | keyword[def] identifier[get_next_step] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[layer_purpose] != identifier[layer_purpose_aggregation] :
identifier[subcategory] = identifier[self] . identifier[parent] . identifier[step_kw_subcategory] . identifier[selected_subcategory] ()
keyword[else] :
identifier[subcategory] ={ literal[string] : keyword[None] }
keyword[if] identifier[is_raster_layer] ( identifier[self] . identifier[parent] . identifier[layer] ):
keyword[return] identifier[self] . identifier[parent] . identifier[step_kw_source]
identifier[inasafe_fields] = identifier[get_non_compulsory_fields] (
identifier[self] . identifier[layer_purpose] [ literal[string] ], identifier[subcategory] [ literal[string] ])
keyword[if] keyword[not] identifier[skip_inasafe_field] ( identifier[self] . identifier[parent] . identifier[layer] , identifier[inasafe_fields] ):
keyword[return] identifier[self] . identifier[parent] . identifier[step_kw_inasafe_fields]
identifier[default_inasafe_fields] = identifier[get_fields] (
identifier[self] . identifier[layer_purpose] [ literal[string] ],
identifier[subcategory] [ literal[string] ],
identifier[replace_null] = keyword[True] ,
identifier[in_group] = keyword[False]
)
keyword[if] identifier[default_inasafe_fields] :
keyword[return] identifier[self] . identifier[parent] . identifier[step_kw_default_inasafe_fields]
keyword[return] identifier[self] . identifier[parent] . identifier[step_kw_source] | def get_next_step(self):
"""Find the proper step when user clicks the Next button.
:returns: The step to be switched to.
:rtype: WizardStep instance or None
"""
if self.layer_purpose != layer_purpose_aggregation:
subcategory = self.parent.step_kw_subcategory.selected_subcategory() # depends on [control=['if'], data=[]]
else:
subcategory = {'key': None}
if is_raster_layer(self.parent.layer):
return self.parent.step_kw_source # depends on [control=['if'], data=[]]
# Check if it can go to inasafe field step
inasafe_fields = get_non_compulsory_fields(self.layer_purpose['key'], subcategory['key'])
if not skip_inasafe_field(self.parent.layer, inasafe_fields):
return self.parent.step_kw_inasafe_fields # depends on [control=['if'], data=[]]
# Check if it can go to inasafe default field step
default_inasafe_fields = get_fields(self.layer_purpose['key'], subcategory['key'], replace_null=True, in_group=False)
if default_inasafe_fields:
return self.parent.step_kw_default_inasafe_fields # depends on [control=['if'], data=[]]
# Any other case
return self.parent.step_kw_source |
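The routing above reduces to a guard chain with a default; a distilled sketch with placeholder names:
def next_step(is_raster, has_inasafe_fields, has_default_fields):
    if is_raster:
        return 'step_kw_source'
    if has_inasafe_fields:
        return 'step_kw_inasafe_fields'
    if has_default_fields:
        return 'step_kw_default_inasafe_fields'
    return 'step_kw_source'  # any other case falls back to the source step

print(next_step(False, True, False))  # step_kw_inasafe_fields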
def spinner(
spinner_name=None,
start_text=None,
handler_map=None,
nospin=False,
write_to_stdout=True,
):
"""Get a spinner object or a dummy spinner to wrap a context.
:param str spinner_name: A spinner type e.g. "dots" or "bouncingBar" (default: {"bouncingBar"})
:param str start_text: Text to start off the spinner with (default: {None})
:param dict handler_map: Handler map for signals to be handled gracefully (default: {None})
:param bool nospin: If true, use the dummy spinner (default: {False})
:param bool write_to_stdout: Writes to stdout if true, otherwise writes to stderr (default: True)
:return: A spinner object which can be manipulated while alive
:rtype: :class:`~vistir.spin.VistirSpinner`
Raises:
RuntimeError -- Raised if the spinner extra is not installed
"""
from .spin import create_spinner
has_yaspin = None
try:
import yaspin
except ImportError:
has_yaspin = False
if not nospin:
raise RuntimeError(
"Failed to import spinner! Reinstall vistir with command:"
" pip install --upgrade vistir[spinner]"
)
else:
spinner_name = ""
else:
has_yaspin = True
spinner_name = ""
use_yaspin = (has_yaspin is False) or (nospin is True)
if has_yaspin is None or has_yaspin is True and not nospin:
use_yaspin = True
if start_text is None and use_yaspin is True:
start_text = "Running..."
with create_spinner(
spinner_name=spinner_name,
text=start_text,
handler_map=handler_map,
nospin=nospin,
use_yaspin=use_yaspin,
write_to_stdout=write_to_stdout,
) as _spinner:
yield _spinner | def function[spinner, parameter[spinner_name, start_text, handler_map, nospin, write_to_stdout]]:
constant[Get a spinner object or a dummy spinner to wrap a context.
:param str spinner_name: A spinner type e.g. "dots" or "bouncingBar" (default: {"bouncingBar"})
:param str start_text: Text to start off the spinner with (default: {None})
:param dict handler_map: Handler map for signals to be handled gracefully (default: {None})
:param bool nospin: If true, use the dummy spinner (default: {False})
:param bool write_to_stdout: Writes to stdout if true, otherwise writes to stderr (default: True)
:return: A spinner object which can be manipulated while alive
:rtype: :class:`~vistir.spin.VistirSpinner`
Raises:
RuntimeError -- Raised if the spinner extra is not installed
]
from relative_module[spin] import module[create_spinner]
variable[has_yaspin] assign[=] constant[None]
<ast.Try object at 0x7da18ede6200>
variable[use_yaspin] assign[=] <ast.BoolOp object at 0x7da18ede5990>
if <ast.BoolOp object at 0x7da18ede7a00> begin[:]
variable[use_yaspin] assign[=] constant[True]
if <ast.BoolOp object at 0x7da18ede6320> begin[:]
variable[start_text] assign[=] constant[Running...]
with call[name[create_spinner], parameter[]] begin[:]
<ast.Yield object at 0x7da18ede4f70> | keyword[def] identifier[spinner] (
identifier[spinner_name] = keyword[None] ,
identifier[start_text] = keyword[None] ,
identifier[handler_map] = keyword[None] ,
identifier[nospin] = keyword[False] ,
identifier[write_to_stdout] = keyword[True] ,
):
literal[string]
keyword[from] . identifier[spin] keyword[import] identifier[create_spinner]
identifier[has_yaspin] = keyword[None]
keyword[try] :
keyword[import] identifier[yaspin]
keyword[except] identifier[ImportError] :
identifier[has_yaspin] = keyword[False]
keyword[if] keyword[not] identifier[nospin] :
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string]
)
keyword[else] :
identifier[spinner_name] = literal[string]
keyword[else] :
identifier[has_yaspin] = keyword[True]
identifier[spinner_name] = literal[string]
identifier[use_yaspin] =( identifier[has_yaspin] keyword[is] keyword[False] ) keyword[or] ( identifier[nospin] keyword[is] keyword[True] )
keyword[if] identifier[has_yaspin] keyword[is] keyword[None] keyword[or] identifier[has_yaspin] keyword[is] keyword[True] keyword[and] keyword[not] identifier[nospin] :
identifier[use_yaspin] = keyword[True]
keyword[if] identifier[start_text] keyword[is] keyword[None] keyword[and] identifier[use_yaspin] keyword[is] keyword[True] :
identifier[start_text] = literal[string]
keyword[with] identifier[create_spinner] (
identifier[spinner_name] = identifier[spinner_name] ,
identifier[text] = identifier[start_text] ,
identifier[handler_map] = identifier[handler_map] ,
identifier[nospin] = identifier[nospin] ,
identifier[use_yaspin] = identifier[use_yaspin] ,
identifier[write_to_stdout] = identifier[write_to_stdout] ,
) keyword[as] identifier[_spinner] :
keyword[yield] identifier[_spinner] | def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False, write_to_stdout=True):
"""Get a spinner object or a dummy spinner to wrap a context.
:param str spinner_name: A spinner type e.g. "dots" or "bouncingBar" (default: {"bouncingBar"})
:param str start_text: Text to start off the spinner with (default: {None})
:param dict handler_map: Handler map for signals to be handled gracefully (default: {None})
:param bool nospin: If true, use the dummy spinner (default: {False})
:param bool write_to_stdout: Writes to stdout if true, otherwise writes to stderr (default: True)
:return: A spinner object which can be manipulated while alive
:rtype: :class:`~vistir.spin.VistirSpinner`
Raises:
RuntimeError -- Raised if the spinner extra is not installed
"""
from .spin import create_spinner
has_yaspin = None
try:
import yaspin # depends on [control=['try'], data=[]]
except ImportError:
has_yaspin = False
if not nospin:
raise RuntimeError('Failed to import spinner! Reinstall vistir with command: pip install --upgrade vistir[spinner]') # depends on [control=['if'], data=[]]
else:
spinner_name = '' # depends on [control=['except'], data=[]]
else:
has_yaspin = True
spinner_name = ''
use_yaspin = has_yaspin is False or nospin is True
if has_yaspin is None or (has_yaspin is True and (not nospin)):
use_yaspin = True # depends on [control=['if'], data=[]]
if start_text is None and use_yaspin is True:
start_text = 'Running...' # depends on [control=['if'], data=[]]
with create_spinner(spinner_name=spinner_name, text=start_text, handler_map=handler_map, nospin=nospin, use_yaspin=use_yaspin, write_to_stdout=write_to_stdout) as _spinner:
yield _spinner # depends on [control=['with'], data=['_spinner']] |
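A hypothetical call site for the context manager above (in vistir it is wrapped with contextlib.contextmanager); nospin=True selects the dummy spinner, so the yaspin extra is not required.
with spinner(start_text="Working...", nospin=True) as sp:
    pass  # long-running work goes here; sp is the (dummy) spinner handle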
def on_click(self, event):
"""
Switch the displayed module or pass the event on to the active module
"""
if event["button"] == self.button_reset:
self._change_active(0)
elif event["button"] == self.button_change_time_format:
self.active_time_format += 1
if self.active_time_format >= len(self.format_time):
self.active_time_format = 0
# save the active format_time
time_format = self.format_time[self.active_time_format]
self.py3.storage_set("time_format", time_format)
elif event["button"] == self.button_change_format:
self._change_active(1) | def function[on_click, parameter[self, event]]:
constant[
Switch the displayed module or pass the event on to the active module
]
if compare[call[name[event]][constant[button]] equal[==] name[self].button_reset] begin[:]
call[name[self]._change_active, parameter[constant[0]]] | keyword[def] identifier[on_click] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[event] [ literal[string] ]== identifier[self] . identifier[button_reset] :
identifier[self] . identifier[_change_active] ( literal[int] )
keyword[elif] identifier[event] [ literal[string] ]== identifier[self] . identifier[button_change_time_format] :
identifier[self] . identifier[active_time_format] += literal[int]
keyword[if] identifier[self] . identifier[active_time_format] >= identifier[len] ( identifier[self] . identifier[format_time] ):
identifier[self] . identifier[active_time_format] = literal[int]
identifier[time_format] = identifier[self] . identifier[format_time] [ identifier[self] . identifier[active_time_format] ]
identifier[self] . identifier[py3] . identifier[storage_set] ( literal[string] , identifier[time_format] )
keyword[elif] identifier[event] [ literal[string] ]== identifier[self] . identifier[button_change_format] :
identifier[self] . identifier[_change_active] ( literal[int] ) | def on_click(self, event):
"""
Switch the displayed module or pass the event on to the active module
"""
if event['button'] == self.button_reset:
self._change_active(0) # depends on [control=['if'], data=[]]
elif event['button'] == self.button_change_time_format:
self.active_time_format += 1
if self.active_time_format >= len(self.format_time):
self.active_time_format = 0 # depends on [control=['if'], data=[]]
# save the active format_time
time_format = self.format_time[self.active_time_format]
self.py3.storage_set('time_format', time_format) # depends on [control=['if'], data=[]]
elif event['button'] == self.button_change_format:
self._change_active(1) # depends on [control=['if'], data=[]] |
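The time-format cycling above is a plain wraparound increment; sketched here with a bare list so the reset at the end is visible. The format strings are examples only.
formats = ['%H:%M', '%H:%M:%S', '%I:%M %p']
active = 2
active += 1
if active >= len(formats):
    active = 0  # wrap back to the first format, as in on_click
print(formats[active])  # '%H:%M'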
def _read_join_ack(self, bits, size, kind):
"""Read Join Connection option for Third ACK.
Positional arguments:
* bits - str, 4-bit data
* size - int, length of option
* kind - int, 30 (Multipath TCP)
Returns:
* dict -- extracted Join Connection (MP_JOIN-ACK) option for Third ACK
Structure of MP_JOIN-ACK [RFC 6824]:
1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+---------------+---------------+-------+-----------------------+
| Kind | Length = 24 |Subtype| (reserved) |
+---------------+---------------+-------+-----------------------+
| |
| |
| Sender's HMAC (160 bits) |
| |
| |
+---------------------------------------------------------------+
Octets Bits Name Description
0 0 tcp.mp.kind Kind (30)
1 8 tcp.mp.length Length (24)
2 16 tcp.mp.subtype Subtype (1|ACK)
2 20 - Reserved (must be zero)
4 32 tcp.mp.join.ack.hmac Sender's HMAC
"""
temp = self._read_fileng(20)
data = dict(
kind=kind,
length=size + 1,
subtype='MP_JOIN-ACK',
join=dict(
ack=dict(
hmac=temp,
),
),
)
return data | def function[_read_join_ack, parameter[self, bits, size, kind]]:
constant[Read Join Connection option for Third ACK.
Positional arguments:
* bits - str, 4-bit data
* size - int, length of option
* kind - int, 30 (Multipath TCP)
Returns:
* dict -- extracted Join Connection (MP_JOIN-ACK) option for Third ACK
Structure of MP_JOIN-ACK [RFC 6824]:
1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+---------------+---------------+-------+-----------------------+
| Kind | Length = 24 |Subtype| (reserved) |
+---------------+---------------+-------+-----------------------+
| |
| |
| Sender's HMAC (160 bits) |
| |
| |
+---------------------------------------------------------------+
Octets Bits Name Description
0 0 tcp.mp.kind Kind (30)
1 8 tcp.mp.length Length (24)
2 16 tcp.mp.subtype Subtype (1|ACK)
2 20 - Reserved (must be zero)
4 32 tcp.mp.join.ack.hmac Sender's HMAC
]
variable[temp] assign[=] call[name[self]._read_fileng, parameter[constant[20]]]
variable[data] assign[=] call[name[dict], parameter[]]
return[name[data]] | keyword[def] identifier[_read_join_ack] ( identifier[self] , identifier[bits] , identifier[size] , identifier[kind] ):
literal[string]
identifier[temp] = identifier[self] . identifier[_read_fileng] ( literal[int] )
identifier[data] = identifier[dict] (
identifier[kind] = identifier[kind] ,
identifier[length] = identifier[size] + literal[int] ,
identifier[subtype] = literal[string] ,
identifier[join] = identifier[dict] (
identifier[ack] = identifier[dict] (
identifier[hmac] = identifier[temp] ,
),
),
)
keyword[return] identifier[data] | def _read_join_ack(self, bits, size, kind):
"""Read Join Connection option for Third ACK.
Positional arguments:
* bits - str, 4-bit data
* size - int, length of option
* kind - int, 30 (Multipath TCP)
Returns:
* dict -- extracted Join Connection (MP_JOIN-ACK) option for Third ACK
Structure of MP_JOIN-ACK [RFC 6824]:
1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+---------------+---------------+-------+-----------------------+
| Kind | Length = 24 |Subtype| (reserved) |
+---------------+---------------+-------+-----------------------+
| |
| |
| Sender's HMAC (160 bits) |
| |
| |
+---------------------------------------------------------------+
Octets Bits Name Description
0 0 tcp.mp.kind Kind (30)
1 8 tcp.mp.length Length (24)
2 16 tcp.mp.subtype Subtype (1|ACK)
2 20 - Reserved (must be zero)
4 32 tcp.mp.join.ack.hmac Sender's HMAC
"""
temp = self._read_fileng(20)
data = dict(kind=kind, length=size + 1, subtype='MP_JOIN-ACK', join=dict(ack=dict(hmac=temp)))
return data |
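A standalone illustration of the same layout with raw bytes (the option contents here are fabricated): kind, length, a subtype nibble, then the 20-byte HMAC.
option = bytes([30, 24, 0x10, 0x00]) + bytes(range(20))  # kind=30, len=24, subtype=1 (ACK)
kind, length = option[0], option[1]
subtype = option[2] >> 4          # high nibble of byte 2
hmac = option[4:24]               # sender's HMAC, 160 bits
print(kind, length, subtype, hmac.hex())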
def replace_command(command, broken, matched):
"""Helper for *_no_command rules."""
new_cmds = get_close_matches(broken, matched, cutoff=0.1)
return [replace_argument(command.script, broken, new_cmd.strip())
for new_cmd in new_cmds] | def function[replace_command, parameter[command, broken, matched]]:
constant[Helper for *_no_command rules.]
variable[new_cmds] assign[=] call[name[get_close_matches], parameter[name[broken], name[matched]]]
return[<ast.ListComp object at 0x7da1b2028790>] | keyword[def] identifier[replace_command] ( identifier[command] , identifier[broken] , identifier[matched] ):
literal[string]
identifier[new_cmds] = identifier[get_close_matches] ( identifier[broken] , identifier[matched] , identifier[cutoff] = literal[int] )
keyword[return] [ identifier[replace_argument] ( identifier[command] . identifier[script] , identifier[broken] , identifier[new_cmd] . identifier[strip] ())
keyword[for] identifier[new_cmd] keyword[in] identifier[new_cmds] ] | def replace_command(command, broken, matched):
"""Helper for *_no_command rules."""
new_cmds = get_close_matches(broken, matched, cutoff=0.1)
return [replace_argument(command.script, broken, new_cmd.strip()) for new_cmd in new_cmds] |
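get_close_matches here is difflib's; a low cutoff such as 0.1 keeps even loose candidates. The sample strings are illustrative.
from difflib import get_close_matches
print(get_close_matches('pyhton', ['python', 'perl', 'ruby'], cutoff=0.1))
# 'python' ranks first; lower-similarity names may follow at this cutoff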
def post(self, request, *args, **kwargs):
""" Handles POST requests. """
self.init_attachment_cache()
# Stores a boolean indicating if we are considering a preview
self.preview = 'preview' in self.request.POST
# Initializes the forms
post_form_class = self.get_post_form_class()
post_form = self.get_post_form(post_form_class)
attachment_formset_class = self.get_attachment_formset_class()
attachment_formset = self.get_attachment_formset(attachment_formset_class)
self.attachment_preview = (
self.preview if attachment_formset and attachment_formset.is_valid() else None
)
post_form_valid = post_form.is_valid()
if (
(post_form_valid and attachment_formset is None) or
(post_form_valid and attachment_formset.is_valid())
):
return self.form_valid(post_form, attachment_formset)
else:
return self.form_invalid(post_form, attachment_formset) | def function[post, parameter[self, request]]:
constant[ Handles POST requests. ]
call[name[self].init_attachment_cache, parameter[]]
name[self].preview assign[=] compare[constant[preview] in name[self].request.POST]
variable[post_form_class] assign[=] call[name[self].get_post_form_class, parameter[]]
variable[post_form] assign[=] call[name[self].get_post_form, parameter[name[post_form_class]]]
variable[attachment_formset_class] assign[=] call[name[self].get_attachment_formset_class, parameter[]]
variable[attachment_formset] assign[=] call[name[self].get_attachment_formset, parameter[name[attachment_formset_class]]]
name[self].attachment_preview assign[=] <ast.IfExp object at 0x7da18eb566b0>
variable[post_form_valid] assign[=] call[name[post_form].is_valid, parameter[]]
if <ast.BoolOp object at 0x7da18eb54eb0> begin[:]
return[call[name[self].form_valid, parameter[name[post_form], name[attachment_formset]]]] | keyword[def] identifier[post] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[init_attachment_cache] ()
identifier[self] . identifier[preview] = literal[string] keyword[in] identifier[self] . identifier[request] . identifier[POST]
identifier[post_form_class] = identifier[self] . identifier[get_post_form_class] ()
identifier[post_form] = identifier[self] . identifier[get_post_form] ( identifier[post_form_class] )
identifier[attachment_formset_class] = identifier[self] . identifier[get_attachment_formset_class] ()
identifier[attachment_formset] = identifier[self] . identifier[get_attachment_formset] ( identifier[attachment_formset_class] )
identifier[self] . identifier[attachment_preview] =(
identifier[self] . identifier[preview] keyword[if] identifier[attachment_formset] keyword[and] identifier[attachment_formset] . identifier[is_valid] () keyword[else] keyword[None]
)
identifier[post_form_valid] = identifier[post_form] . identifier[is_valid] ()
keyword[if] (
( identifier[post_form_valid] keyword[and] identifier[attachment_formset] keyword[is] keyword[None] ) keyword[or]
( identifier[post_form_valid] keyword[and] identifier[attachment_formset] . identifier[is_valid] ())
):
keyword[return] identifier[self] . identifier[form_valid] ( identifier[post_form] , identifier[attachment_formset] )
keyword[else] :
keyword[return] identifier[self] . identifier[form_invalid] ( identifier[post_form] , identifier[attachment_formset] ) | def post(self, request, *args, **kwargs):
""" Handles POST requests. """
self.init_attachment_cache()
# Stores a boolean indicating if we are considering a preview
self.preview = 'preview' in self.request.POST
# Initializes the forms
post_form_class = self.get_post_form_class()
post_form = self.get_post_form(post_form_class)
attachment_formset_class = self.get_attachment_formset_class()
attachment_formset = self.get_attachment_formset(attachment_formset_class)
self.attachment_preview = self.preview if attachment_formset and attachment_formset.is_valid() else None
post_form_valid = post_form.is_valid()
if post_form_valid and attachment_formset is None or (post_form_valid and attachment_formset.is_valid()):
return self.form_valid(post_form, attachment_formset) # depends on [control=['if'], data=[]]
else:
return self.form_invalid(post_form, attachment_formset) |
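The gate in post() distilled to its boolean core; attachment_formset is None when no formset class is configured, and the stub below is a placeholder.
class StubFormset:
    def __init__(self, ok):
        self.ok = ok
    def is_valid(self):
        return self.ok

def should_proceed(post_form_valid, attachment_formset):
    return post_form_valid and (attachment_formset is None or attachment_formset.is_valid())

print(should_proceed(True, None), should_proceed(True, StubFormset(False)))  # True False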
def _parse_hparams(hparams):
"""Split hparams, based on key prefixes.
Args:
hparams: hyperparameters
Returns:
Tuple of hparams for, respectively: agent, optimizer, runner, replay_buffer.
"""
prefixes = ["agent_", "optimizer_", "runner_", "replay_buffer_"]
ret = []
for prefix in prefixes:
ret_dict = {}
for key in hparams.values():
if prefix in key:
par_name = key[len(prefix):]
ret_dict[par_name] = hparams.get(key)
ret.append(ret_dict)
return ret | def function[_parse_hparams, parameter[hparams]]:
constant[Split hparams based on key prefixes.
Args:
hparams: hyperparameters
Returns:
Tuple of hparams for, respectively: agent, optimizer, runner, replay_buffer.
]
variable[prefixes] assign[=] list[[<ast.Constant object at 0x7da1b20f9330>, <ast.Constant object at 0x7da1b20fbb20>, <ast.Constant object at 0x7da1b20f9c30>, <ast.Constant object at 0x7da1b20f9750>]]
variable[ret] assign[=] list[[]]
for taget[name[prefix]] in starred[name[prefixes]] begin[:]
variable[ret_dict] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[hparams].values, parameter[]]] begin[:]
if compare[name[prefix] in name[key]] begin[:]
variable[par_name] assign[=] call[name[key]][<ast.Slice object at 0x7da1b20f88e0>]
call[name[ret_dict]][name[par_name]] assign[=] call[name[hparams].get, parameter[name[key]]]
call[name[ret].append, parameter[name[ret_dict]]]
return[name[ret]] | keyword[def] identifier[_parse_hparams] ( identifier[hparams] ):
literal[string]
identifier[prefixes] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[ret] =[]
keyword[for] identifier[prefix] keyword[in] identifier[prefixes] :
identifier[ret_dict] ={}
keyword[for] identifier[key] keyword[in] identifier[hparams] . identifier[values] ():
keyword[if] identifier[prefix] keyword[in] identifier[key] :
identifier[par_name] = identifier[key] [ identifier[len] ( identifier[prefix] ):]
identifier[ret_dict] [ identifier[par_name] ]= identifier[hparams] . identifier[get] ( identifier[key] )
identifier[ret] . identifier[append] ( identifier[ret_dict] )
keyword[return] identifier[ret] | def _parse_hparams(hparams):
"""Split hparams, based on key prefixes.
Args:
hparams: hyperparameters
Returns:
Tuple of hparams for, respectively: agent, optimizer, runner, replay_buffer.
"""
prefixes = ['agent_', 'optimizer_', 'runner_', 'replay_buffer_']
ret = []
for prefix in prefixes:
ret_dict = {}
for key in hparams.values():
if prefix in key:
par_name = key[len(prefix):]
ret_dict[par_name] = hparams.get(key) # depends on [control=['if'], data=['prefix', 'key']] # depends on [control=['for'], data=['key']]
ret.append(ret_dict) # depends on [control=['for'], data=['prefix']]
return ret |
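A stand-in demo with a plain dict replacing HParams (whose .values() returns a dict, so the loop above iterates over keys); startswith below is a slightly stricter stand-in for the substring test in the original.
hparams = {'agent_lr': 1e-3, 'optimizer_eps': 1e-7, 'runner_steps': 100}
prefixes = ['agent_', 'optimizer_', 'runner_', 'replay_buffer_']
ret = [{k[len(p):]: v for k, v in hparams.items() if k.startswith(p)} for p in prefixes]
print(ret)  # [{'lr': 0.001}, {'eps': 1e-07}, {'steps': 100}, {}]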
def is_void(func):
"""
Determines if a function is a void function, i.e., one whose body contains
nothing but a docstring or an ellipsis. A void function can be used to introduce
an overloaded function without actually registering an implementation.
"""
try:
source = dedent(inspect.getsource(func))
except (OSError, IOError):
return False
fdef = next(ast.iter_child_nodes(ast.parse(source)))
return (
type(fdef) is ast.FunctionDef and len(fdef.body) == 1 and
type(fdef.body[0]) is ast.Expr and
type(fdef.body[0].value) in {ast.Str, ast.Ellipsis}) | def function[is_void, parameter[func]]:
constant[
Determines if a function is a void function, i.e., one whose body contains
nothing but a docstring or an ellipsis. A void function can be used to introduce
an overloaded function without actually registering an implementation.
]
<ast.Try object at 0x7da1b2633220>
variable[fdef] assign[=] call[name[next], parameter[call[name[ast].iter_child_nodes, parameter[call[name[ast].parse, parameter[name[source]]]]]]]
return[<ast.BoolOp object at 0x7da1b2631ff0>] | keyword[def] identifier[is_void] ( identifier[func] ):
literal[string]
keyword[try] :
identifier[source] = identifier[dedent] ( identifier[inspect] . identifier[getsource] ( identifier[func] ))
keyword[except] ( identifier[OSError] , identifier[IOError] ):
keyword[return] keyword[False]
identifier[fdef] = identifier[next] ( identifier[ast] . identifier[iter_child_nodes] ( identifier[ast] . identifier[parse] ( identifier[source] )))
keyword[return] (
identifier[type] ( identifier[fdef] ) keyword[is] identifier[ast] . identifier[FunctionDef] keyword[and] identifier[len] ( identifier[fdef] . identifier[body] )== literal[int] keyword[and]
identifier[type] ( identifier[fdef] . identifier[body] [ literal[int] ]) keyword[is] identifier[ast] . identifier[Expr] keyword[and]
identifier[type] ( identifier[fdef] . identifier[body] [ literal[int] ]. identifier[value] ) keyword[in] { identifier[ast] . identifier[Str] , identifier[ast] . identifier[Ellipsis] }) | def is_void(func):
"""
Determines if a function is a void function, i.e., one whose body contains
nothing but a docstring or an ellipsis. A void function can be used to introduce
an overloaded function without actually registering an implementation.
"""
try:
source = dedent(inspect.getsource(func)) # depends on [control=['try'], data=[]]
except (OSError, IOError):
return False # depends on [control=['except'], data=[]]
fdef = next(ast.iter_child_nodes(ast.parse(source)))
return type(fdef) is ast.FunctionDef and len(fdef.body) == 1 and (type(fdef.body[0]) is ast.Expr) and (type(fdef.body[0].value) in {ast.Str, ast.Ellipsis}) |
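How is_void behaves on a docstring-only stub, assuming it is in scope; note that inspect.getsource needs the function to come from a real file, so this returns False for functions typed into a bare REPL.
def stub():
    """Placeholder; no implementation yet."""

def real():
    return 42

print(is_void(stub), is_void(real))  # True False (when run from a .py file)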
def _get_fill_value(dtype, fill_value=None, fill_value_typ=None):
""" return the correct fill value for the dtype of the values """
if fill_value is not None:
return fill_value
if _na_ok_dtype(dtype):
if fill_value_typ is None:
return np.nan
else:
if fill_value_typ == '+inf':
return np.inf
else:
return -np.inf
else:
if fill_value_typ is None:
return tslibs.iNaT
else:
if fill_value_typ == '+inf':
# need the max int here
return _int64_max
else:
return tslibs.iNaT | def function[_get_fill_value, parameter[dtype, fill_value, fill_value_typ]]:
constant[ return the correct fill value for the dtype of the values ]
if compare[name[fill_value] is_not constant[None]] begin[:]
return[name[fill_value]]
if call[name[_na_ok_dtype], parameter[name[dtype]]] begin[:]
if compare[name[fill_value_typ] is constant[None]] begin[:]
return[name[np].nan] | keyword[def] identifier[_get_fill_value] ( identifier[dtype] , identifier[fill_value] = keyword[None] , identifier[fill_value_typ] = keyword[None] ):
literal[string]
keyword[if] identifier[fill_value] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[fill_value]
keyword[if] identifier[_na_ok_dtype] ( identifier[dtype] ):
keyword[if] identifier[fill_value_typ] keyword[is] keyword[None] :
keyword[return] identifier[np] . identifier[nan]
keyword[else] :
keyword[if] identifier[fill_value_typ] == literal[string] :
keyword[return] identifier[np] . identifier[inf]
keyword[else] :
keyword[return] - identifier[np] . identifier[inf]
keyword[else] :
keyword[if] identifier[fill_value_typ] keyword[is] keyword[None] :
keyword[return] identifier[tslibs] . identifier[iNaT]
keyword[else] :
keyword[if] identifier[fill_value_typ] == literal[string] :
keyword[return] identifier[_int64_max]
keyword[else] :
keyword[return] identifier[tslibs] . identifier[iNaT] | def _get_fill_value(dtype, fill_value=None, fill_value_typ=None):
""" return the correct fill value for the dtype of the values """
if fill_value is not None:
return fill_value # depends on [control=['if'], data=['fill_value']]
if _na_ok_dtype(dtype):
if fill_value_typ is None:
return np.nan # depends on [control=['if'], data=[]]
elif fill_value_typ == '+inf':
return np.inf # depends on [control=['if'], data=[]]
else:
return -np.inf # depends on [control=['if'], data=[]]
elif fill_value_typ is None:
return tslibs.iNaT # depends on [control=['if'], data=[]]
elif fill_value_typ == '+inf':
# need the max int here
return _int64_max # depends on [control=['if'], data=[]]
else:
return tslibs.iNaT |
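A standalone restatement of the NA-friendly branch above (floats and similar dtypes): None maps to NaN, '+inf' to +inf, anything else to -inf.
import numpy as np

def float_fill(fill_value_typ=None):
    if fill_value_typ is None:
        return np.nan
    return np.inf if fill_value_typ == '+inf' else -np.inf

print(float_fill(), float_fill('+inf'), float_fill('-inf'))  # nan inf -inf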