code (stringlengths 75-104k) | code_sememe (stringlengths 47-309k) | token_type (stringlengths 215-214k) | code_dependency (stringlengths 75-155k)
---|---|---|---
def get_task_log(task_id, session, request_kwargs=None):
"""Static method for getting a task log, given a task_id.
This method exists so a task log can be retrieved without
retrieving the item's task history first.
:type task_id: str or int
:param task_id: The task id for the task log you'd like to fetch.
:type session: :class:`ArchiveSession <ArchiveSession>`
:param session: The session used to make the request.
:type request_kwargs: dict
:param request_kwargs: (optional) Keyword arguments that
:py:class:`requests.Request` takes.
:rtype: str
:returns: The task log as a string.
"""
request_kwargs = request_kwargs if request_kwargs else dict()
url = '{0}//catalogd.archive.org/log/{1}'.format(session.protocol, task_id)
p = dict(full=1)
r = session.get(url, params=p, **request_kwargs)
r.raise_for_status()
return r.content.decode('utf-8') | def function[get_task_log, parameter[task_id, session, request_kwargs]]:
constant[Static method for getting a task log, given a task_id.
This method exists so a task log can be retrieved without
retrieving the item's task history first.
:type task_id: str or int
:param task_id: The task id for the task log you'd like to fetch.
:type session: :class:`ArchiveSession <ArchiveSession>`
:param session: The session used to make the request.
:type request_kwargs: dict
:param request_kwargs: (optional) Keyword arguments that
:py:class:`requests.Request` takes.
:rtype: str
:returns: The task log as a string.
]
variable[request_kwargs] assign[=] <ast.IfExp object at 0x7da1b20b40a0>
variable[url] assign[=] call[constant[{0}//catalogd.archive.org/log/{1}].format, parameter[name[session].protocol, name[task_id]]]
variable[p] assign[=] call[name[dict], parameter[]]
variable[r] assign[=] call[name[session].get, parameter[name[url]]]
call[name[r].raise_for_status, parameter[]]
return[call[name[r].content.decode, parameter[constant[utf-8]]]] | keyword[def] identifier[get_task_log] ( identifier[task_id] , identifier[session] , identifier[request_kwargs] = keyword[None] ):
literal[string]
identifier[request_kwargs] = identifier[request_kwargs] keyword[if] identifier[request_kwargs] keyword[else] identifier[dict] ()
identifier[url] = literal[string] . identifier[format] ( identifier[session] . identifier[protocol] , identifier[task_id] )
identifier[p] = identifier[dict] ( identifier[full] = literal[int] )
identifier[r] = identifier[session] . identifier[get] ( identifier[url] , identifier[params] = identifier[p] ,** identifier[request_kwargs] )
identifier[r] . identifier[raise_for_status] ()
keyword[return] identifier[r] . identifier[content] . identifier[decode] ( literal[string] ) | def get_task_log(task_id, session, request_kwargs=None):
"""Static method for getting a task log, given a task_id.
This method exists so a task log can be retrieved without
retrieving the item's task history first.
:type task_id: str or int
:param task_id: The task id for the task log you'd like to fetch.
:type session: :class:`ArchiveSession <ArchiveSession>`
:param session: The session used to make the request.
:type request_kwargs: dict
:param request_kwargs: (optional) Keyword arguments that
:py:class:`requests.Request` takes.
:rtype: str
:returns: The task log as a string.
"""
request_kwargs = request_kwargs if request_kwargs else dict()
url = '{0}//catalogd.archive.org/log/{1}'.format(session.protocol, task_id)
p = dict(full=1)
r = session.get(url, params=p, **request_kwargs)
r.raise_for_status()
return r.content.decode('utf-8') |
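A minimal usage sketch for get_task_log, assuming a requests-style session object exposing protocol and get; DummySession and DummyResponse below are invented stand-ins, not part of the original library.

class DummyResponse:
    content = b'task log line 1\ntask log line 2\n'

    def raise_for_status(self):
        pass  # a real requests.Response would raise on HTTP errors


class DummySession:
    protocol = 'https:'  # get_task_log appends '//catalogd.archive.org/log/...'

    def get(self, url, params=None, **request_kwargs):
        print('GET', url, params)  # -> https://catalogd.archive.org/log/123456 {'full': 1}
        return DummyResponse()


print(get_task_log(123456, DummySession()))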
def _attempt_to_extract_ccc(self):
"""Extracts the country calling code from the beginning of
_national_number to _prefix_before_national_number when they are
available, and places the remaining input into _national_number.
Returns True when a valid country calling code can be found.
"""
if len(self._national_number) == 0:
return False
country_code, number_without_ccc = _extract_country_code(self._national_number)
if country_code == 0:
return False
self._national_number = number_without_ccc
new_region_code = region_code_for_country_code(country_code)
if new_region_code == REGION_CODE_FOR_NON_GEO_ENTITY:
self._current_metadata = PhoneMetadata.metadata_for_nongeo_region(country_code)
elif new_region_code != self._default_country:
self._current_metadata = _get_metadata_for_region(new_region_code)
self._prefix_before_national_number += str(country_code)
self._prefix_before_national_number += _SEPARATOR_BEFORE_NATIONAL_NUMBER
# When we have successfully extracted the IDD, the previously
# extracted NDD should be cleared because it is no longer valid.
self._extracted_national_prefix = U_EMPTY_STRING
return True | def function[_attempt_to_extract_ccc, parameter[self]]:
constant[Extracts the country calling code from the beginning of
_national_number to _prefix_before_national_number when they are
available, and places the remaining input into _national_number.
Returns True when a valid country calling code can be found.
]
if compare[call[name[len], parameter[name[self]._national_number]] equal[==] constant[0]] begin[:]
return[constant[False]]
<ast.Tuple object at 0x7da1b18a2c20> assign[=] call[name[_extract_country_code], parameter[name[self]._national_number]]
if compare[name[country_code] equal[==] constant[0]] begin[:]
return[constant[False]]
name[self]._national_number assign[=] name[number_without_ccc]
variable[new_region_code] assign[=] call[name[region_code_for_country_code], parameter[name[country_code]]]
if compare[name[new_region_code] equal[==] name[REGION_CODE_FOR_NON_GEO_ENTITY]] begin[:]
name[self]._current_metadata assign[=] call[name[PhoneMetadata].metadata_for_nongeo_region, parameter[name[country_code]]]
<ast.AugAssign object at 0x7da1b18a1360>
<ast.AugAssign object at 0x7da1b18a2d70>
name[self]._extracted_national_prefix assign[=] name[U_EMPTY_STRING]
return[constant[True]] | keyword[def] identifier[_attempt_to_extract_ccc] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[_national_number] )== literal[int] :
keyword[return] keyword[False]
identifier[country_code] , identifier[number_without_ccc] = identifier[_extract_country_code] ( identifier[self] . identifier[_national_number] )
keyword[if] identifier[country_code] == literal[int] :
keyword[return] keyword[False]
identifier[self] . identifier[_national_number] = identifier[number_without_ccc]
identifier[new_region_code] = identifier[region_code_for_country_code] ( identifier[country_code] )
keyword[if] identifier[new_region_code] == identifier[REGION_CODE_FOR_NON_GEO_ENTITY] :
identifier[self] . identifier[_current_metadata] = identifier[PhoneMetadata] . identifier[metadata_for_nongeo_region] ( identifier[country_code] )
keyword[elif] identifier[new_region_code] != identifier[self] . identifier[_default_country] :
identifier[self] . identifier[_current_metadata] = identifier[_get_metadata_for_region] ( identifier[new_region_code] )
identifier[self] . identifier[_prefix_before_national_number] += identifier[str] ( identifier[country_code] )
identifier[self] . identifier[_prefix_before_national_number] += identifier[_SEPARATOR_BEFORE_NATIONAL_NUMBER]
identifier[self] . identifier[_extracted_national_prefix] = identifier[U_EMPTY_STRING]
keyword[return] keyword[True] | def _attempt_to_extract_ccc(self):
"""Extracts the country calling code from the beginning of
_national_number to _prefix_before_national_number when they are
available, and places the remaining input into _national_number.
Returns True when a valid country calling code can be found.
"""
if len(self._national_number) == 0:
return False # depends on [control=['if'], data=[]]
(country_code, number_without_ccc) = _extract_country_code(self._national_number)
if country_code == 0:
return False # depends on [control=['if'], data=[]]
self._national_number = number_without_ccc
new_region_code = region_code_for_country_code(country_code)
if new_region_code == REGION_CODE_FOR_NON_GEO_ENTITY:
self._current_metadata = PhoneMetadata.metadata_for_nongeo_region(country_code) # depends on [control=['if'], data=[]]
elif new_region_code != self._default_country:
self._current_metadata = _get_metadata_for_region(new_region_code) # depends on [control=['if'], data=['new_region_code']]
self._prefix_before_national_number += str(country_code)
self._prefix_before_national_number += _SEPARATOR_BEFORE_NATIONAL_NUMBER
# When we have successfully extracted the IDD, the previously
# extracted NDD should be cleared because it is no longer valid.
self._extracted_national_prefix = U_EMPTY_STRING
return True |
def _collect_zipimporter_cache_entries(normalized_path, cache):
"""
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
"""
result = []
prefix_len = len(normalized_path)
for p in cache:
np = normalize_path(p)
if (np.startswith(normalized_path) and
np[prefix_len:prefix_len + 1] in (os.sep, '')):
result.append(p)
return result | def function[_collect_zipimporter_cache_entries, parameter[normalized_path, cache]]:
constant[
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
]
variable[result] assign[=] list[[]]
variable[prefix_len] assign[=] call[name[len], parameter[name[normalized_path]]]
for taget[name[p]] in starred[name[cache]] begin[:]
variable[np] assign[=] call[name[normalize_path], parameter[name[p]]]
if <ast.BoolOp object at 0x7da1b1a06ec0> begin[:]
call[name[result].append, parameter[name[p]]]
return[name[result]] | keyword[def] identifier[_collect_zipimporter_cache_entries] ( identifier[normalized_path] , identifier[cache] ):
literal[string]
identifier[result] =[]
identifier[prefix_len] = identifier[len] ( identifier[normalized_path] )
keyword[for] identifier[p] keyword[in] identifier[cache] :
identifier[np] = identifier[normalize_path] ( identifier[p] )
keyword[if] ( identifier[np] . identifier[startswith] ( identifier[normalized_path] ) keyword[and]
identifier[np] [ identifier[prefix_len] : identifier[prefix_len] + literal[int] ] keyword[in] ( identifier[os] . identifier[sep] , literal[string] )):
identifier[result] . identifier[append] ( identifier[p] )
keyword[return] identifier[result] | def _collect_zipimporter_cache_entries(normalized_path, cache):
"""
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
"""
result = []
prefix_len = len(normalized_path)
for p in cache:
np = normalize_path(p)
if np.startswith(normalized_path) and np[prefix_len:prefix_len + 1] in (os.sep, ''):
result.append(p) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
return result |
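A toy illustration of the prefix matching above; normalize_path here is a simplified stand-in for the setuptools helper, and the cache keys are invented.

import os

def normalize_path(p):
    # simplified stand-in: canonicalize case and separators
    return os.path.normcase(os.path.realpath(p))

fake_cache = {
    '/srv/app/lib.zip': object(),
    '/srv/app/lib.zip/plugins.zip': object(),  # zip embedded in another zip
    '/srv/app/other.zip': object(),
}
target = normalize_path('/srv/app/lib.zip')
print(_collect_zipimporter_cache_entries(target, fake_cache))
# both lib.zip and its embedded plugins.zip match; other.zip does not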
def plot_all(*args, **kwargs):
'''
Read all the trial data and plot the result of applying a function to them.
'''
dfs = do_all(*args, **kwargs)
ps = []
for line in dfs:
f, df, config = line
df.plot(title=config['name'])
ps.append(df)
return ps | def function[plot_all, parameter[]]:
constant[
Read all the trial data and plot the result of applying a function to them.
]
variable[dfs] assign[=] call[name[do_all], parameter[<ast.Starred object at 0x7da1b244e830>]]
variable[ps] assign[=] list[[]]
for taget[name[line]] in starred[name[dfs]] begin[:]
<ast.Tuple object at 0x7da1b247e140> assign[=] name[line]
call[name[df].plot, parameter[]]
call[name[ps].append, parameter[name[df]]]
return[name[ps]] | keyword[def] identifier[plot_all] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[dfs] = identifier[do_all] (* identifier[args] ,** identifier[kwargs] )
identifier[ps] =[]
keyword[for] identifier[line] keyword[in] identifier[dfs] :
identifier[f] , identifier[df] , identifier[config] = identifier[line]
identifier[df] . identifier[plot] ( identifier[title] = identifier[config] [ literal[string] ])
identifier[ps] . identifier[append] ( identifier[df] )
keyword[return] identifier[ps] | def plot_all(*args, **kwargs):
"""
Read all the trial data and plot the result of applying a function to them.
"""
dfs = do_all(*args, **kwargs)
ps = []
for line in dfs:
(f, df, config) = line
df.plot(title=config['name'])
ps.append(df) # depends on [control=['for'], data=['line']]
return ps |
def out_64(library, session, space, offset, data, extended=False):
"""Write in an 64-bit value from the specified memory space and offset.
Corresponds to viOut64* functions of the VISA library.
:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param space: Specifies the address space. (Constants.*SPACE*)
:param offset: Offset (in bytes) of the address or register to which to write.
:param data: Data to write to bus.
:param extended: Use 64 bits offset independent of the platform.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
if extended:
return library.viOut64Ex(session, space, offset, data)
else:
return library.viOut64(session, space, offset, data) | def function[out_64, parameter[library, session, space, offset, data, extended]]:
constant[Write a 64-bit value to the specified memory space and offset.
Corresponds to viOut64* functions of the VISA library.
:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param space: Specifies the address space. (Constants.*SPACE*)
:param offset: Offset (in bytes) of the address or register to which to write.
:param data: Data to write to bus.
:param extended: Use 64 bits offset independent of the platform.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
]
if name[extended] begin[:]
return[call[name[library].viOut64Ex, parameter[name[session], name[space], name[offset], name[data]]]] | keyword[def] identifier[out_64] ( identifier[library] , identifier[session] , identifier[space] , identifier[offset] , identifier[data] , identifier[extended] = keyword[False] ):
literal[string]
keyword[if] identifier[extended] :
keyword[return] identifier[library] . identifier[viOut64Ex] ( identifier[session] , identifier[space] , identifier[offset] , identifier[data] )
keyword[else] :
keyword[return] identifier[library] . identifier[viOut64] ( identifier[session] , identifier[space] , identifier[offset] , identifier[data] ) | def out_64(library, session, space, offset, data, extended=False):
"""Write in an 64-bit value from the specified memory space and offset.
Corresponds to viOut64* functions of the VISA library.
:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param space: Specifies the address space. (Constants.*SPACE*)
:param offset: Offset (in bytes) of the address or register to which to write.
:param data: Data to write to bus.
:param extended: Use 64 bits offset independent of the platform.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
if extended:
return library.viOut64Ex(session, space, offset, data) # depends on [control=['if'], data=[]]
else:
return library.viOut64(session, space, offset, data) |
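A hedged sketch exercising out_64 with a stubbed ctypes library object; FakeVisaLib and its return values are invented, only the viOut64/viOut64Ex entry-point names come from the code above.

class FakeVisaLib:
    def viOut64(self, session, space, offset, data):
        print('viOut64', session, space, offset, hex(data))
        return 0  # stand-in for a success status code

    def viOut64Ex(self, session, space, offset, data):
        print('viOut64Ex', session, space, offset, hex(data))
        return 0

status = out_64(FakeVisaLib(), session=1, space=1, offset=0x10, data=0xDEADBEEF)
status = out_64(FakeVisaLib(), session=1, space=1, offset=0x10, data=0xDEADBEEF, extended=True)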
def compute_wcs(key, challenge):
"""
Compute a WAMP-CRA authentication signature from an authentication
challenge and a (derived) key.
:param key: The key derived (via PBKDF2) from the secret.
:type key: str/bytes
:param challenge: The authentication challenge to sign.
:type challenge: str/bytes
:return: The authentication signature.
:rtype: bytes
"""
key = key.encode('utf8')
challenge = challenge.encode('utf8')
sig = hmac.new(key, challenge, hashlib.sha256).digest()
return binascii.b2a_base64(sig).strip() | def function[compute_wcs, parameter[key, challenge]]:
constant[
Compute a WAMP-CRA authentication signature from an authentication
challenge and a (derived) key.
:param key: The key derived (via PBKDF2) from the secret.
:type key: str/bytes
:param challenge: The authentication challenge to sign.
:type challenge: str/bytes
:return: The authentication signature.
:rtype: bytes
]
variable[key] assign[=] call[name[key].encode, parameter[constant[utf8]]]
variable[challenge] assign[=] call[name[challenge].encode, parameter[constant[utf8]]]
variable[sig] assign[=] call[call[name[hmac].new, parameter[name[key], name[challenge], name[hashlib].sha256]].digest, parameter[]]
return[call[call[name[binascii].b2a_base64, parameter[name[sig]]].strip, parameter[]]] | keyword[def] identifier[compute_wcs] ( identifier[key] , identifier[challenge] ):
literal[string]
identifier[key] = identifier[key] . identifier[encode] ( literal[string] )
identifier[challenge] = identifier[challenge] . identifier[encode] ( literal[string] )
identifier[sig] = identifier[hmac] . identifier[new] ( identifier[key] , identifier[challenge] , identifier[hashlib] . identifier[sha256] ). identifier[digest] ()
keyword[return] identifier[binascii] . identifier[b2a_base64] ( identifier[sig] ). identifier[strip] () | def compute_wcs(key, challenge):
"""
Compute a WAMP-CRA authentication signature from an authentication
challenge and a (derived) key.
:param key: The key derived (via PBKDF2) from the secret.
:type key: str/bytes
:param challenge: The authentication challenge to sign.
:type challenge: str/bytes
:return: The authentication signature.
:rtype: bytes
"""
key = key.encode('utf8')
challenge = challenge.encode('utf8')
sig = hmac.new(key, challenge, hashlib.sha256).digest()
return binascii.b2a_base64(sig).strip() |
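A quick check of compute_wcs; the derived key and challenge strings are made-up test values.

import binascii
import hashlib
import hmac

derived_key = 'pbkdf2-derived-secret'
challenge = '{"authid": "joe", "timestamp": "2014-03-21T13:40:18.069Z"}'
sig = compute_wcs(derived_key, challenge)
print(sig)  # base64-encoded HMAC-SHA256 of the challenge under the key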
def _build_item_closure(itemset, productionset):
"""Build input itemset closure """
#For every item inside the current itemset of the form
#  xxx <cursor><NonTerminalSymbol> xxx
#append every rule from productionset that begins with that NonTerminalSymbol
if not isinstance(itemset, LR0ItemSet):
raise TypeError
import copy
resultset = copy.copy(itemset)
changed = True
while changed:
changed = False
for currentitem in resultset.itemlist:
nextsymbol = currentitem.next_symbol()
if nextsymbol is None:
break
for rule in productionset.productions:
newitem = LR0Item(rule)
if rule.leftside[0] == nextsymbol and newitem not in resultset.itemlist:
resultset.append_item(newitem)
changed = True
return resultset | def function[_build_item_closure, parameter[itemset, productionset]]:
constant[Build input itemset closure ]
if <ast.UnaryOp object at 0x7da20e960bb0> begin[:]
<ast.Raise object at 0x7da20e963b80>
import module[copy]
variable[resultset] assign[=] call[name[copy].copy, parameter[name[itemset]]]
variable[changed] assign[=] constant[True]
while name[changed] begin[:]
variable[changed] assign[=] constant[False]
for taget[name[currentitem]] in starred[name[resultset].itemlist] begin[:]
variable[nextsymbol] assign[=] call[name[currentitem].next_symbol, parameter[]]
if compare[name[nextsymbol] is constant[None]] begin[:]
break
for taget[name[rule]] in starred[name[productionset].productions] begin[:]
variable[newitem] assign[=] call[name[LR0Item], parameter[name[rule]]]
if <ast.BoolOp object at 0x7da18f721de0> begin[:]
call[name[resultset].append_item, parameter[name[newitem]]]
variable[changed] assign[=] constant[True]
return[name[resultset]] | keyword[def] identifier[_build_item_closure] ( identifier[itemset] , identifier[productionset] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[itemset] , identifier[LR0ItemSet] ):
keyword[raise] identifier[TypeError]
keyword[import] identifier[copy]
identifier[resultset] = identifier[copy] . identifier[copy] ( identifier[itemset] )
identifier[changed] = keyword[True]
keyword[while] identifier[changed] :
identifier[changed] = keyword[False]
keyword[for] identifier[currentitem] keyword[in] identifier[resultset] . identifier[itemlist] :
identifier[nextsymbol] = identifier[currentitem] . identifier[next_symbol] ()
keyword[if] identifier[nextsymbol] keyword[is] keyword[None] :
keyword[break]
keyword[for] identifier[rule] keyword[in] identifier[productionset] . identifier[productions] :
identifier[newitem] = identifier[LR0Item] ( identifier[rule] )
keyword[if] identifier[rule] . identifier[leftside] [ literal[int] ]== identifier[nextsymbol] keyword[and] identifier[newitem] keyword[not] keyword[in] identifier[resultset] . identifier[itemlist] :
identifier[resultset] . identifier[append_item] ( identifier[newitem] )
identifier[changed] = keyword[True]
keyword[return] identifier[resultset] | def _build_item_closure(itemset, productionset):
"""Build input itemset closure """
#For every item inside the current itemset of the form
#  xxx <cursor><NonTerminalSymbol> xxx
#append every rule from productionset that begins with that NonTerminalSymbol
if not isinstance(itemset, LR0ItemSet):
raise TypeError # depends on [control=['if'], data=[]]
import copy
resultset = copy.copy(itemset)
changed = True
while changed:
changed = False
for currentitem in resultset.itemlist:
nextsymbol = currentitem.next_symbol()
if nextsymbol is None:
break # depends on [control=['if'], data=[]]
for rule in productionset.productions:
newitem = LR0Item(rule)
if rule.leftside[0] == nextsymbol and newitem not in resultset.itemlist:
resultset.append_item(newitem)
changed = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']] # depends on [control=['for'], data=['currentitem']] # depends on [control=['while'], data=[]]
return resultset |
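The loop above is the standard LR(0) closure. A self-contained sketch of the same idea over a toy grammar, independent of the library's LR0Item/LR0ItemSet classes (grammar and names invented for illustration):

# Grammar: S -> A b ; A -> a. Items are (rule_index, dot_position) pairs.
rules = [('S', ['A', 'b']), ('A', ['a'])]
nonterminals = {'S', 'A'}

def closure(items):
    items = set(items)
    changed = True
    while changed:
        changed = False
        for rule_idx, dot in list(items):
            rhs = rules[rule_idx][1]
            if dot >= len(rhs):
                continue  # cursor at the end of the rule: nothing to expand
            nxt = rhs[dot]
            if nxt not in nonterminals:
                continue  # terminals never trigger new items
            for i, (lhs, _) in enumerate(rules):
                if lhs == nxt and (i, 0) not in items:
                    items.add((i, 0))
                    changed = True
    return items

print(closure({(0, 0)}))  # {(0, 0), (1, 0)}: S -> .A b pulls in A -> .a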
def build_polylines(self, polylines):
""" Process data to construct polylines
This method is built from the assumption that the polylines parameter
is a list of:
list of lists or tuples : a list of path points, each one
indicating the point coordinates --
[lat,lng], [lat, lng], (lat, lng), ...
tuple of lists or tuples : a tuple of path points, each one
indicating the point coordinates -- (lat,lng), [lat, lng],
(lat, lng), ...
dicts: a dictionary with polylines attributes
So, for instance, we have this general scenario as an input parameter:
polyline = {
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 3,
'path': [{'lat': 33.678, 'lng': -116.243},
{'lat': 33.679, 'lng': -116.244},
{'lat': 33.680, 'lng': -116.250},
{'lat': 33.681, 'lng': -116.239},
{'lat': 33.678, 'lng': -116.243}]
}
path1 = [(33.665, -116.235), (33.666, -116.256),
(33.667, -116.250), (33.668, -116.229)]
path2 = ((33.659, -116.243), (33.660, -116.244),
(33.649, -116.250), (33.644, -116.239))
path3 = ([33.688, -116.243], [33.680, -116.244],
[33.682, -116.250], [33.690, -116.239])
path4 = [[33.690, -116.243], [33.691, -116.244],
[33.692, -116.250], [33.693, -116.239]]
polylines = [polyline, path1, path2, path3, path4]
"""
if not polylines:
return
if not isinstance(polylines, (list, tuple)):
raise AttributeError('A list or tuple of polylines is required')
for points in polylines:
if isinstance(points, dict):
self.add_polyline(**points)
elif isinstance(points, (tuple, list)):
path = []
for coords in points:
if len(coords) != 2:
raise AttributeError('A point needs two coordinates')
path.append({'lat': coords[0],
'lng': coords[1]})
polyline_dict = self.build_polyline_dict(path)
self.add_polyline(**polyline_dict) | def function[build_polylines, parameter[self, polylines]]:
constant[ Process data to construct polylines
This method is built from the assumption that the polylines parameter
is a list of:
list of lists or tuples : a list of path points, each one
indicating the point coordinates --
[lat,lng], [lat, lng], (lat, lng), ...
tuple of lists or tuples : a tuple of path points, each one
indicating the point coordinates -- (lat,lng), [lat, lng],
(lat, lng), ...
dicts: a dictionary with polylines attributes
So, for instance, we have this general scenario as an input parameter:
polyline = {
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 3,
'path': [{'lat': 33.678, 'lng': -116.243},
{'lat': 33.679, 'lng': -116.244},
{'lat': 33.680, 'lng': -116.250},
{'lat': 33.681, 'lng': -116.239},
{'lat': 33.678, 'lng': -116.243}]
}
path1 = [(33.665, -116.235), (33.666, -116.256),
(33.667, -116.250), (33.668, -116.229)]
path2 = ((33.659, -116.243), (33.660, -116.244),
(33.649, -116.250), (33.644, -116.239))
path3 = ([33.688, -116.243], [33.680, -116.244],
[33.682, -116.250], [33.690, -116.239])
path4 = [[33.690, -116.243], [33.691, -116.244],
[33.692, -116.250], [33.693, -116.239]]
polylines = [polyline, path1, path2, path3, path4]
]
if <ast.UnaryOp object at 0x7da1b1ebab90> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b1ebb490> begin[:]
<ast.Raise object at 0x7da1b1ebb160>
for taget[name[points]] in starred[name[polylines]] begin[:]
if call[name[isinstance], parameter[name[points], name[dict]]] begin[:]
call[name[self].add_polyline, parameter[]] | keyword[def] identifier[build_polylines] ( identifier[self] , identifier[polylines] ):
literal[string]
keyword[if] keyword[not] identifier[polylines] :
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[polylines] ,( identifier[list] , identifier[tuple] )):
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[for] identifier[points] keyword[in] identifier[polylines] :
keyword[if] identifier[isinstance] ( identifier[points] , identifier[dict] ):
identifier[self] . identifier[add_polyline] (** identifier[points] )
keyword[elif] identifier[isinstance] ( identifier[points] ,( identifier[tuple] , identifier[list] )):
identifier[path] =[]
keyword[for] identifier[coords] keyword[in] identifier[points] :
keyword[if] identifier[len] ( identifier[coords] )!= literal[int] :
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[path] . identifier[append] ({ literal[string] : identifier[coords] [ literal[int] ],
literal[string] : identifier[coords] [ literal[int] ]})
identifier[polyline_dict] = identifier[self] . identifier[build_polyline_dict] ( identifier[path] )
identifier[self] . identifier[add_polyline] (** identifier[polyline_dict] ) | def build_polylines(self, polylines):
""" Process data to construct polylines
This method is built from the assumption that the polylines parameter
is a list of:
list of lists or tuples : a list of path points, each one
indicating the point coordinates --
[lat,lng], [lat, lng], (lat, lng), ...
tuple of lists or tuples : a tuple of path points, each one
indicating the point coordinates -- (lat,lng), [lat, lng],
(lat, lng), ...
dicts: a dictionary with polylines attributes
So, for instance, we have this general scenario as an input parameter:
polyline = {
'stroke_color': '#0AB0DE',
'stroke_opacity': 1.0,
'stroke_weight': 3,
'path': [{'lat': 33.678, 'lng': -116.243},
{'lat': 33.679, 'lng': -116.244},
{'lat': 33.680, 'lng': -116.250},
{'lat': 33.681, 'lng': -116.239},
{'lat': 33.678, 'lng': -116.243}]
}
path1 = [(33.665, -116.235), (33.666, -116.256),
(33.667, -116.250), (33.668, -116.229)]
path2 = ((33.659, -116.243), (33.660, -116.244),
(33.649, -116.250), (33.644, -116.239))
path3 = ([33.688, -116.243], [33.680, -116.244],
[33.682, -116.250], [33.690, -116.239])
path4 = [[33.690, -116.243], [33.691, -116.244],
[33.692, -116.250], [33.693, -116.239]]
polylines = [polyline, path1, path2, path3, path4]
"""
if not polylines:
return # depends on [control=['if'], data=[]]
if not isinstance(polylines, (list, tuple)):
raise AttributeError('A list or tuple of polylines is required') # depends on [control=['if'], data=[]]
for points in polylines:
if isinstance(points, dict):
self.add_polyline(**points) # depends on [control=['if'], data=[]]
elif isinstance(points, (tuple, list)):
path = []
for coords in points:
if len(coords) != 2:
raise AttributeError('A point needs two coordinates') # depends on [control=['if'], data=[]]
path.append({'lat': coords[0], 'lng': coords[1]}) # depends on [control=['for'], data=['coords']]
polyline_dict = self.build_polyline_dict(path)
self.add_polyline(**polyline_dict) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['points']] |
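Since every bare (non-dict) path funnels through the same normalization step, here is a minimal sketch of that step with invented coordinates:

path1 = [(33.665, -116.235), (33.666, -116.256), (33.667, -116.250)]
path = [{'lat': lat, 'lng': lng} for lat, lng in path1]
print(path)
# [{'lat': 33.665, 'lng': -116.235}, {'lat': 33.666, 'lng': -116.256}, ...]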
def readObject(self):
"""
Reads an object from the stream.
"""
ref = self.readInteger(False)
if ref & REFERENCE_BIT == 0:
obj = self.context.getObject(ref >> 1)
if obj is None:
raise pyamf.ReferenceError('Unknown reference %d' % (ref >> 1,))
if self.use_proxies is True:
obj = self.readProxy(obj)
return obj
ref >>= 1
class_def = self._getClassDefinition(ref)
alias = class_def.alias
obj = alias.createInstance(codec=self)
obj_attrs = dict()
self.context.addObject(obj)
if class_def.encoding in (ObjectEncoding.EXTERNAL, ObjectEncoding.PROXY):
obj.__readamf__(DataInput(self))
if self.use_proxies is True:
obj = self.readProxy(obj)
return obj
elif class_def.encoding == ObjectEncoding.DYNAMIC:
self._readStatic(class_def, obj_attrs)
self._readDynamic(class_def, obj_attrs)
elif class_def.encoding == ObjectEncoding.STATIC:
self._readStatic(class_def, obj_attrs)
else:
raise pyamf.DecodeError("Unknown object encoding")
alias.applyAttributes(obj, obj_attrs, codec=self)
if self.use_proxies is True:
obj = self.readProxy(obj)
return obj | def function[readObject, parameter[self]]:
constant[
Reads an object from the stream.
]
variable[ref] assign[=] call[name[self].readInteger, parameter[constant[False]]]
if compare[binary_operation[name[ref] <ast.BitAnd object at 0x7da2590d6b60> name[REFERENCE_BIT]] equal[==] constant[0]] begin[:]
variable[obj] assign[=] call[name[self].context.getObject, parameter[binary_operation[name[ref] <ast.RShift object at 0x7da2590d6a40> constant[1]]]]
if compare[name[obj] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b15c7010>
if compare[name[self].use_proxies is constant[True]] begin[:]
variable[obj] assign[=] call[name[self].readProxy, parameter[name[obj]]]
return[name[obj]]
<ast.AugAssign object at 0x7da1b15c6b00>
variable[class_def] assign[=] call[name[self]._getClassDefinition, parameter[name[ref]]]
variable[alias] assign[=] name[class_def].alias
variable[obj] assign[=] call[name[alias].createInstance, parameter[]]
variable[obj_attrs] assign[=] call[name[dict], parameter[]]
call[name[self].context.addObject, parameter[name[obj]]]
if compare[name[class_def].encoding in tuple[[<ast.Attribute object at 0x7da1b15c6d40>, <ast.Attribute object at 0x7da1b15c79a0>]]] begin[:]
call[name[obj].__readamf__, parameter[call[name[DataInput], parameter[name[self]]]]]
if compare[name[self].use_proxies is constant[True]] begin[:]
variable[obj] assign[=] call[name[self].readProxy, parameter[name[obj]]]
return[name[obj]]
call[name[alias].applyAttributes, parameter[name[obj], name[obj_attrs]]]
if compare[name[self].use_proxies is constant[True]] begin[:]
variable[obj] assign[=] call[name[self].readProxy, parameter[name[obj]]]
return[name[obj]] | keyword[def] identifier[readObject] ( identifier[self] ):
literal[string]
identifier[ref] = identifier[self] . identifier[readInteger] ( keyword[False] )
keyword[if] identifier[ref] & identifier[REFERENCE_BIT] == literal[int] :
identifier[obj] = identifier[self] . identifier[context] . identifier[getObject] ( identifier[ref] >> literal[int] )
keyword[if] identifier[obj] keyword[is] keyword[None] :
keyword[raise] identifier[pyamf] . identifier[ReferenceError] ( literal[string] %( identifier[ref] >> literal[int] ,))
keyword[if] identifier[self] . identifier[use_proxies] keyword[is] keyword[True] :
identifier[obj] = identifier[self] . identifier[readProxy] ( identifier[obj] )
keyword[return] identifier[obj]
identifier[ref] >>= literal[int]
identifier[class_def] = identifier[self] . identifier[_getClassDefinition] ( identifier[ref] )
identifier[alias] = identifier[class_def] . identifier[alias]
identifier[obj] = identifier[alias] . identifier[createInstance] ( identifier[codec] = identifier[self] )
identifier[obj_attrs] = identifier[dict] ()
identifier[self] . identifier[context] . identifier[addObject] ( identifier[obj] )
keyword[if] identifier[class_def] . identifier[encoding] keyword[in] ( identifier[ObjectEncoding] . identifier[EXTERNAL] , identifier[ObjectEncoding] . identifier[PROXY] ):
identifier[obj] . identifier[__readamf__] ( identifier[DataInput] ( identifier[self] ))
keyword[if] identifier[self] . identifier[use_proxies] keyword[is] keyword[True] :
identifier[obj] = identifier[self] . identifier[readProxy] ( identifier[obj] )
keyword[return] identifier[obj]
keyword[elif] identifier[class_def] . identifier[encoding] == identifier[ObjectEncoding] . identifier[DYNAMIC] :
identifier[self] . identifier[_readStatic] ( identifier[class_def] , identifier[obj_attrs] )
identifier[self] . identifier[_readDynamic] ( identifier[class_def] , identifier[obj_attrs] )
keyword[elif] identifier[class_def] . identifier[encoding] == identifier[ObjectEncoding] . identifier[STATIC] :
identifier[self] . identifier[_readStatic] ( identifier[class_def] , identifier[obj_attrs] )
keyword[else] :
keyword[raise] identifier[pyamf] . identifier[DecodeError] ( literal[string] )
identifier[alias] . identifier[applyAttributes] ( identifier[obj] , identifier[obj_attrs] , identifier[codec] = identifier[self] )
keyword[if] identifier[self] . identifier[use_proxies] keyword[is] keyword[True] :
identifier[obj] = identifier[self] . identifier[readProxy] ( identifier[obj] )
keyword[return] identifier[obj] | def readObject(self):
"""
Reads an object from the stream.
"""
ref = self.readInteger(False)
if ref & REFERENCE_BIT == 0:
obj = self.context.getObject(ref >> 1)
if obj is None:
raise pyamf.ReferenceError('Unknown reference %d' % (ref >> 1,)) # depends on [control=['if'], data=[]]
if self.use_proxies is True:
obj = self.readProxy(obj) # depends on [control=['if'], data=[]]
return obj # depends on [control=['if'], data=[]]
ref >>= 1
class_def = self._getClassDefinition(ref)
alias = class_def.alias
obj = alias.createInstance(codec=self)
obj_attrs = dict()
self.context.addObject(obj)
if class_def.encoding in (ObjectEncoding.EXTERNAL, ObjectEncoding.PROXY):
obj.__readamf__(DataInput(self))
if self.use_proxies is True:
obj = self.readProxy(obj) # depends on [control=['if'], data=[]]
return obj # depends on [control=['if'], data=[]]
elif class_def.encoding == ObjectEncoding.DYNAMIC:
self._readStatic(class_def, obj_attrs)
self._readDynamic(class_def, obj_attrs) # depends on [control=['if'], data=[]]
elif class_def.encoding == ObjectEncoding.STATIC:
self._readStatic(class_def, obj_attrs) # depends on [control=['if'], data=[]]
else:
raise pyamf.DecodeError('Unknown object encoding')
alias.applyAttributes(obj, obj_attrs, codec=self)
if self.use_proxies is True:
obj = self.readProxy(obj) # depends on [control=['if'], data=[]]
return obj |
def list_qos_policies(self, retrieve_all=True, **_params):
"""Fetches a list of all qos policies for a project."""
# Pass filters in "params" argument to do_request
return self.list('policies', self.qos_policies_path,
retrieve_all, **_params) | def function[list_qos_policies, parameter[self, retrieve_all]]:
constant[Fetches a list of all qos policies for a project.]
return[call[name[self].list, parameter[constant[policies], name[self].qos_policies_path, name[retrieve_all]]]] | keyword[def] identifier[list_qos_policies] ( identifier[self] , identifier[retrieve_all] = keyword[True] ,** identifier[_params] ):
literal[string]
keyword[return] identifier[self] . identifier[list] ( literal[string] , identifier[self] . identifier[qos_policies_path] ,
identifier[retrieve_all] ,** identifier[_params] ) | def list_qos_policies(self, retrieve_all=True, **_params):
"""Fetches a list of all qos policies for a project."""
# Pass filters in "params" argument to do_request
return self.list('policies', self.qos_policies_path, retrieve_all, **_params) |
def element_data_str(z, eldata):
'''Return a string with all data for an element
This includes shell and ECP potential data
Parameters
----------
z : int or str
Element Z-number
eldata: dict
Data for the element to be printed
'''
sym = lut.element_sym_from_Z(z, True)
cs = contraction_string(eldata)
if cs == '':
cs = '(no electron shells)'
s = '\nElement: {} : {}\n'.format(sym, cs)
if 'electron_shells' in eldata:
for shellidx, shell in enumerate(eldata['electron_shells']):
s += electron_shell_str(shell, shellidx) + '\n'
if 'ecp_potentials' in eldata:
s += 'ECP: Element: {} Number of electrons: {}\n'.format(sym, eldata['ecp_electrons'])
for pot in eldata['ecp_potentials']:
s += ecp_pot_str(pot) + '\n'
return s | def function[element_data_str, parameter[z, eldata]]:
constant[Return a string with all data for an element
This includes shell and ECP potential data
Parameters
----------
z : int or str
Element Z-number
eldata: dict
Data for the element to be printed
]
variable[sym] assign[=] call[name[lut].element_sym_from_Z, parameter[name[z], constant[True]]]
variable[cs] assign[=] call[name[contraction_string], parameter[name[eldata]]]
if compare[name[cs] equal[==] constant[]] begin[:]
variable[cs] assign[=] constant[(no electron shells)]
variable[s] assign[=] call[constant[
Element: {} : {}
].format, parameter[name[sym], name[cs]]]
if compare[constant[electron_shells] in name[eldata]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c9920b0>, <ast.Name object at 0x7da20c9923b0>]]] in starred[call[name[enumerate], parameter[call[name[eldata]][constant[electron_shells]]]]] begin[:]
<ast.AugAssign object at 0x7da20c9937f0>
if compare[constant[ecp_potentials] in name[eldata]] begin[:]
<ast.AugAssign object at 0x7da20c991000>
for taget[name[pot]] in starred[call[name[eldata]][constant[ecp_potentials]]] begin[:]
<ast.AugAssign object at 0x7da20c9929e0>
return[name[s]] | keyword[def] identifier[element_data_str] ( identifier[z] , identifier[eldata] ):
literal[string]
identifier[sym] = identifier[lut] . identifier[element_sym_from_Z] ( identifier[z] , keyword[True] )
identifier[cs] = identifier[contraction_string] ( identifier[eldata] )
keyword[if] identifier[cs] == literal[string] :
identifier[cs] = literal[string]
identifier[s] = literal[string] . identifier[format] ( identifier[sym] , identifier[cs] )
keyword[if] literal[string] keyword[in] identifier[eldata] :
keyword[for] identifier[shellidx] , identifier[shell] keyword[in] identifier[enumerate] ( identifier[eldata] [ literal[string] ]):
identifier[s] += identifier[electron_shell_str] ( identifier[shell] , identifier[shellidx] )+ literal[string]
keyword[if] literal[string] keyword[in] identifier[eldata] :
identifier[s] += literal[string] . identifier[format] ( identifier[sym] , identifier[eldata] [ literal[string] ])
keyword[for] identifier[pot] keyword[in] identifier[eldata] [ literal[string] ]:
identifier[s] += identifier[ecp_pot_str] ( identifier[pot] )+ literal[string]
keyword[return] identifier[s] | def element_data_str(z, eldata):
"""Return a string with all data for an element
This includes shell and ECP potential data
Parameters
----------
z : int or str
Element Z-number
eldata: dict
Data for the element to be printed
"""
sym = lut.element_sym_from_Z(z, True)
cs = contraction_string(eldata)
if cs == '':
cs = '(no electron shells)' # depends on [control=['if'], data=['cs']]
s = '\nElement: {} : {}\n'.format(sym, cs)
if 'electron_shells' in eldata:
for (shellidx, shell) in enumerate(eldata['electron_shells']):
s += electron_shell_str(shell, shellidx) + '\n' # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['eldata']]
if 'ecp_potentials' in eldata:
s += 'ECP: Element: {} Number of electrons: {}\n'.format(sym, eldata['ecp_electrons'])
for pot in eldata['ecp_potentials']:
s += ecp_pot_str(pot) + '\n' # depends on [control=['for'], data=['pot']] # depends on [control=['if'], data=['eldata']]
return s |
def _find_address_and_connect(self, addresses):
"""Find and connect to the appropriate address.
:param addresses: A list of address tuples, as returned by socket.getaddrinfo.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: socket.socket
"""
error_message = None
for address in addresses:
sock = self._create_socket(socket_family=address[0])
try:
sock.connect(address[4])
except (IOError, OSError) as why:
error_message = why.strerror
continue
return sock
raise AMQPConnectionError(
'Could not connect to %s:%d error: %s' % (
self._parameters['hostname'], self._parameters['port'],
error_message
)
) | def function[_find_address_and_connect, parameter[self, addresses]]:
constant[Find and connect to the appropriate address.
:param addresses: A list of address tuples, as returned by socket.getaddrinfo.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: socket.socket
]
variable[error_message] assign[=] constant[None]
for taget[name[address]] in starred[name[addresses]] begin[:]
variable[sock] assign[=] call[name[self]._create_socket, parameter[]]
<ast.Try object at 0x7da1b2347f10>
return[name[sock]]
<ast.Raise object at 0x7da1b2346a10> | keyword[def] identifier[_find_address_and_connect] ( identifier[self] , identifier[addresses] ):
literal[string]
identifier[error_message] = keyword[None]
keyword[for] identifier[address] keyword[in] identifier[addresses] :
identifier[sock] = identifier[self] . identifier[_create_socket] ( identifier[socket_family] = identifier[address] [ literal[int] ])
keyword[try] :
identifier[sock] . identifier[connect] ( identifier[address] [ literal[int] ])
keyword[except] ( identifier[IOError] , identifier[OSError] ) keyword[as] identifier[why] :
identifier[error_message] = identifier[why] . identifier[strerror]
keyword[continue]
keyword[return] identifier[sock]
keyword[raise] identifier[AMQPConnectionError] (
literal[string] %(
identifier[self] . identifier[_parameters] [ literal[string] ], identifier[self] . identifier[_parameters] [ literal[string] ],
identifier[error_message]
)
) | def _find_address_and_connect(self, addresses):
"""Find and connect to the appropriate address.
:param addresses: A list of address tuples, as returned by socket.getaddrinfo.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: socket.socket
"""
error_message = None
for address in addresses:
sock = self._create_socket(socket_family=address[0])
try:
sock.connect(address[4]) # depends on [control=['try'], data=[]]
except (IOError, OSError) as why:
error_message = why.strerror
continue # depends on [control=['except'], data=['why']]
return sock # depends on [control=['for'], data=['address']]
raise AMQPConnectionError('Could not connect to %s:%d error: %s' % (self._parameters['hostname'], self._parameters['port'], error_message)) |
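A self-contained sketch of the same try-each-address pattern using socket.getaddrinfo directly; the function name, host, and port below are invented examples.

import socket

def connect_first_working(host, port, timeout=5.0):
    last_error = None
    for family, socktype, proto, _canon, sockaddr in socket.getaddrinfo(
            host, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
        sock = socket.socket(family, socktype, proto)
        sock.settimeout(timeout)
        try:
            sock.connect(sockaddr)
            return sock
        except OSError as why:
            last_error = why
            sock.close()
    raise ConnectionError(
        'could not connect to %s:%d error: %s' % (host, port, last_error))

# sock = connect_first_working('localhost', 5672)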
def from_dict(cls, fields, mapping):
"""
Create a Record from a dictionary of field mappings.
The *fields* object is used to determine the column indices
of fields in the mapping.
Args:
fields: the Relation schema for the table of this record
mapping: a dictionary or other mapping from field names to
column values
Returns:
a :class:`Record` object
"""
iterable = [None] * len(fields)
for key, value in mapping.items():
try:
index = fields.index(key)
except KeyError:
raise ItsdbError('Invalid field name(s): ' + key)
iterable[index] = value
return cls(fields, iterable) | def function[from_dict, parameter[cls, fields, mapping]]:
constant[
Create a Record from a dictionary of field mappings.
The *fields* object is used to determine the column indices
of fields in the mapping.
Args:
fields: the Relation schema for the table of this record
mapping: a dictionary or other mapping from field names to
column values
Returns:
a :class:`Record` object
]
variable[iterable] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18f58cca0>]] * call[name[len], parameter[name[fields]]]]
for taget[tuple[[<ast.Name object at 0x7da18f58f850>, <ast.Name object at 0x7da18f58fac0>]]] in starred[call[name[mapping].items, parameter[]]] begin[:]
<ast.Try object at 0x7da18f58df30>
call[name[iterable]][name[index]] assign[=] name[value]
return[call[name[cls], parameter[name[fields], name[iterable]]]] | keyword[def] identifier[from_dict] ( identifier[cls] , identifier[fields] , identifier[mapping] ):
literal[string]
identifier[iterable] =[ keyword[None] ]* identifier[len] ( identifier[fields] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[mapping] . identifier[items] ():
keyword[try] :
identifier[index] = identifier[fields] . identifier[index] ( identifier[key] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ItsdbError] ( literal[string] + identifier[key] )
identifier[iterable] [ identifier[index] ]= identifier[value]
keyword[return] identifier[cls] ( identifier[fields] , identifier[iterable] ) | def from_dict(cls, fields, mapping):
"""
Create a Record from a dictionary of field mappings.
The *fields* object is used to determine the column indices
of fields in the mapping.
Args:
fields: the Relation schema for the table of this record
mapping: a dictionary or other mapping from field names to
column values
Returns:
a :class:`Record` object
"""
iterable = [None] * len(fields)
for (key, value) in mapping.items():
try:
index = fields.index(key) # depends on [control=['try'], data=[]]
except KeyError:
raise ItsdbError('Invalid field name(s): ' + key) # depends on [control=['except'], data=[]]
iterable[index] = value # depends on [control=['for'], data=[]]
return cls(fields, iterable) |
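A toy illustration of the field-index mapping performed by from_dict; Fields is a minimal stand-in for the library's Relation schema, and the field names are invented examples.

class Fields(list):
    def index(self, key):
        try:
            return super().index(key)
        except ValueError:
            raise KeyError(key)  # mirror the KeyError the code above expects

fields = Fields(['i-id', 'i-input', 'i-wf'])
mapping = {'i-id': 10, 'i-input': 'the dog barks'}
iterable = [None] * len(fields)
for key, value in mapping.items():
    iterable[fields.index(key)] = value
print(iterable)  # [10, 'the dog barks', None] -- unset fields stay None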
def get(self, timeout=None):
"""Return the next available item from the tube.
Blocks if the tube is empty, until a producer for the tube puts an item on it."""
if timeout:
# Todo: Consider locking the poll/recv block.
# Otherwise, this method is not thread safe.
if self._conn1.poll(timeout):
return (True, self._conn1.recv())
else:
return (False, None)
return self._conn1.recv() | def function[get, parameter[self, timeout]]:
constant[Return the next available item from the tube.
Blocks if the tube is empty, until a producer for the tube puts an item on it.]
if name[timeout] begin[:]
if call[name[self]._conn1.poll, parameter[name[timeout]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da18f58f460>, <ast.Call object at 0x7da18f58d9c0>]]]
return[call[name[self]._conn1.recv, parameter[]]] | keyword[def] identifier[get] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[timeout] :
keyword[if] identifier[self] . identifier[_conn1] . identifier[poll] ( identifier[timeout] ):
keyword[return] ( keyword[True] , identifier[self] . identifier[_conn1] . identifier[recv] ())
keyword[else] :
keyword[return] ( keyword[False] , keyword[None] )
keyword[return] identifier[self] . identifier[_conn1] . identifier[recv] () | def get(self, timeout=None):
"""Return the next available item from the tube.
Blocks if the tube is empty, until a producer for the tube puts an item on it."""
if timeout:
# Todo: Consider locking the poll/recv block.
# Otherwise, this method is not thread safe.
if self._conn1.poll(timeout):
return (True, self._conn1.recv()) # depends on [control=['if'], data=[]]
else:
return (False, None) # depends on [control=['if'], data=[]]
return self._conn1.recv() |
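A runnable sketch of the poll/recv timeout pattern with a raw multiprocessing Pipe (the surrounding tube class is not reproduced):

from multiprocessing import Pipe

recv_end, send_end = Pipe(duplex=False)
send_end.send('payload')
if recv_end.poll(0.1):       # data arrived within the timeout
    print(True, recv_end.recv())
if not recv_end.poll(0.1):   # queue drained -> the (False, None) branch
    print(False, None)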
def validate_member_type(self, value):
"""Validate each member of the list, if member_type exists"""
if self.member_type:
for item in value:
self.member_type.validate(item) | def function[validate_member_type, parameter[self, value]]:
constant[Validate each member of the list, if member_type exists]
if name[self].member_type begin[:]
for taget[name[item]] in starred[name[value]] begin[:]
call[name[self].member_type.validate, parameter[name[item]]] | keyword[def] identifier[validate_member_type] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[member_type] :
keyword[for] identifier[item] keyword[in] identifier[value] :
identifier[self] . identifier[member_type] . identifier[validate] ( identifier[item] ) | def validate_member_type(self, value):
"""Validate each member of the list, if member_type exists"""
if self.member_type:
for item in value:
self.member_type.validate(item) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] |
def verify_counter(self, signature, counter):
""" Verifies that counter value is greater than previous signature"""
devices = self.__get_u2f_devices()
for device in devices:
# Searching for specific keyhandle
if device['keyHandle'] == signature['keyHandle']:
if counter > device['counter']:
# Updating counter record
device['counter'] = counter
self.__save_u2f_devices(devices)
return True
else:
return False | def function[verify_counter, parameter[self, signature, counter]]:
constant[ Verifies that the counter value is greater than that of the previous signature.]
variable[devices] assign[=] call[name[self].__get_u2f_devices, parameter[]]
for taget[name[device]] in starred[name[devices]] begin[:]
if compare[call[name[device]][constant[keyHandle]] equal[==] call[name[signature]][constant[keyHandle]]] begin[:]
if compare[name[counter] greater[>] call[name[device]][constant[counter]]] begin[:]
call[name[device]][constant[counter]] assign[=] name[counter]
call[name[self].__save_u2f_devices, parameter[name[devices]]]
return[constant[True]] | keyword[def] identifier[verify_counter] ( identifier[self] , identifier[signature] , identifier[counter] ):
literal[string]
identifier[devices] = identifier[self] . identifier[__get_u2f_devices] ()
keyword[for] identifier[device] keyword[in] identifier[devices] :
keyword[if] identifier[device] [ literal[string] ]== identifier[signature] [ literal[string] ]:
keyword[if] identifier[counter] > identifier[device] [ literal[string] ]:
identifier[device] [ literal[string] ]= identifier[counter]
identifier[self] . identifier[__save_u2f_devices] ( identifier[devices] )
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def verify_counter(self, signature, counter):
""" Verifies that counter value is greater than previous signature"""
devices = self.__get_u2f_devices()
for device in devices:
# Searching for specific keyhandle
if device['keyHandle'] == signature['keyHandle']:
if counter > device['counter']:
# Updating counter record
device['counter'] = counter
self.__save_u2f_devices(devices)
return True # depends on [control=['if'], data=['counter']]
else:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['device']] |
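A toy illustration of the monotonic-counter replay check above; the device list and signature dict are invented, and the private storage helpers are not reproduced.

devices = [{'keyHandle': 'kh-1', 'counter': 41}]
signature = {'keyHandle': 'kh-1'}

def check(counter):
    for device in devices:
        if device['keyHandle'] == signature['keyHandle']:
            if counter > device['counter']:
                device['counter'] = counter  # persist the new high-water mark
                return True
            return False

print(check(42))  # True  -- counter advanced past 41
print(check(42))  # False -- a replayed counter is rejected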
def create(self, segment):
"""A best-effort attempt to create directories.
Warnings are issued to the user if those directories could not
be created or if they don't exist.
The caller should only call this function if the user
requested prefetching (i.e. concurrency) to avoid spurious
warnings.
"""
def lackadaisical_mkdir(place):
ok = False
place = path.realpath(place)
try:
os.makedirs(place, 0o700)
ok = True
except EnvironmentError as e:
if e.errno == errno.EEXIST:
# Has already been created: this is the most
# common situation, and is fine.
ok = True
else:
logger.warning(
msg='could not create prefetch directory',
detail=('Prefetch directory creation target: {0}, {1}'
.format(place, e.strerror)))
return ok
ok = True
for d in [self.prefetched_dir, self.running]:
ok &= lackadaisical_mkdir(d)
lackadaisical_mkdir(self.seg_dir(segment)) | def function[create, parameter[self, segment]]:
constant[A best-effort attempt to create directories.
Warnings are issued to the user if those directories could not
be created or if they don't exist.
The caller should only call this function if the user
requested prefetching (i.e. concurrency) to avoid spurious
warnings.
]
def function[lackadaisical_mkdir, parameter[place]]:
variable[ok] assign[=] constant[False]
variable[place] assign[=] call[name[path].realpath, parameter[name[place]]]
<ast.Try object at 0x7da20c6a9450>
return[name[ok]]
variable[ok] assign[=] constant[True]
for taget[name[d]] in starred[list[[<ast.Attribute object at 0x7da20c6a9120>, <ast.Attribute object at 0x7da20c6a9180>]]] begin[:]
<ast.AugAssign object at 0x7da20c6aa6e0>
call[name[lackadaisical_mkdir], parameter[call[name[self].seg_dir, parameter[name[segment]]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[segment] ):
literal[string]
keyword[def] identifier[lackadaisical_mkdir] ( identifier[place] ):
identifier[ok] = keyword[False]
identifier[place] = identifier[path] . identifier[realpath] ( identifier[place] )
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[place] , literal[int] )
identifier[ok] = keyword[True]
keyword[except] identifier[EnvironmentError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] == identifier[errno] . identifier[EEXIST] :
identifier[ok] = keyword[True]
keyword[else] :
identifier[logger] . identifier[warning] (
identifier[msg] = literal[string] ,
identifier[detail] =( literal[string]
. identifier[format] ( identifier[place] , identifier[e] . identifier[strerror] )))
keyword[return] identifier[ok]
identifier[ok] = keyword[True]
keyword[for] identifier[d] keyword[in] [ identifier[self] . identifier[prefetched_dir] , identifier[self] . identifier[running] ]:
identifier[ok] &= identifier[lackadaisical_mkdir] ( identifier[d] )
identifier[lackadaisical_mkdir] ( identifier[self] . identifier[seg_dir] ( identifier[segment] )) | def create(self, segment):
"""A best-effort attempt to create directories.
Warnings are issued to the user if those directories could not
be created or if they don't exist.
The caller should only call this function if the user
requested prefetching (i.e. concurrency) to avoid spurious
warnings.
"""
def lackadaisical_mkdir(place):
ok = False
place = path.realpath(place)
try:
os.makedirs(place, 448)
ok = True # depends on [control=['try'], data=[]]
except EnvironmentError as e:
if e.errno == errno.EEXIST:
# Has already been created: this is the most
# common situation, and is fine.
ok = True # depends on [control=['if'], data=[]]
else:
logger.warning(msg='could not create prefetch directory', detail='Prefetch directory creation target: {0}, {1}'.format(place, e.strerror)) # depends on [control=['except'], data=['e']]
return ok
ok = True
for d in [self.prefetched_dir, self.running]:
ok &= lackadaisical_mkdir(d) # depends on [control=['for'], data=['d']]
lackadaisical_mkdir(self.seg_dir(segment)) |
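A standalone sketch of the tolerant-mkdir idiom used by lackadaisical_mkdir; on Python 3 much the same effect is available via os.makedirs(place, 0o700, exist_ok=True).

import errno
import os

def tolerant_mkdir(place):
    try:
        os.makedirs(place, 0o700)
        return True
    except EnvironmentError as e:
        if e.errno == errno.EEXIST:
            return True  # already exists: the common, harmless case
        print('could not create directory %s: %s' % (place, e.strerror))
        return False

print(tolerant_mkdir('/tmp/prefetch-demo'))
print(tolerant_mkdir('/tmp/prefetch-demo'))  # second call still True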
def create_user(config_data):
"""
Create admin user without user input
:param config_data: configuration data
"""
with chdir(os.path.abspath(config_data.project_directory)):
env = deepcopy(dict(os.environ))
env[str('DJANGO_SETTINGS_MODULE')] = str('{0}.settings'.format(config_data.project_name))
env[str('PYTHONPATH')] = str(os.pathsep.join(map(shlex_quote, sys.path)))
subprocess.check_call(
[sys.executable, 'create_user.py'], env=env, stderr=subprocess.STDOUT
)
for ext in ['py', 'pyc']:
try:
os.remove('create_user.{0}'.format(ext))
except OSError:
pass | def function[create_user, parameter[config_data]]:
constant[
Create admin user without user input
:param config_data: configuration data
]
with call[name[chdir], parameter[call[name[os].path.abspath, parameter[name[config_data].project_directory]]]] begin[:]
variable[env] assign[=] call[name[deepcopy], parameter[call[name[dict], parameter[name[os].environ]]]]
call[name[env]][call[name[str], parameter[constant[DJANGO_SETTINGS_MODULE]]]] assign[=] call[name[str], parameter[call[constant[{0}.settings].format, parameter[name[config_data].project_name]]]]
call[name[env]][call[name[str], parameter[constant[PYTHONPATH]]]] assign[=] call[name[str], parameter[call[name[os].pathsep.join, parameter[call[name[map], parameter[name[shlex_quote], name[sys].path]]]]]]
call[name[subprocess].check_call, parameter[list[[<ast.Attribute object at 0x7da18f09f4f0>, <ast.Constant object at 0x7da18f09c070>]]]]
for taget[name[ext]] in starred[list[[<ast.Constant object at 0x7da18f09c790>, <ast.Constant object at 0x7da18f09e0b0>]]] begin[:]
<ast.Try object at 0x7da18f09c610> | keyword[def] identifier[create_user] ( identifier[config_data] ):
literal[string]
keyword[with] identifier[chdir] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[config_data] . identifier[project_directory] )):
identifier[env] = identifier[deepcopy] ( identifier[dict] ( identifier[os] . identifier[environ] ))
identifier[env] [ identifier[str] ( literal[string] )]= identifier[str] ( literal[string] . identifier[format] ( identifier[config_data] . identifier[project_name] ))
identifier[env] [ identifier[str] ( literal[string] )]= identifier[str] ( identifier[os] . identifier[pathsep] . identifier[join] ( identifier[map] ( identifier[shlex_quote] , identifier[sys] . identifier[path] )))
identifier[subprocess] . identifier[check_call] (
[ identifier[sys] . identifier[executable] , literal[string] ], identifier[env] = identifier[env] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT]
)
keyword[for] identifier[ext] keyword[in] [ literal[string] , literal[string] ]:
keyword[try] :
identifier[os] . identifier[remove] ( literal[string] . identifier[format] ( identifier[ext] ))
keyword[except] identifier[OSError] :
keyword[pass] | def create_user(config_data):
"""
Create admin user without user input
:param config_data: configuration data
"""
with chdir(os.path.abspath(config_data.project_directory)):
env = deepcopy(dict(os.environ))
env[str('DJANGO_SETTINGS_MODULE')] = str('{0}.settings'.format(config_data.project_name))
env[str('PYTHONPATH')] = str(os.pathsep.join(map(shlex_quote, sys.path)))
subprocess.check_call([sys.executable, 'create_user.py'], env=env, stderr=subprocess.STDOUT)
for ext in ['py', 'pyc']:
try:
os.remove('create_user.{0}'.format(ext)) # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['ext']] # depends on [control=['with'], data=[]] |
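A minimal sketch of driving create_user(); the SimpleNamespace below merely stands in for the real configuration object (the attribute names come from the code above, the values are hypothetical):

from types import SimpleNamespace

config_data = SimpleNamespace(
    project_directory='/tmp/myproject',   # hypothetical project path
    project_name='myproject',
)
# create_user(config_data)  # would run create_user.py under the project's settings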
def _proxy_settings(self):
"""
Create/replace ~/.datacats/run/proxy-environment and return
the entry for a read-only (ro) mount for containers
"""
if not ('https_proxy' in environ or 'HTTPS_PROXY' in environ
or 'http_proxy' in environ or 'HTTP_PROXY' in environ):
return {}
https_proxy = environ.get('https_proxy')
if https_proxy is None:
https_proxy = environ.get('HTTPS_PROXY')
http_proxy = environ.get('http_proxy')
if http_proxy is None:
http_proxy = environ.get('HTTP_PROXY')
no_proxy = environ.get('no_proxy')
if no_proxy is None:
no_proxy = environ.get('NO_PROXY', '')
no_proxy = no_proxy + ',solr,db'
out = [
'PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:'
'/bin:/usr/games:/usr/local/games"\n']
if https_proxy is not None:
out.append('https_proxy=' + posix_quote(https_proxy) + '\n')
out.append('HTTPS_PROXY=' + posix_quote(https_proxy) + '\n')
if http_proxy is not None:
out.append('http_proxy=' + posix_quote(http_proxy) + '\n')
out.append('HTTP_PROXY=' + posix_quote(http_proxy) + '\n')
if no_proxy is not None:
out.append('no_proxy=' + posix_quote(no_proxy) + '\n')
out.append('NO_PROXY=' + posix_quote(no_proxy) + '\n')
with open(self.sitedir + '/run/proxy-environment', 'w') as f:
f.write("".join(out))
return {self.sitedir + '/run/proxy-environment': '/etc/environment'} | def function[_proxy_settings, parameter[self]]:
constant[
Create/replace ~/.datacats/run/proxy-environment and return
the entry for a read-only (ro) mount for containers
]
if <ast.UnaryOp object at 0x7da18ede62f0> begin[:]
return[dictionary[[], []]]
variable[https_proxy] assign[=] call[name[environ].get, parameter[constant[https_proxy]]]
if compare[name[https_proxy] is constant[None]] begin[:]
variable[https_proxy] assign[=] call[name[environ].get, parameter[constant[HTTPS_PROXY]]]
variable[http_proxy] assign[=] call[name[environ].get, parameter[constant[http_proxy]]]
if compare[name[http_proxy] is constant[None]] begin[:]
variable[http_proxy] assign[=] call[name[environ].get, parameter[constant[HTTP_PROXY]]]
variable[no_proxy] assign[=] call[name[environ].get, parameter[constant[no_proxy]]]
if compare[name[no_proxy] is constant[None]] begin[:]
variable[no_proxy] assign[=] call[name[environ].get, parameter[constant[NO_PROXY], constant[]]]
variable[no_proxy] assign[=] binary_operation[name[no_proxy] + constant[,solr,db]]
variable[out] assign[=] list[[<ast.Constant object at 0x7da18ede6380>]]
if compare[name[https_proxy] is_not constant[None]] begin[:]
call[name[out].append, parameter[binary_operation[binary_operation[constant[https_proxy=] + call[name[posix_quote], parameter[name[https_proxy]]]] + constant[
]]]]
call[name[out].append, parameter[binary_operation[binary_operation[constant[HTTPS_PROXY=] + call[name[posix_quote], parameter[name[https_proxy]]]] + constant[
]]]]
if compare[name[http_proxy] is_not constant[None]] begin[:]
call[name[out].append, parameter[binary_operation[binary_operation[constant[http_proxy=] + call[name[posix_quote], parameter[name[http_proxy]]]] + constant[
]]]]
call[name[out].append, parameter[binary_operation[binary_operation[constant[HTTP_PROXY=] + call[name[posix_quote], parameter[name[http_proxy]]]] + constant[
]]]]
if compare[name[no_proxy] is_not constant[None]] begin[:]
call[name[out].append, parameter[binary_operation[binary_operation[constant[no_proxy=] + call[name[posix_quote], parameter[name[no_proxy]]]] + constant[
]]]]
call[name[out].append, parameter[binary_operation[binary_operation[constant[NO_PROXY=] + call[name[posix_quote], parameter[name[no_proxy]]]] + constant[
]]]]
with call[name[open], parameter[binary_operation[name[self].sitedir + constant[/run/proxy-environment]], constant[w]]] begin[:]
call[name[f].write, parameter[call[constant[].join, parameter[name[out]]]]]
return[dictionary[[<ast.BinOp object at 0x7da20c6c5870>], [<ast.Constant object at 0x7da20c6c6f20>]]] | keyword[def] identifier[_proxy_settings] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] ( literal[string] keyword[in] identifier[environ] keyword[or] literal[string] keyword[in] identifier[environ]
keyword[or] literal[string] keyword[in] identifier[environ] keyword[or] literal[string] keyword[in] identifier[environ] ):
keyword[return] {}
identifier[https_proxy] = identifier[environ] . identifier[get] ( literal[string] )
keyword[if] identifier[https_proxy] keyword[is] keyword[None] :
identifier[https_proxy] = identifier[environ] . identifier[get] ( literal[string] )
identifier[http_proxy] = identifier[environ] . identifier[get] ( literal[string] )
keyword[if] identifier[http_proxy] keyword[is] keyword[None] :
identifier[http_proxy] = identifier[environ] . identifier[get] ( literal[string] )
identifier[no_proxy] = identifier[environ] . identifier[get] ( literal[string] )
keyword[if] identifier[no_proxy] keyword[is] keyword[None] :
identifier[no_proxy] = identifier[environ] . identifier[get] ( literal[string] , literal[string] )
identifier[no_proxy] = identifier[no_proxy] + literal[string]
identifier[out] =[
literal[string]
literal[string] ]
keyword[if] identifier[https_proxy] keyword[is] keyword[not] keyword[None] :
identifier[out] . identifier[append] ( literal[string] + identifier[posix_quote] ( identifier[https_proxy] )+ literal[string] )
identifier[out] . identifier[append] ( literal[string] + identifier[posix_quote] ( identifier[https_proxy] )+ literal[string] )
keyword[if] identifier[http_proxy] keyword[is] keyword[not] keyword[None] :
identifier[out] . identifier[append] ( literal[string] + identifier[posix_quote] ( identifier[http_proxy] )+ literal[string] )
identifier[out] . identifier[append] ( literal[string] + identifier[posix_quote] ( identifier[http_proxy] )+ literal[string] )
keyword[if] identifier[no_proxy] keyword[is] keyword[not] keyword[None] :
identifier[out] . identifier[append] ( literal[string] + identifier[posix_quote] ( identifier[no_proxy] )+ literal[string] )
identifier[out] . identifier[append] ( literal[string] + identifier[posix_quote] ( identifier[no_proxy] )+ literal[string] )
keyword[with] identifier[open] ( identifier[self] . identifier[sitedir] + literal[string] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[out] ))
keyword[return] { identifier[self] . identifier[sitedir] + literal[string] : literal[string] } | def _proxy_settings(self):
"""
Create/replace ~/.datacats/run/proxy-environment and return
the entry for a read-only (ro) mount for containers
"""
if not ('https_proxy' in environ or 'HTTPS_PROXY' in environ or 'http_proxy' in environ or ('HTTP_PROXY' in environ)):
return {} # depends on [control=['if'], data=[]]
https_proxy = environ.get('https_proxy')
if https_proxy is None:
https_proxy = environ.get('HTTPS_PROXY') # depends on [control=['if'], data=['https_proxy']]
http_proxy = environ.get('http_proxy')
if http_proxy is None:
http_proxy = environ.get('HTTP_PROXY') # depends on [control=['if'], data=['http_proxy']]
no_proxy = environ.get('no_proxy')
if no_proxy is None:
no_proxy = environ.get('NO_PROXY', '') # depends on [control=['if'], data=['no_proxy']]
no_proxy = no_proxy + ',solr,db'
out = ['PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games"\n']
if https_proxy is not None:
out.append('https_proxy=' + posix_quote(https_proxy) + '\n')
out.append('HTTPS_PROXY=' + posix_quote(https_proxy) + '\n') # depends on [control=['if'], data=['https_proxy']]
if http_proxy is not None:
out.append('http_proxy=' + posix_quote(http_proxy) + '\n')
out.append('HTTP_PROXY=' + posix_quote(http_proxy) + '\n') # depends on [control=['if'], data=['http_proxy']]
if no_proxy is not None:
out.append('no_proxy=' + posix_quote(no_proxy) + '\n')
out.append('NO_PROXY=' + posix_quote(no_proxy) + '\n') # depends on [control=['if'], data=['no_proxy']]
with open(self.sitedir + '/run/proxy-environment', 'w') as f:
f.write(''.join(out)) # depends on [control=['with'], data=['f']]
return {self.sitedir + '/run/proxy-environment': '/etc/environment'} |
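The posix_quote() helper used above is not shown in this snippet. Assuming it only needs to shell-quote values for the environment file, a minimal stand-in could delegate to shlex.quote:

from shlex import quote as posix_quote

print(posix_quote('http://user:pass word@proxy:3128'))
# -> 'http://user:pass word@proxy:3128' (single-quoted because of the space)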
def write(self, buf, **kwargs):
"""
Write the bytes from ``buf`` to the device. Transmits a stop bit if
``stop`` is set.
If ``start`` or ``end`` is provided, then the buffer will be sliced
as if ``buffer[start:end]``. This will not cause an allocation like
``buffer[start:end]`` will so it saves memory.
:param bytearray buf: buffer containing the bytes to write
:param int start: Index to start writing from
:param int end: Index to read up to but not include
:param bool stop: If true, output an I2C stop condition after the buffer is written
"""
self.i2c.writeto(self.device_address, buf, **kwargs)
if self._debug:
print("i2c_device.write:", [hex(i) for i in buf]) | def function[write, parameter[self, buf]]:
constant[
Write the bytes from ``buf`` to the device. Transmits a stop bit if
``stop`` is set.
If ``start`` or ``end`` is provided, then the buffer will be sliced
as if ``buffer[start:end]``. This will not cause an allocation like
``buffer[start:end]`` will so it saves memory.
:param bytearray buf: buffer containing the bytes to write
:param int start: Index to start writing from
:param int end: Index to read up to but not include
:param bool stop: If true, output an I2C stop condition after the buffer is written
]
call[name[self].i2c.writeto, parameter[name[self].device_address, name[buf]]]
if name[self]._debug begin[:]
call[name[print], parameter[constant[i2c_device.write:], <ast.ListComp object at 0x7da1b014cd00>]] | keyword[def] identifier[write] ( identifier[self] , identifier[buf] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[i2c] . identifier[writeto] ( identifier[self] . identifier[device_address] , identifier[buf] ,** identifier[kwargs] )
keyword[if] identifier[self] . identifier[_debug] :
identifier[print] ( literal[string] ,[ identifier[hex] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[buf] ]) | def write(self, buf, **kwargs):
"""
Write the bytes from ``buf`` to the device. Transmits a stop bit if
``stop`` is set.
If ``start`` or ``end`` is provided, then the buffer will be sliced
as if ``buffer[start:end]``. This will not cause an allocation like
``buffer[start:end]`` will so it saves memory.
:param bytearray buf: buffer containing the bytes to write
:param int start: Index to start writing from
:param int end: Index to read up to but not include
:param bool stop: If true, output an I2C stop condition after the buffer is written
"""
self.i2c.writeto(self.device_address, buf, **kwargs)
if self._debug:
print('i2c_device.write:', [hex(i) for i in buf]) # depends on [control=['if'], data=[]] |
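The start/end parameters exist to avoid the allocation that buf[start:end] incurs; plain Python gets the same zero-copy windowing with memoryview, as this self-contained illustration shows:

buf = bytearray(b'\x01\x02\x03\x04')
view = memoryview(buf)[1:3]       # a window onto buf; no bytes are copied
assert bytes(view) == b'\x02\x03'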
def restart(self, timeout=300, config_callback=None):
"""restart server: stop() and start()
return status of start command
"""
self.stop()
if config_callback:
self.cfg = config_callback(self.cfg.copy())
self.config_path = process.write_config(self.cfg)
return self.start(timeout) | def function[restart, parameter[self, timeout, config_callback]]:
constant[restart server: stop() and start()
return status of start command
]
call[name[self].stop, parameter[]]
if name[config_callback] begin[:]
name[self].cfg assign[=] call[name[config_callback], parameter[call[name[self].cfg.copy, parameter[]]]]
name[self].config_path assign[=] call[name[process].write_config, parameter[name[self].cfg]]
return[call[name[self].start, parameter[name[timeout]]]] | keyword[def] identifier[restart] ( identifier[self] , identifier[timeout] = literal[int] , identifier[config_callback] = keyword[None] ):
literal[string]
identifier[self] . identifier[stop] ()
keyword[if] identifier[config_callback] :
identifier[self] . identifier[cfg] = identifier[config_callback] ( identifier[self] . identifier[cfg] . identifier[copy] ())
identifier[self] . identifier[config_path] = identifier[process] . identifier[write_config] ( identifier[self] . identifier[cfg] )
keyword[return] identifier[self] . identifier[start] ( identifier[timeout] ) | def restart(self, timeout=300, config_callback=None):
"""restart server: stop() and start()
return status of start command
"""
self.stop()
if config_callback:
self.cfg = config_callback(self.cfg.copy()) # depends on [control=['if'], data=[]]
self.config_path = process.write_config(self.cfg)
return self.start(timeout) |
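A hedged usage sketch for the config_callback hook: it receives a copy of the current config dict and returns the dict to write out before the server comes back up. Both server and the 'verbose' key below are assumptions for illustration:

def enable_verbose(cfg):
    cfg['verbose'] = True   # hypothetical config key
    return cfg

# server.restart(timeout=60, config_callback=enable_verbose)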
def findB(self, tag_name, params=None, fn=None, case_sensitive=False):
"""
Same as :meth:`findAllB`, but without `endtags`.
You can always get them from :attr:`endtag` property.
"""
return [
x for x in self.findAllB(tag_name, params, fn, case_sensitive)
if not x.isEndTag()
] | def function[findB, parameter[self, tag_name, params, fn, case_sensitive]]:
constant[
Same as :meth:`findAllB`, but without `endtags`.
You can always get them from :attr:`endtag` property.
]
return[<ast.ListComp object at 0x7da1b16aa560>] | keyword[def] identifier[findB] ( identifier[self] , identifier[tag_name] , identifier[params] = keyword[None] , identifier[fn] = keyword[None] , identifier[case_sensitive] = keyword[False] ):
literal[string]
keyword[return] [
identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[findAllB] ( identifier[tag_name] , identifier[params] , identifier[fn] , identifier[case_sensitive] )
keyword[if] keyword[not] identifier[x] . identifier[isEndTag] ()
] | def findB(self, tag_name, params=None, fn=None, case_sensitive=False):
"""
Same as :meth:`findAllB`, but without `endtags`.
You can always get them from :attr:`endtag` property.
"""
return [x for x in self.findAllB(tag_name, params, fn, case_sensitive) if not x.isEndTag()] |
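The list comprehension above is a filter over the breadth-first search results: keep every matched tag whose isEndTag() is false. The same idiom on stand-in objects, self-contained:

class Tag:
    def __init__(self, name, end=False):
        self.name, self._end = name, end

    def isEndTag(self):
        return self._end

tags = [Tag('p'), Tag('p', end=True), Tag('b')]
opening = [t for t in tags if not t.isEndTag()]
assert [t.name for t in opening] == ['p', 'b']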
def create_view_for_template(app_name, template_name):
'''
Creates a view function for templates (used when a view.py file doesn't exist but the .html does)
Raises TemplateDoesNotExist if the template doesn't exist.
'''
# ensure the template exists
apps.get_app_config('django_mako_plus').engine.get_template_loader(app_name).get_template(template_name)
# create the view function
def template_view(request, *args, **kwargs):
# not caching the template object (getting it each time) because Mako has its own cache
dmp = apps.get_app_config('django_mako_plus')
template = dmp.engine.get_template_loader(app_name).get_template(template_name)
return template.render_to_response(request=request, context=kwargs)
template_view.view_type = 'template'
return template_view | def function[create_view_for_template, parameter[app_name, template_name]]:
constant[
Creates a view function for templates (used when a view.py file doesn't exist but the .html does)
Raises TemplateDoesNotExist if the template doesn't exist.
]
call[call[call[name[apps].get_app_config, parameter[constant[django_mako_plus]]].engine.get_template_loader, parameter[name[app_name]]].get_template, parameter[name[template_name]]]
def function[template_view, parameter[request]]:
variable[dmp] assign[=] call[name[apps].get_app_config, parameter[constant[django_mako_plus]]]
variable[template] assign[=] call[call[name[dmp].engine.get_template_loader, parameter[name[app_name]]].get_template, parameter[name[template_name]]]
return[call[name[template].render_to_response, parameter[]]]
name[template_view].view_type assign[=] constant[template]
return[name[template_view]] | keyword[def] identifier[create_view_for_template] ( identifier[app_name] , identifier[template_name] ):
literal[string]
identifier[apps] . identifier[get_app_config] ( literal[string] ). identifier[engine] . identifier[get_template_loader] ( identifier[app_name] ). identifier[get_template] ( identifier[template_name] )
keyword[def] identifier[template_view] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
identifier[dmp] = identifier[apps] . identifier[get_app_config] ( literal[string] )
identifier[template] = identifier[dmp] . identifier[engine] . identifier[get_template_loader] ( identifier[app_name] ). identifier[get_template] ( identifier[template_name] )
keyword[return] identifier[template] . identifier[render_to_response] ( identifier[request] = identifier[request] , identifier[context] = identifier[kwargs] )
identifier[template_view] . identifier[view_type] = literal[string]
keyword[return] identifier[template_view] | def create_view_for_template(app_name, template_name):
"""
Creates a view function for templates (used when a view.py file doesn't exist but the .html does)
Raises TemplateDoesNotExist if the template doesn't exist.
"""
# ensure the template exists
apps.get_app_config('django_mako_plus').engine.get_template_loader(app_name).get_template(template_name)
# create the view function
def template_view(request, *args, **kwargs):
# not caching the template object (getting it each time) because Mako has its own cache
dmp = apps.get_app_config('django_mako_plus')
template = dmp.engine.get_template_loader(app_name).get_template(template_name)
return template.render_to_response(request=request, context=kwargs)
template_view.view_type = 'template'
return template_view |
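create_view_for_template() is a view factory: a closure captures the app and template names, and the returned function carries metadata as an attribute. A minimal self-contained sketch of that pattern (the greeter is purely illustrative):

def make_greeter(greeting):
    def view(name):
        return '{0}, {1}!'.format(greeting, name)
    view.view_type = 'template'   # attribute attached to the function object
    return view

hello = make_greeter('Hello')
assert hello('world') == 'Hello, world!'
assert hello.view_type == 'template'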
def _add_observation(self, x_to_add, y_to_add):
"""Add observation to window, updating means/variance efficiently."""
self._add_observation_to_means(x_to_add, y_to_add)
self._add_observation_to_variances(x_to_add, y_to_add)
self.window_size += 1 | def function[_add_observation, parameter[self, x_to_add, y_to_add]]:
constant[Add observation to window, updating means/variance efficiently.]
call[name[self]._add_observation_to_means, parameter[name[x_to_add], name[y_to_add]]]
call[name[self]._add_observation_to_variances, parameter[name[x_to_add], name[y_to_add]]]
<ast.AugAssign object at 0x7da1b19a2a70> | keyword[def] identifier[_add_observation] ( identifier[self] , identifier[x_to_add] , identifier[y_to_add] ):
literal[string]
identifier[self] . identifier[_add_observation_to_means] ( identifier[x_to_add] , identifier[y_to_add] )
identifier[self] . identifier[_add_observation_to_variances] ( identifier[x_to_add] , identifier[y_to_add] )
identifier[self] . identifier[window_size] += literal[int] | def _add_observation(self, x_to_add, y_to_add):
"""Add observation to window, updating means/variance efficiently."""
self._add_observation_to_means(x_to_add, y_to_add)
self._add_observation_to_variances(x_to_add, y_to_add)
self.window_size += 1 |
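The helper methods it calls are not shown, but the shape of such constant-time updates is the Welford-style running mean, sketched below as a self-contained illustration of the idea (not the class's actual formulas):

def add_to_mean(mean, count, x):
    # new_mean = old_mean + (x - old_mean) / new_count
    return mean + (x - mean) / (count + 1)

m = 0.0
for n, x in enumerate([2.0, 4.0, 6.0]):
    m = add_to_mean(m, n, x)
assert m == 4.0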
def transform_data(function, input_data):
''' a function to apply a function to each value in a nested dictionary
:param function: callable function with a single input of any datatype
:param input_data: dictionary or list with nested data to transform
:return: dictionary or list with data transformed by function
'''
# construct copy
try:
from copy import deepcopy
output_data = deepcopy(input_data)
except:
raise ValueError('transform_data() input_data argument cannot contain module datatypes.')
# walk over data and apply function
for dot_path, value in walk_data(input_data):
current_endpoint = output_data
segment_list = segment_path(dot_path)
segment = None
if segment_list:
for i in range(len(segment_list)):
try:
segment = int(segment_list[i])
except:
segment = segment_list[i]
if i + 1 == len(segment_list):
pass
else:
current_endpoint = current_endpoint[segment]
current_endpoint[segment] = function(value)
return output_data | def function[transform_data, parameter[function, input_data]]:
constant[ a function to apply a function to each value in a nested dictionary
:param function: callable function with a single input of any datatype
:param input_data: dictionary or list with nested data to transform
:return: dictionary or list with data transformed by function
]
<ast.Try object at 0x7da204565d50>
for taget[tuple[[<ast.Name object at 0x7da204567b50>, <ast.Name object at 0x7da204567610>]]] in starred[call[name[walk_data], parameter[name[input_data]]]] begin[:]
variable[current_endpoint] assign[=] name[output_data]
variable[segment_list] assign[=] call[name[segment_path], parameter[name[dot_path]]]
variable[segment] assign[=] constant[None]
if name[segment_list] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[segment_list]]]]]] begin[:]
<ast.Try object at 0x7da204565750>
if compare[binary_operation[name[i] + constant[1]] equal[==] call[name[len], parameter[name[segment_list]]]] begin[:]
pass
call[name[current_endpoint]][name[segment]] assign[=] call[name[function], parameter[name[value]]]
return[name[output_data]] | keyword[def] identifier[transform_data] ( identifier[function] , identifier[input_data] ):
literal[string]
keyword[try] :
keyword[from] identifier[copy] keyword[import] identifier[deepcopy]
identifier[output_data] = identifier[deepcopy] ( identifier[input_data] )
keyword[except] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[dot_path] , identifier[value] keyword[in] identifier[walk_data] ( identifier[input_data] ):
identifier[current_endpoint] = identifier[output_data]
identifier[segment_list] = identifier[segment_path] ( identifier[dot_path] )
identifier[segment] = keyword[None]
keyword[if] identifier[segment_list] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[segment_list] )):
keyword[try] :
identifier[segment] = identifier[int] ( identifier[segment_list] [ identifier[i] ])
keyword[except] :
identifier[segment] = identifier[segment_list] [ identifier[i] ]
keyword[if] identifier[i] + literal[int] == identifier[len] ( identifier[segment_list] ):
keyword[pass]
keyword[else] :
identifier[current_endpoint] = identifier[current_endpoint] [ identifier[segment] ]
identifier[current_endpoint] [ identifier[segment] ]= identifier[function] ( identifier[value] )
keyword[return] identifier[output_data] | def transform_data(function, input_data):
""" a function to apply a function to each value in a nested dictionary
:param function: callable function with a single input of any datatype
:param input_data: dictionary or list with nested data to transform
:return: dictionary or list with data transformed by function
"""
# construct copy
try:
from copy import deepcopy
output_data = deepcopy(input_data) # depends on [control=['try'], data=[]]
except:
raise ValueError('transform_data() input_data argument cannot contain module datatypes.') # depends on [control=['except'], data=[]]
# walk over data and apply function
for (dot_path, value) in walk_data(input_data):
current_endpoint = output_data
segment_list = segment_path(dot_path)
segment = None
if segment_list:
for i in range(len(segment_list)):
try:
segment = int(segment_list[i]) # depends on [control=['try'], data=[]]
except:
segment = segment_list[i] # depends on [control=['except'], data=[]]
if i + 1 == len(segment_list):
pass # depends on [control=['if'], data=[]]
else:
current_endpoint = current_endpoint[segment] # depends on [control=['for'], data=['i']]
current_endpoint[segment] = function(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return output_data |
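A usage sketch, assuming walk_data() yields dotted paths to leaf values and segment_path() splits them as the code implies:

data = {'a': {'b': [1, 2]}, 'c': 3}
doubled = transform_data(lambda v: v * 2, data)
# expected: {'a': {'b': [2, 4]}, 'c': 6}, with `data` itself untouched
# thanks to the deepcopy up front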
def _stub_tag(constructor, node):
"""Stub a constructor with a dictionary."""
seen = getattr(constructor, "_stub_seen", None)
if seen is None:
seen = constructor._stub_seen = set()
if node.tag not in seen:
print("YAML tag {} is not supported".format(node.tag))
seen.add(node.tag)
return {} | def function[_stub_tag, parameter[constructor, node]]:
constant[Stub a constructor with a dictionary.]
variable[seen] assign[=] call[name[getattr], parameter[name[constructor], constant[_stub_seen], constant[None]]]
if compare[name[seen] is constant[None]] begin[:]
variable[seen] assign[=] call[name[set], parameter[]]
if compare[name[node].tag <ast.NotIn object at 0x7da2590d7190> name[seen]] begin[:]
call[name[print], parameter[call[constant[YAML tag {} is not supported].format, parameter[name[node].tag]]]]
call[name[seen].add, parameter[name[node].tag]]
return[dictionary[[], []]] | keyword[def] identifier[_stub_tag] ( identifier[constructor] , identifier[node] ):
literal[string]
identifier[seen] = identifier[getattr] ( identifier[constructor] , literal[string] , keyword[None] )
keyword[if] identifier[seen] keyword[is] keyword[None] :
identifier[seen] = identifier[constructor] . identifier[_stub_seen] = identifier[set] ()
keyword[if] identifier[node] . identifier[tag] keyword[not] keyword[in] identifier[seen] :
identifier[print] ( literal[string] . identifier[format] ( identifier[node] . identifier[tag] ))
identifier[seen] . identifier[add] ( identifier[node] . identifier[tag] )
keyword[return] {} | def _stub_tag(constructor, node):
"""Stub a constructor with a dictionary."""
seen = getattr(constructor, '_stub_seen', None)
if seen is None:
seen = constructor._stub_seen = set() # depends on [control=['if'], data=['seen']]
if node.tag not in seen:
print('YAML tag {} is not supported'.format(node.tag))
seen.add(node.tag) # depends on [control=['if'], data=['seen']]
return {} |
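A usage sketch with PyYAML: registering the stub for an otherwise-unknown tag lets documents load instead of raising ('!vault' is an arbitrary example tag):

import yaml

yaml.SafeLoader.add_constructor('!vault', _stub_tag)
doc = yaml.safe_load('secret: !vault xyz')
# warns "YAML tag !vault is not supported" once; doc == {'secret': {}}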
def start(self, timeout=None):
"""Start the client in a new thread.
Parameters
----------
timeout : float in seconds
Seconds to wait for client thread to start. Do not specify a
timeout if start() is being called from the same ioloop that this
client will be installed on, since it will block the ioloop without
progressing.
"""
if self._running.isSet():
raise RuntimeError("Device client already started.")
# Make sure we have an ioloop
self.ioloop = self._ioloop_manager.get_ioloop()
if timeout:
t0 = self.ioloop.time()
self._ioloop_manager.start(timeout)
self.ioloop.add_callback(self._install)
if timeout:
remaining_timeout = timeout - (self.ioloop.time() - t0)
self.wait_running(remaining_timeout) | def function[start, parameter[self, timeout]]:
constant[Start the client in a new thread.
Parameters
----------
timeout : float in seconds
Seconds to wait for client thread to start. Do not specify a
timeout if start() is being called from the same ioloop that this
client will be installed on, since it will block the ioloop without
progressing.
]
if call[name[self]._running.isSet, parameter[]] begin[:]
<ast.Raise object at 0x7da1b0535210>
name[self].ioloop assign[=] call[name[self]._ioloop_manager.get_ioloop, parameter[]]
if name[timeout] begin[:]
variable[t0] assign[=] call[name[self].ioloop.time, parameter[]]
call[name[self]._ioloop_manager.start, parameter[name[timeout]]]
call[name[self].ioloop.add_callback, parameter[name[self]._install]]
if name[timeout] begin[:]
variable[remaining_timeout] assign[=] binary_operation[name[timeout] - binary_operation[call[name[self].ioloop.time, parameter[]] - name[t0]]]
call[name[self].wait_running, parameter[name[remaining_timeout]]] | keyword[def] identifier[start] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_running] . identifier[isSet] ():
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[ioloop] = identifier[self] . identifier[_ioloop_manager] . identifier[get_ioloop] ()
keyword[if] identifier[timeout] :
identifier[t0] = identifier[self] . identifier[ioloop] . identifier[time] ()
identifier[self] . identifier[_ioloop_manager] . identifier[start] ( identifier[timeout] )
identifier[self] . identifier[ioloop] . identifier[add_callback] ( identifier[self] . identifier[_install] )
keyword[if] identifier[timeout] :
identifier[remaining_timeout] = identifier[timeout] -( identifier[self] . identifier[ioloop] . identifier[time] ()- identifier[t0] )
identifier[self] . identifier[wait_running] ( identifier[remaining_timeout] ) | def start(self, timeout=None):
"""Start the client in a new thread.
Parameters
----------
timeout : float in seconds
Seconds to wait for client thread to start. Do not specify a
timeout if start() is being called from the same ioloop that this
client will be installed on, since it will block the ioloop without
progressing.
"""
if self._running.isSet():
raise RuntimeError('Device client already started.') # depends on [control=['if'], data=[]]
# Make sure we have an ioloop
self.ioloop = self._ioloop_manager.get_ioloop()
if timeout:
t0 = self.ioloop.time() # depends on [control=['if'], data=[]]
self._ioloop_manager.start(timeout)
self.ioloop.add_callback(self._install)
if timeout:
remaining_timeout = timeout - (self.ioloop.time() - t0)
self.wait_running(remaining_timeout) # depends on [control=['if'], data=[]] |
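The timeout bookkeeping splits one budget across two waits (manager start, then wait_running). The same pattern, self-contained:

import time

def two_phase_wait(timeout):
    t0 = time.monotonic()
    time.sleep(0.01)                              # phase 1 consumes some budget
    remaining = timeout - (time.monotonic() - t0)
    return remaining                              # budget left for phase 2

assert 0 < two_phase_wait(1.0) < 1.0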
def add_site(self, name, site_elements=None):
"""
Add a VPN site with site elements to this engine.
VPN sites identify the sites with protected networks
to be included in the VPN.
Add a network and new VPN site::
>>> net = Network.get_or_create(name='wireless', ipv4_network='192.168.5.0/24')
>>> engine.vpn.add_site(name='wireless', site_elements=[net])
VPNSite(name=wireless)
>>> list(engine.vpn.sites)
[VPNSite(name=dingo - Primary Site), VPNSite(name=wireless)]
:param str name: name for VPN site
:param list site_elements: network elements for VPN site
:type site_elements: list(str,Element)
:raises ElementNotFound: if site element is not found
:raises UpdateElementFailed: failed to add vpn site
:rtype: VPNSite
.. note:: Update is immediate for this operation.
"""
site_elements = site_elements if site_elements else []
return self.sites.create(
name, site_elements) | def function[add_site, parameter[self, name, site_elements]]:
constant[
Add a VPN site with site elements to this engine.
VPN sites identify the sites with protected networks
to be included in the VPN.
Add a network and new VPN site::
>>> net = Network.get_or_create(name='wireless', ipv4_network='192.168.5.0/24')
>>> engine.vpn.add_site(name='wireless', site_elements=[net])
VPNSite(name=wireless)
>>> list(engine.vpn.sites)
[VPNSite(name=dingo - Primary Site), VPNSite(name=wireless)]
:param str name: name for VPN site
:param list site_elements: network elements for VPN site
:type site_elements: list(str,Element)
:raises ElementNotFound: if site element is not found
:raises UpdateElementFailed: failed to add vpn site
:rtype: VPNSite
.. note:: Update is immediate for this operation.
]
variable[site_elements] assign[=] <ast.IfExp object at 0x7da1b1a28a90>
return[call[name[self].sites.create, parameter[name[name], name[site_elements]]]] | keyword[def] identifier[add_site] ( identifier[self] , identifier[name] , identifier[site_elements] = keyword[None] ):
literal[string]
identifier[site_elements] = identifier[site_elements] keyword[if] identifier[site_elements] keyword[else] []
keyword[return] identifier[self] . identifier[sites] . identifier[create] (
identifier[name] , identifier[site_elements] ) | def add_site(self, name, site_elements=None):
"""
Add a VPN site with site elements to this engine.
VPN sites identify the sites with protected networks
to be included in the VPN.
Add a network and new VPN site::
>>> net = Network.get_or_create(name='wireless', ipv4_network='192.168.5.0/24')
>>> engine.vpn.add_site(name='wireless', site_elements=[net])
VPNSite(name=wireless)
>>> list(engine.vpn.sites)
[VPNSite(name=dingo - Primary Site), VPNSite(name=wireless)]
:param str name: name for VPN site
:param list site_elements: network elements for VPN site
:type site_elements: list(str,Element)
:raises ElementNotFound: if site element is not found
:raises UpdateElementFailed: failed to add vpn site
:rtype: VPNSite
.. note:: Update is immediate for this operation.
"""
site_elements = site_elements if site_elements else []
return self.sites.create(name, site_elements) |
def _xray_clean_up_entries_for_driver(self, driver_id):
"""Remove this driver's object/task entries from redis.
Removes control-state entries of all tasks and task return
objects belonging to the driver.
Args:
driver_id: The driver id.
"""
xray_task_table_prefix = (
ray.gcs_utils.TablePrefix_RAYLET_TASK_string.encode("ascii"))
xray_object_table_prefix = (
ray.gcs_utils.TablePrefix_OBJECT_string.encode("ascii"))
task_table_objects = self.state.task_table()
driver_id_hex = binary_to_hex(driver_id)
driver_task_id_bins = set()
for task_id_hex, task_info in task_table_objects.items():
task_table_object = task_info["TaskSpec"]
task_driver_id_hex = task_table_object["DriverID"]
if driver_id_hex != task_driver_id_hex:
# Ignore tasks that aren't from this driver.
continue
driver_task_id_bins.add(hex_to_binary(task_id_hex))
# Get objects associated with the driver.
object_table_objects = self.state.object_table()
driver_object_id_bins = set()
for object_id, _ in object_table_objects.items():
task_id_bin = ray._raylet.compute_task_id(object_id).binary()
if task_id_bin in driver_task_id_bins:
driver_object_id_bins.add(object_id.binary())
def to_shard_index(id_bin):
return binary_to_object_id(id_bin).redis_shard_hash() % len(
self.state.redis_clients)
# Form the redis keys to delete.
sharded_keys = [[] for _ in range(len(self.state.redis_clients))]
for task_id_bin in driver_task_id_bins:
sharded_keys[to_shard_index(task_id_bin)].append(
xray_task_table_prefix + task_id_bin)
for object_id_bin in driver_object_id_bins:
sharded_keys[to_shard_index(object_id_bin)].append(
xray_object_table_prefix + object_id_bin)
# Remove with best effort.
for shard_index in range(len(sharded_keys)):
keys = sharded_keys[shard_index]
if len(keys) == 0:
continue
redis = self.state.redis_clients[shard_index]
num_deleted = redis.delete(*keys)
logger.info("Monitor: "
"Removed {} dead redis entries of the "
"driver from redis shard {}.".format(
num_deleted, shard_index))
if num_deleted != len(keys):
logger.warning("Monitor: "
"Failed to remove {} relevant redis "
"entries from redis shard {}.".format(
len(keys) - num_deleted, shard_index)) | def function[_xray_clean_up_entries_for_driver, parameter[self, driver_id]]:
constant[Remove this driver's object/task entries from redis.
Removes control-state entries of all tasks and task return
objects belonging to the driver.
Args:
driver_id: The driver id.
]
variable[xray_task_table_prefix] assign[=] call[name[ray].gcs_utils.TablePrefix_RAYLET_TASK_string.encode, parameter[constant[ascii]]]
variable[xray_object_table_prefix] assign[=] call[name[ray].gcs_utils.TablePrefix_OBJECT_string.encode, parameter[constant[ascii]]]
variable[task_table_objects] assign[=] call[name[self].state.task_table, parameter[]]
variable[driver_id_hex] assign[=] call[name[binary_to_hex], parameter[name[driver_id]]]
variable[driver_task_id_bins] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b2346650>, <ast.Name object at 0x7da1b23454b0>]]] in starred[call[name[task_table_objects].items, parameter[]]] begin[:]
variable[task_table_object] assign[=] call[name[task_info]][constant[TaskSpec]]
variable[task_driver_id_hex] assign[=] call[name[task_table_object]][constant[DriverID]]
if compare[name[driver_id_hex] not_equal[!=] name[task_driver_id_hex]] begin[:]
continue
call[name[driver_task_id_bins].add, parameter[call[name[hex_to_binary], parameter[name[task_id_hex]]]]]
variable[object_table_objects] assign[=] call[name[self].state.object_table, parameter[]]
variable[driver_object_id_bins] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b2345210>, <ast.Name object at 0x7da1b23448e0>]]] in starred[call[name[object_table_objects].items, parameter[]]] begin[:]
variable[task_id_bin] assign[=] call[call[name[ray]._raylet.compute_task_id, parameter[name[object_id]]].binary, parameter[]]
if compare[name[task_id_bin] in name[driver_task_id_bins]] begin[:]
call[name[driver_object_id_bins].add, parameter[call[name[object_id].binary, parameter[]]]]
def function[to_shard_index, parameter[id_bin]]:
return[binary_operation[call[call[name[binary_to_object_id], parameter[name[id_bin]]].redis_shard_hash, parameter[]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].state.redis_clients]]]]
variable[sharded_keys] assign[=] <ast.ListComp object at 0x7da1b2346aa0>
for taget[name[task_id_bin]] in starred[name[driver_task_id_bins]] begin[:]
call[call[name[sharded_keys]][call[name[to_shard_index], parameter[name[task_id_bin]]]].append, parameter[binary_operation[name[xray_task_table_prefix] + name[task_id_bin]]]]
for taget[name[object_id_bin]] in starred[name[driver_object_id_bins]] begin[:]
call[call[name[sharded_keys]][call[name[to_shard_index], parameter[name[object_id_bin]]]].append, parameter[binary_operation[name[xray_object_table_prefix] + name[object_id_bin]]]]
for taget[name[shard_index]] in starred[call[name[range], parameter[call[name[len], parameter[name[sharded_keys]]]]]] begin[:]
variable[keys] assign[=] call[name[sharded_keys]][name[shard_index]]
if compare[call[name[len], parameter[name[keys]]] equal[==] constant[0]] begin[:]
continue
variable[redis] assign[=] call[name[self].state.redis_clients][name[shard_index]]
variable[num_deleted] assign[=] call[name[redis].delete, parameter[<ast.Starred object at 0x7da20c7c8190>]]
call[name[logger].info, parameter[call[constant[Monitor: Removed {} dead redis entries of the driver from redis shard {}.].format, parameter[name[num_deleted], name[shard_index]]]]]
if compare[name[num_deleted] not_equal[!=] call[name[len], parameter[name[keys]]]] begin[:]
call[name[logger].warning, parameter[call[constant[Monitor: Failed to remove {} relevant redis entries from redis shard {}.].format, parameter[binary_operation[call[name[len], parameter[name[keys]]] - name[num_deleted]], name[shard_index]]]]] | keyword[def] identifier[_xray_clean_up_entries_for_driver] ( identifier[self] , identifier[driver_id] ):
literal[string]
identifier[xray_task_table_prefix] =(
identifier[ray] . identifier[gcs_utils] . identifier[TablePrefix_RAYLET_TASK_string] . identifier[encode] ( literal[string] ))
identifier[xray_object_table_prefix] =(
identifier[ray] . identifier[gcs_utils] . identifier[TablePrefix_OBJECT_string] . identifier[encode] ( literal[string] ))
identifier[task_table_objects] = identifier[self] . identifier[state] . identifier[task_table] ()
identifier[driver_id_hex] = identifier[binary_to_hex] ( identifier[driver_id] )
identifier[driver_task_id_bins] = identifier[set] ()
keyword[for] identifier[task_id_hex] , identifier[task_info] keyword[in] identifier[task_table_objects] . identifier[items] ():
identifier[task_table_object] = identifier[task_info] [ literal[string] ]
identifier[task_driver_id_hex] = identifier[task_table_object] [ literal[string] ]
keyword[if] identifier[driver_id_hex] != identifier[task_driver_id_hex] :
keyword[continue]
identifier[driver_task_id_bins] . identifier[add] ( identifier[hex_to_binary] ( identifier[task_id_hex] ))
identifier[object_table_objects] = identifier[self] . identifier[state] . identifier[object_table] ()
identifier[driver_object_id_bins] = identifier[set] ()
keyword[for] identifier[object_id] , identifier[_] keyword[in] identifier[object_table_objects] . identifier[items] ():
identifier[task_id_bin] = identifier[ray] . identifier[_raylet] . identifier[compute_task_id] ( identifier[object_id] ). identifier[binary] ()
keyword[if] identifier[task_id_bin] keyword[in] identifier[driver_task_id_bins] :
identifier[driver_object_id_bins] . identifier[add] ( identifier[object_id] . identifier[binary] ())
keyword[def] identifier[to_shard_index] ( identifier[id_bin] ):
keyword[return] identifier[binary_to_object_id] ( identifier[id_bin] ). identifier[redis_shard_hash] ()% identifier[len] (
identifier[self] . identifier[state] . identifier[redis_clients] )
identifier[sharded_keys] =[[] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[state] . identifier[redis_clients] ))]
keyword[for] identifier[task_id_bin] keyword[in] identifier[driver_task_id_bins] :
identifier[sharded_keys] [ identifier[to_shard_index] ( identifier[task_id_bin] )]. identifier[append] (
identifier[xray_task_table_prefix] + identifier[task_id_bin] )
keyword[for] identifier[object_id_bin] keyword[in] identifier[driver_object_id_bins] :
identifier[sharded_keys] [ identifier[to_shard_index] ( identifier[object_id_bin] )]. identifier[append] (
identifier[xray_object_table_prefix] + identifier[object_id_bin] )
keyword[for] identifier[shard_index] keyword[in] identifier[range] ( identifier[len] ( identifier[sharded_keys] )):
identifier[keys] = identifier[sharded_keys] [ identifier[shard_index] ]
keyword[if] identifier[len] ( identifier[keys] )== literal[int] :
keyword[continue]
identifier[redis] = identifier[self] . identifier[state] . identifier[redis_clients] [ identifier[shard_index] ]
identifier[num_deleted] = identifier[redis] . identifier[delete] (* identifier[keys] )
identifier[logger] . identifier[info] ( literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[num_deleted] , identifier[shard_index] ))
keyword[if] identifier[num_deleted] != identifier[len] ( identifier[keys] ):
identifier[logger] . identifier[warning] ( literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[len] ( identifier[keys] )- identifier[num_deleted] , identifier[shard_index] )) | def _xray_clean_up_entries_for_driver(self, driver_id):
"""Remove this driver's object/task entries from redis.
Removes control-state entries of all tasks and task return
objects belonging to the driver.
Args:
driver_id: The driver id.
"""
xray_task_table_prefix = ray.gcs_utils.TablePrefix_RAYLET_TASK_string.encode('ascii')
xray_object_table_prefix = ray.gcs_utils.TablePrefix_OBJECT_string.encode('ascii')
task_table_objects = self.state.task_table()
driver_id_hex = binary_to_hex(driver_id)
driver_task_id_bins = set()
for (task_id_hex, task_info) in task_table_objects.items():
task_table_object = task_info['TaskSpec']
task_driver_id_hex = task_table_object['DriverID']
if driver_id_hex != task_driver_id_hex:
# Ignore tasks that aren't from this driver.
continue # depends on [control=['if'], data=[]]
driver_task_id_bins.add(hex_to_binary(task_id_hex)) # depends on [control=['for'], data=[]]
# Get objects associated with the driver.
object_table_objects = self.state.object_table()
driver_object_id_bins = set()
for (object_id, _) in object_table_objects.items():
task_id_bin = ray._raylet.compute_task_id(object_id).binary()
if task_id_bin in driver_task_id_bins:
driver_object_id_bins.add(object_id.binary()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def to_shard_index(id_bin):
return binary_to_object_id(id_bin).redis_shard_hash() % len(self.state.redis_clients)
# Form the redis keys to delete.
sharded_keys = [[] for _ in range(len(self.state.redis_clients))]
for task_id_bin in driver_task_id_bins:
sharded_keys[to_shard_index(task_id_bin)].append(xray_task_table_prefix + task_id_bin) # depends on [control=['for'], data=['task_id_bin']]
for object_id_bin in driver_object_id_bins:
sharded_keys[to_shard_index(object_id_bin)].append(xray_object_table_prefix + object_id_bin) # depends on [control=['for'], data=['object_id_bin']]
# Remove with best effort.
for shard_index in range(len(sharded_keys)):
keys = sharded_keys[shard_index]
if len(keys) == 0:
continue # depends on [control=['if'], data=[]]
redis = self.state.redis_clients[shard_index]
num_deleted = redis.delete(*keys)
logger.info('Monitor: Removed {} dead redis entries of the driver from redis shard {}.'.format(num_deleted, shard_index))
if num_deleted != len(keys):
logger.warning('Monitor: Failed to remove {} relevant redis entries from redis shard {}.'.format(len(keys) - num_deleted, shard_index)) # depends on [control=['if'], data=['num_deleted']] # depends on [control=['for'], data=['shard_index']] |
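The cleanup buckets keys by hash so each Redis shard deletes exactly the keys it owns. The bucketing idea, self-contained with a stand-in hash (Ray's redis_shard_hash() is its own function):

def shard_of(key, num_shards):
    return hash(key) % num_shards   # stand-in for redis_shard_hash()

shards = [[] for _ in range(3)]
for key in [b'task:1', b'task:2', b'object:9']:
    shards[shard_of(key, 3)].append(key)
assert sum(len(bucket) for bucket in shards) == 3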
def is_allowed(
self,
policy_name,
session_user,
session_group,
object_owner,
object_type,
operation
):
"""
Determine if object access is allowed for the provided policy and
session settings.
"""
policy_section = self.get_relevant_policy_section(
policy_name,
session_group
)
if policy_section is None:
return False
object_policy = policy_section.get(object_type)
if not object_policy:
self._logger.warning(
"The '{0}' policy does not apply to {1} objects.".format(
policy_name,
self._get_enum_string(object_type)
)
)
return False
operation_object_policy = object_policy.get(operation)
if not operation_object_policy:
self._logger.warning(
"The '{0}' policy does not apply to {1} operations on {2} "
"objects.".format(
policy_name,
self._get_enum_string(operation),
self._get_enum_string(object_type)
)
)
return False
if operation_object_policy == enums.Policy.ALLOW_ALL:
return True
elif operation_object_policy == enums.Policy.ALLOW_OWNER:
if session_user == object_owner:
return True
else:
return False
elif operation_object_policy == enums.Policy.DISALLOW_ALL:
return False
else:
return False | def function[is_allowed, parameter[self, policy_name, session_user, session_group, object_owner, object_type, operation]]:
constant[
Determine if object access is allowed for the provided policy and
session settings.
]
variable[policy_section] assign[=] call[name[self].get_relevant_policy_section, parameter[name[policy_name], name[session_group]]]
if compare[name[policy_section] is constant[None]] begin[:]
return[constant[False]]
variable[object_policy] assign[=] call[name[policy_section].get, parameter[name[object_type]]]
if <ast.UnaryOp object at 0x7da204961fc0> begin[:]
call[name[self]._logger.warning, parameter[call[constant[The '{0}' policy does not apply to {1} objects.].format, parameter[name[policy_name], call[name[self]._get_enum_string, parameter[name[object_type]]]]]]]
return[constant[False]]
variable[operation_object_policy] assign[=] call[name[object_policy].get, parameter[name[operation]]]
if <ast.UnaryOp object at 0x7da204962b60> begin[:]
call[name[self]._logger.warning, parameter[call[constant[The '{0}' policy does not apply to {1} operations on {2} objects.].format, parameter[name[policy_name], call[name[self]._get_enum_string, parameter[name[operation]]], call[name[self]._get_enum_string, parameter[name[object_type]]]]]]]
return[constant[False]]
if compare[name[operation_object_policy] equal[==] name[enums].Policy.ALLOW_ALL] begin[:]
return[constant[True]] | keyword[def] identifier[is_allowed] (
identifier[self] ,
identifier[policy_name] ,
identifier[session_user] ,
identifier[session_group] ,
identifier[object_owner] ,
identifier[object_type] ,
identifier[operation]
):
literal[string]
identifier[policy_section] = identifier[self] . identifier[get_relevant_policy_section] (
identifier[policy_name] ,
identifier[session_group]
)
keyword[if] identifier[policy_section] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[object_policy] = identifier[policy_section] . identifier[get] ( identifier[object_type] )
keyword[if] keyword[not] identifier[object_policy] :
identifier[self] . identifier[_logger] . identifier[warning] (
literal[string] . identifier[format] (
identifier[policy_name] ,
identifier[self] . identifier[_get_enum_string] ( identifier[object_type] )
)
)
keyword[return] keyword[False]
identifier[operation_object_policy] = identifier[object_policy] . identifier[get] ( identifier[operation] )
keyword[if] keyword[not] identifier[operation_object_policy] :
identifier[self] . identifier[_logger] . identifier[warning] (
literal[string]
literal[string] . identifier[format] (
identifier[policy_name] ,
identifier[self] . identifier[_get_enum_string] ( identifier[operation] ),
identifier[self] . identifier[_get_enum_string] ( identifier[object_type] )
)
)
keyword[return] keyword[False]
keyword[if] identifier[operation_object_policy] == identifier[enums] . identifier[Policy] . identifier[ALLOW_ALL] :
keyword[return] keyword[True]
keyword[elif] identifier[operation_object_policy] == identifier[enums] . identifier[Policy] . identifier[ALLOW_OWNER] :
keyword[if] identifier[session_user] == identifier[object_owner] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
keyword[elif] identifier[operation_object_policy] == identifier[enums] . identifier[Policy] . identifier[DISALLOW_ALL] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[False] | def is_allowed(self, policy_name, session_user, session_group, object_owner, object_type, operation):
"""
Determine if object access is allowed for the provided policy and
session settings.
"""
policy_section = self.get_relevant_policy_section(policy_name, session_group)
if policy_section is None:
return False # depends on [control=['if'], data=[]]
object_policy = policy_section.get(object_type)
if not object_policy:
self._logger.warning("The '{0}' policy does not apply to {1} objects.".format(policy_name, self._get_enum_string(object_type)))
return False # depends on [control=['if'], data=[]]
operation_object_policy = object_policy.get(operation)
if not operation_object_policy:
self._logger.warning("The '{0}' policy does not apply to {1} operations on {2} objects.".format(policy_name, self._get_enum_string(operation), self._get_enum_string(object_type)))
return False # depends on [control=['if'], data=[]]
if operation_object_policy == enums.Policy.ALLOW_ALL:
return True # depends on [control=['if'], data=[]]
elif operation_object_policy == enums.Policy.ALLOW_OWNER:
if session_user == object_owner:
return True # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]]
elif operation_object_policy == enums.Policy.DISALLOW_ALL:
return False # depends on [control=['if'], data=[]]
else:
return False |
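Reduced to its core, is_allowed() is two dict lookups with early-out warnings, then a rule check. A self-contained illustration with plain strings in place of the KMIP enums:

policy = {'symmetric_key': {'get': 'allow_owner'}}

def decide(obj_type, op, user, owner):
    rule = policy.get(obj_type, {}).get(op)
    if rule == 'allow_all':
        return True
    if rule == 'allow_owner':
        return user == owner
    return False   # disallow_all, or no rule for this type/operation

assert decide('symmetric_key', 'get', 'alice', 'alice') is True
assert decide('symmetric_key', 'get', 'bob', 'alice') is False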
def url_params(request, except_params=None, as_is=False):
"""
create string with GET-params of request
usage example:
c['sort_url'] = url_params(request, except_params=('sort',))
...
<a href="{{ sort_url }}&sort=lab_number">Лабораторный номер</a>
"""
if not request.GET:
return ''
params = []
for key, value in request.GET.items():
if except_params and key not in except_params:
for v in request.GET.getlist(key):
params.append('%s=%s' % (key, urlquote(v)))
if as_is:
str_params = '?' + '&'.join(params)
else:
str_params = '?' + '&'.join(params)
str_params = urlquote(str_params)
return mark_safe(str_params) | def function[url_params, parameter[request, except_params, as_is]]:
constant[
create string with GET-params of request
usage example:
c['sort_url'] = url_params(request, except_params=('sort',))
...
<a href="{{ sort_url }}&sort=lab_number">Лабораторный номер</a>
]
if <ast.UnaryOp object at 0x7da1b10404f0> begin[:]
return[constant[]]
variable[params] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1043010>, <ast.Name object at 0x7da1b1041630>]]] in starred[call[name[request].GET.items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b1040670> begin[:]
for taget[name[v]] in starred[call[name[request].GET.getlist, parameter[name[key]]]] begin[:]
call[name[params].append, parameter[binary_operation[constant[%s=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1040ee0>, <ast.Call object at 0x7da1b1043730>]]]]]
if name[as_is] begin[:]
variable[str_params] assign[=] binary_operation[constant[?] + call[constant[&].join, parameter[name[params]]]]
return[call[name[mark_safe], parameter[name[str_params]]]] | keyword[def] identifier[url_params] ( identifier[request] , identifier[except_params] = keyword[None] , identifier[as_is] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[request] . identifier[GET] :
keyword[return] literal[string]
identifier[params] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[request] . identifier[GET] . identifier[items] ():
keyword[if] identifier[except_params] keyword[and] identifier[key] keyword[not] keyword[in] identifier[except_params] :
keyword[for] identifier[v] keyword[in] identifier[request] . identifier[GET] . identifier[getlist] ( identifier[key] ):
identifier[params] . identifier[append] ( literal[string] %( identifier[key] , identifier[urlquote] ( identifier[v] )))
keyword[if] identifier[as_is] :
identifier[str_params] = literal[string] + literal[string] . identifier[join] ( identifier[params] )
keyword[else] :
identifier[str_params] = literal[string] + literal[string] . identifier[join] ( identifier[params] )
identifier[str_params] = identifier[urlquote] ( identifier[str_params] )
keyword[return] identifier[mark_safe] ( identifier[str_params] ) | def url_params(request, except_params=None, as_is=False):
"""
create string with GET-params of request
usage example:
c['sort_url'] = url_params(request, except_params=('sort',))
...
<a href="{{ sort_url }}&sort=lab_number">Лабораторный номер</a>
"""
if not request.GET:
return '' # depends on [control=['if'], data=[]]
params = []
for (key, value) in request.GET.items():
if except_params and key not in except_params:
for v in request.GET.getlist(key):
params.append('%s=%s' % (key, urlquote(v))) # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if as_is:
str_params = '?' + '&'.join(params) # depends on [control=['if'], data=[]]
else:
str_params = '?' + '&'.join(params)
str_params = urlquote(str_params)
return mark_safe(str_params) |
def files(self, paths, access=None, extensions=None, minsize=None):
"""Verify list of files"""
self.failures = [path for path in paths if not
isvalid(path, access, extensions, 'file', minsize)]
return not self.failures | def function[files, parameter[self, paths, access, extensions, minsize]]:
constant[Verify list of files]
name[self].failures assign[=] <ast.ListComp object at 0x7da1b14c69b0>
return[<ast.UnaryOp object at 0x7da18f721930>] | keyword[def] identifier[files] ( identifier[self] , identifier[paths] , identifier[access] = keyword[None] , identifier[extensions] = keyword[None] , identifier[minsize] = keyword[None] ):
literal[string]
identifier[self] . identifier[failures] =[ identifier[path] keyword[for] identifier[path] keyword[in] identifier[paths] keyword[if] keyword[not]
identifier[isvalid] ( identifier[path] , identifier[access] , identifier[extensions] , literal[string] , identifier[minsize] )]
keyword[return] keyword[not] identifier[self] . identifier[failures] | def files(self, paths, access=None, extensions=None, minsize=None):
"""Verify list of files"""
self.failures = [path for path in paths if not isvalid(path, access, extensions, 'file', minsize)]
return not self.failures |
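The validate-and-collect idiom above, self-contained: gather the offenders, and report success as the absence of failures (os.path.isfile stands in for the unshown isvalid() helper):

import os

paths = ['/etc/hosts', '/no/such/file']
failures = [p for p in paths if not os.path.isfile(p)]
ok = not failures   # True only when every path validated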
def legends(value):
"""list or KeyedList of ``Legends`` : Legend definitions
Legends visualize scales, and take one or more scales as their input.
They can be customized via a LegendProperty object.
"""
for i, entry in enumerate(value):
_assert_is_type('legends[{0}]'.format(i), entry, Legend) | def function[legends, parameter[value]]:
constant[list or KeyedList of ``Legends`` : Legend definitions
Legends visualize scales, and take one or more scales as their input.
They can be customized via a LegendProperty object.
]
for taget[tuple[[<ast.Name object at 0x7da204565c00>, <ast.Name object at 0x7da2045670a0>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
call[name[_assert_is_type], parameter[call[constant[legends[{0}]].format, parameter[name[i]]], name[entry], name[Legend]]] | keyword[def] identifier[legends] ( identifier[value] ):
literal[string]
keyword[for] identifier[i] , identifier[entry] keyword[in] identifier[enumerate] ( identifier[value] ):
identifier[_assert_is_type] ( literal[string] . identifier[format] ( identifier[i] ), identifier[entry] , identifier[Legend] ) | def legends(value):
"""list or KeyedList of ``Legends`` : Legend definitions
Legends visualize scales, and take one or more scales as their input.
They can be customized via a LegendProperty object.
"""
for (i, entry) in enumerate(value):
_assert_is_type('legends[{0}]'.format(i), entry, Legend) # depends on [control=['for'], data=[]] |
def normalize_item(self, value=_nothing, coll=None, index=None):
"""Hook which is called on every *collection item*; this is a way to
perform context-aware clean-ups.
args:
``value=``\ *nothing*\ \|\ *anything*
The value in the collection slot. *nothing* can be detected in
sub-class methods as ``self._nothing``.
``coll=``\ *COLLECTION*
The parent :py:class:`normalize.coll.Collection` instance. If
this instance has a ``compare_item_as`` method, then that
method is called to perform a clean-up before the value is
passed to ``normalize_val``
``index=``\ *HASHABLE*
The key of the item in the collection.
"""
if value is not _nothing and hasattr(coll, "compare_item_as"):
value = coll.compare_item_as(value)
return self.normalize_val(value) | def function[normalize_item, parameter[self, value, coll, index]]:
constant[Hook which is called on every *collection item*; this is a way to
perform context-aware clean-ups.
args:
``value=``\ *nothing*\ \|\ *anything*
The value in the collection slot. *nothing* can be detected in
sub-class methods as ``self._nothing``.
``coll=``\ *COLLECTION*
The parent :py:class:`normalize.coll.Collection` instance. If
this instance has a ``compare_item_as`` method, then that
method is called to perform a clean-up before the value is
passed to ``normalize_val``
``index=``\ *HASHABLE*
The key of the item in the collection.
]
if <ast.BoolOp object at 0x7da18eb55e70> begin[:]
variable[value] assign[=] call[name[coll].compare_item_as, parameter[name[value]]]
return[call[name[self].normalize_val, parameter[name[value]]]] | keyword[def] identifier[normalize_item] ( identifier[self] , identifier[value] = identifier[_nothing] , identifier[coll] = keyword[None] , identifier[index] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] identifier[_nothing] keyword[and] identifier[hasattr] ( identifier[coll] , literal[string] ):
identifier[value] = identifier[coll] . identifier[compare_item_as] ( identifier[value] )
keyword[return] identifier[self] . identifier[normalize_val] ( identifier[value] ) | def normalize_item(self, value=_nothing, coll=None, index=None):
"""Hook which is called on every *collection item*; this is a way to
perform context-aware clean-ups.
args:
``value=``\\ *nothing*\\ \\|\\ *anything*
The value in the collection slot. *nothing* can be detected in
sub-class methods as ``self._nothing``.
``coll=``\\ *COLLECTION*
The parent :py:class:`normalize.coll.Collection` instance. If
this instance has a ``compare_item_as`` method, then that
method is called to perform a clean-up before the value is
passed to ``normalize_val``
``index=``\\ *HASHABLE*
The key of the item in the collection.
"""
if value is not _nothing and hasattr(coll, 'compare_item_as'):
value = coll.compare_item_as(value) # depends on [control=['if'], data=[]]
return self.normalize_val(value) |
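# Usage sketch for normalize_item() above: when the parent collection
# exposes compare_item_as(), each item is cleaned through it before
# normalize_val() runs. The classes below are hypothetical stand-ins, not
# the real normalize.coll types.
class DemoNormalizer:
    def normalize_val(self, value):
        return value

    def normalize_item(self, value=None, coll=None, index=None):
        if value is not None and hasattr(coll, 'compare_item_as'):
            value = coll.compare_item_as(value)
        return self.normalize_val(value)

class LowerCaseColl:
    @staticmethod
    def compare_item_as(value):
        return value.lower()

print(DemoNormalizer().normalize_item('ABC', LowerCaseColl()))  # -> abc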
def get_sampleS(self, res, DS=None, resMode='abs',
ind=None, offsetIn=0., Out='(X,Y,Z)', Ind=None):
""" Sample, with resolution res, the surface defined by DS or ind
    An optional offset perpendicular to the surface can be used
(offsetIn>0 => inwards)
Parameters
----------
res : float / list of 2 floats
        Desired resolution of the surface sample
float : same resolution for all directions of the sample
list : [dl,dXPhi] where:
dl : res. along polygon contours (cross-section)
dXPhi : res. along axis (toroidal/linear direction)
DS : None / list of 3 lists of 2 floats
Limits of the domain in which the sample should be computed
None : whole surface of the object
list : [D1,D2,D3], where Di is a len()=2 list
(increasing floats, setting limits along coordinate i)
[DR,DZ,DPhi]: in toroidal geometry (self.Id.Type=='Tor')
[DX,DY,DZ] : in linear geometry (self.Id.Type=='Lin')
resMode : str
Flag, specifies if res is absolute or relative to element sizes
'abs' : res is an absolute distance
        'rel' : if res=0.1, each polygon segment is divided into 10,
as is the toroidal/linear length
ind : None / np.ndarray of int
If provided, DS is ignored and the sample points corresponding to
the provided indices are returned
Example (assuming obj is a Ves object)
> # We create a 5x5 cm2 sample of the whole surface
> pts, dS, ind, reseff = obj.get_sample(0.05)
> # Perform operations, save only the points indices (save space)
> ...
> # Retrieve the points from their indices (requires same res)
> pts2, dS2, ind2, reseff2 = obj.get_sample(0.05, ind=ind)
> np.allclose(pts,pts2)
True
offsetIn: float
Offset distance from the actual surface of the object
Inwards if positive
Useful to avoid numerical errors
Out : str
Flag indicating the coordinate system of returned points
e.g. : '(X,Y,Z)' or '(R,Z,Phi)'
Ind : None / iterable of ints
Array of indices of the entities to be considered
(only when multiple entities, i.e.: self.nLim>1)
Returns
-------
pts : np.ndarray / list of np.ndarrays
Sample points coordinates, as a (3,N) array.
A list is returned if the object has multiple entities
dS : np.ndarray / list of np.ndarrays
The surface (in m^2) associated to each point
ind : np.ndarray / list of np.ndarrays
The index of each point
reseff : np.ndarray / list of np.ndarrays
Effective resolution in both directions after sample computation
"""
if Ind is not None:
assert self.dgeom['Multi']
kwdargs = dict(DS=DS, dSMode=resMode, ind=ind, DIn=offsetIn,
VIn=self.dgeom['VIn'], VType=self.Id.Type,
VLim=np.ascontiguousarray(self.Lim), nVLim=self.noccur,
Out=Out, margin=1.e-9,
Multi=self.dgeom['Multi'], Ind=Ind)
args = [self.Poly, self.dgeom['P1Min'][0], self.dgeom['P1Max'][0],
self.dgeom['P2Min'][1], self.dgeom['P2Max'][1], res]
pts, dS, ind, reseff = _comp._Ves_get_sampleS(*args, **kwdargs)
return pts, dS, ind, reseff | def function[get_sampleS, parameter[self, res, DS, resMode, ind, offsetIn, Out, Ind]]:
constant[ Sample, with resolution res, the surface defined by DS or ind
    An optional offset perpendicular to the surface can be used
(offsetIn>0 => inwards)
Parameters
----------
res : float / list of 2 floats
        Desired resolution of the surface sample
float : same resolution for all directions of the sample
list : [dl,dXPhi] where:
dl : res. along polygon contours (cross-section)
dXPhi : res. along axis (toroidal/linear direction)
DS : None / list of 3 lists of 2 floats
Limits of the domain in which the sample should be computed
None : whole surface of the object
list : [D1,D2,D3], where Di is a len()=2 list
(increasing floats, setting limits along coordinate i)
[DR,DZ,DPhi]: in toroidal geometry (self.Id.Type=='Tor')
[DX,DY,DZ] : in linear geometry (self.Id.Type=='Lin')
resMode : str
Flag, specifies if res is absolute or relative to element sizes
'abs' : res is an absolute distance
        'rel' : if res=0.1, each polygon segment is divided into 10,
as is the toroidal/linear length
ind : None / np.ndarray of int
If provided, DS is ignored and the sample points corresponding to
the provided indices are returned
Example (assuming obj is a Ves object)
> # We create a 5x5 cm2 sample of the whole surface
> pts, dS, ind, reseff = obj.get_sample(0.05)
> # Perform operations, save only the points indices (save space)
> ...
> # Retrieve the points from their indices (requires same res)
> pts2, dS2, ind2, reseff2 = obj.get_sample(0.05, ind=ind)
> np.allclose(pts,pts2)
True
offsetIn: float
Offset distance from the actual surface of the object
Inwards if positive
Useful to avoid numerical errors
Out : str
Flag indicating the coordinate system of returned points
e.g. : '(X,Y,Z)' or '(R,Z,Phi)'
Ind : None / iterable of ints
Array of indices of the entities to be considered
(only when multiple entities, i.e.: self.nLim>1)
Returns
-------
pts : np.ndarray / list of np.ndarrays
Sample points coordinates, as a (3,N) array.
A list is returned if the object has multiple entities
dS : np.ndarray / list of np.ndarrays
The surface (in m^2) associated to each point
ind : np.ndarray / list of np.ndarrays
The index of each point
reseff : np.ndarray / list of np.ndarrays
Effective resolution in both directions after sample computation
]
if compare[name[Ind] is_not constant[None]] begin[:]
assert[call[name[self].dgeom][constant[Multi]]]
variable[kwdargs] assign[=] call[name[dict], parameter[]]
variable[args] assign[=] list[[<ast.Attribute object at 0x7da20e9621d0>, <ast.Subscript object at 0x7da20e962500>, <ast.Subscript object at 0x7da20e961690>, <ast.Subscript object at 0x7da20e961d20>, <ast.Subscript object at 0x7da20e963940>, <ast.Name object at 0x7da20e960580>]]
<ast.Tuple object at 0x7da20e961270> assign[=] call[name[_comp]._Ves_get_sampleS, parameter[<ast.Starred object at 0x7da20e9612a0>]]
return[tuple[[<ast.Name object at 0x7da204623850>, <ast.Name object at 0x7da204623610>, <ast.Name object at 0x7da2046216f0>, <ast.Name object at 0x7da204622f50>]]] | keyword[def] identifier[get_sampleS] ( identifier[self] , identifier[res] , identifier[DS] = keyword[None] , identifier[resMode] = literal[string] ,
identifier[ind] = keyword[None] , identifier[offsetIn] = literal[int] , identifier[Out] = literal[string] , identifier[Ind] = keyword[None] ):
literal[string]
keyword[if] identifier[Ind] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[self] . identifier[dgeom] [ literal[string] ]
identifier[kwdargs] = identifier[dict] ( identifier[DS] = identifier[DS] , identifier[dSMode] = identifier[resMode] , identifier[ind] = identifier[ind] , identifier[DIn] = identifier[offsetIn] ,
identifier[VIn] = identifier[self] . identifier[dgeom] [ literal[string] ], identifier[VType] = identifier[self] . identifier[Id] . identifier[Type] ,
identifier[VLim] = identifier[np] . identifier[ascontiguousarray] ( identifier[self] . identifier[Lim] ), identifier[nVLim] = identifier[self] . identifier[noccur] ,
identifier[Out] = identifier[Out] , identifier[margin] = literal[int] ,
identifier[Multi] = identifier[self] . identifier[dgeom] [ literal[string] ], identifier[Ind] = identifier[Ind] )
identifier[args] =[ identifier[self] . identifier[Poly] , identifier[self] . identifier[dgeom] [ literal[string] ][ literal[int] ], identifier[self] . identifier[dgeom] [ literal[string] ][ literal[int] ],
identifier[self] . identifier[dgeom] [ literal[string] ][ literal[int] ], identifier[self] . identifier[dgeom] [ literal[string] ][ literal[int] ], identifier[res] ]
identifier[pts] , identifier[dS] , identifier[ind] , identifier[reseff] = identifier[_comp] . identifier[_Ves_get_sampleS] (* identifier[args] ,** identifier[kwdargs] )
keyword[return] identifier[pts] , identifier[dS] , identifier[ind] , identifier[reseff] | def get_sampleS(self, res, DS=None, resMode='abs', ind=None, offsetIn=0.0, Out='(X,Y,Z)', Ind=None):
""" Sample, with resolution res, the surface defined by DS or ind
    An optional offset perpendicular to the surface can be used
(offsetIn>0 => inwards)
Parameters
----------
res : float / list of 2 floats
        Desired resolution of the surface sample
float : same resolution for all directions of the sample
list : [dl,dXPhi] where:
dl : res. along polygon contours (cross-section)
dXPhi : res. along axis (toroidal/linear direction)
DS : None / list of 3 lists of 2 floats
Limits of the domain in which the sample should be computed
None : whole surface of the object
list : [D1,D2,D3], where Di is a len()=2 list
(increasing floats, setting limits along coordinate i)
[DR,DZ,DPhi]: in toroidal geometry (self.Id.Type=='Tor')
[DX,DY,DZ] : in linear geometry (self.Id.Type=='Lin')
resMode : str
Flag, specifies if res is absolute or relative to element sizes
'abs' : res is an absolute distance
        'rel' : if res=0.1, each polygon segment is divided into 10,
as is the toroidal/linear length
ind : None / np.ndarray of int
If provided, DS is ignored and the sample points corresponding to
the provided indices are returned
Example (assuming obj is a Ves object)
> # We create a 5x5 cm2 sample of the whole surface
> pts, dS, ind, reseff = obj.get_sample(0.05)
> # Perform operations, save only the points indices (save space)
> ...
> # Retrieve the points from their indices (requires same res)
> pts2, dS2, ind2, reseff2 = obj.get_sample(0.05, ind=ind)
> np.allclose(pts,pts2)
True
offsetIn: float
Offset distance from the actual surface of the object
Inwards if positive
Useful to avoid numerical errors
Out : str
Flag indicating the coordinate system of returned points
e.g. : '(X,Y,Z)' or '(R,Z,Phi)'
Ind : None / iterable of ints
Array of indices of the entities to be considered
(only when multiple entities, i.e.: self.nLim>1)
Returns
-------
pts : np.ndarray / list of np.ndarrays
Sample points coordinates, as a (3,N) array.
A list is returned if the object has multiple entities
dS : np.ndarray / list of np.ndarrays
The surface (in m^2) associated to each point
ind : np.ndarray / list of np.ndarrays
The index of each point
reseff : np.ndarray / list of np.ndarrays
Effective resolution in both directions after sample computation
"""
if Ind is not None:
assert self.dgeom['Multi'] # depends on [control=['if'], data=[]]
kwdargs = dict(DS=DS, dSMode=resMode, ind=ind, DIn=offsetIn, VIn=self.dgeom['VIn'], VType=self.Id.Type, VLim=np.ascontiguousarray(self.Lim), nVLim=self.noccur, Out=Out, margin=1e-09, Multi=self.dgeom['Multi'], Ind=Ind)
args = [self.Poly, self.dgeom['P1Min'][0], self.dgeom['P1Max'][0], self.dgeom['P2Min'][1], self.dgeom['P2Max'][1], res]
(pts, dS, ind, reseff) = _comp._Ves_get_sampleS(*args, **kwdargs)
return (pts, dS, ind, reseff) |
def clean_geometry_type(geometry, target_type, allow_multipart=True):
"""
Return geometry of a specific type if possible.
    Filters and splits up GeometryCollection into target types. This is
    necessary because, after clipping and/or reprojecting, the geometry type
    of a source geometry can change in some edge cases (e.g. a Polygon
    becomes a LineString, or a LineString becomes a Point).
Parameters
----------
geometry : ``shapely.geometry``
target_type : string
target geometry type
allow_multipart : bool
allow multipart geometries (default: True)
Returns
-------
cleaned geometry : ``shapely.geometry``
returns None if input geometry type differs from target type
Raises
------
GeometryTypeError : if geometry type does not match target_type
"""
multipart_geoms = {
"Point": MultiPoint,
"LineString": MultiLineString,
"Polygon": MultiPolygon,
"MultiPoint": MultiPoint,
"MultiLineString": MultiLineString,
"MultiPolygon": MultiPolygon
}
if target_type not in multipart_geoms.keys():
raise TypeError("target type is not supported: %s" % target_type)
if geometry.geom_type == target_type:
return geometry
elif allow_multipart:
target_multipart_type = multipart_geoms[target_type]
if geometry.geom_type == "GeometryCollection":
return target_multipart_type([
clean_geometry_type(g, target_type, allow_multipart)
for g in geometry])
elif any([
isinstance(geometry, target_multipart_type),
multipart_geoms[geometry.geom_type] == target_multipart_type
]):
return geometry
raise GeometryTypeError(
"geometry type does not match: %s, %s" % (geometry.geom_type, target_type)
) | def function[clean_geometry_type, parameter[geometry, target_type, allow_multipart]]:
constant[
Return geometry of a specific type if possible.
    Filters and splits up GeometryCollection into target types. This is
    necessary because, after clipping and/or reprojecting, the geometry type
    of a source geometry can change in some edge cases (e.g. a Polygon
    becomes a LineString, or a LineString becomes a Point).
Parameters
----------
geometry : ``shapely.geometry``
target_type : string
target geometry type
allow_multipart : bool
allow multipart geometries (default: True)
Returns
-------
cleaned geometry : ``shapely.geometry``
returns None if input geometry type differs from target type
Raises
------
GeometryTypeError : if geometry type does not match target_type
]
variable[multipart_geoms] assign[=] dictionary[[<ast.Constant object at 0x7da20c993a30>, <ast.Constant object at 0x7da20c9929b0>, <ast.Constant object at 0x7da20c991f90>, <ast.Constant object at 0x7da20c990610>, <ast.Constant object at 0x7da20c991a50>, <ast.Constant object at 0x7da20c991a80>], [<ast.Name object at 0x7da20c9911b0>, <ast.Name object at 0x7da20c992260>, <ast.Name object at 0x7da20c990400>, <ast.Name object at 0x7da20c993190>, <ast.Name object at 0x7da20c990f10>, <ast.Name object at 0x7da20c992980>]]
if compare[name[target_type] <ast.NotIn object at 0x7da2590d7190> call[name[multipart_geoms].keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da20c993fd0>
if compare[name[geometry].geom_type equal[==] name[target_type]] begin[:]
return[name[geometry]]
<ast.Raise object at 0x7da1b00336d0> | keyword[def] identifier[clean_geometry_type] ( identifier[geometry] , identifier[target_type] , identifier[allow_multipart] = keyword[True] ):
literal[string]
identifier[multipart_geoms] ={
literal[string] : identifier[MultiPoint] ,
literal[string] : identifier[MultiLineString] ,
literal[string] : identifier[MultiPolygon] ,
literal[string] : identifier[MultiPoint] ,
literal[string] : identifier[MultiLineString] ,
literal[string] : identifier[MultiPolygon]
}
keyword[if] identifier[target_type] keyword[not] keyword[in] identifier[multipart_geoms] . identifier[keys] ():
keyword[raise] identifier[TypeError] ( literal[string] % identifier[target_type] )
keyword[if] identifier[geometry] . identifier[geom_type] == identifier[target_type] :
keyword[return] identifier[geometry]
keyword[elif] identifier[allow_multipart] :
identifier[target_multipart_type] = identifier[multipart_geoms] [ identifier[target_type] ]
keyword[if] identifier[geometry] . identifier[geom_type] == literal[string] :
keyword[return] identifier[target_multipart_type] ([
identifier[clean_geometry_type] ( identifier[g] , identifier[target_type] , identifier[allow_multipart] )
keyword[for] identifier[g] keyword[in] identifier[geometry] ])
keyword[elif] identifier[any] ([
identifier[isinstance] ( identifier[geometry] , identifier[target_multipart_type] ),
identifier[multipart_geoms] [ identifier[geometry] . identifier[geom_type] ]== identifier[target_multipart_type]
]):
keyword[return] identifier[geometry]
keyword[raise] identifier[GeometryTypeError] (
literal[string] %( identifier[geometry] . identifier[geom_type] , identifier[target_type] )
) | def clean_geometry_type(geometry, target_type, allow_multipart=True):
"""
Return geometry of a specific type if possible.
    Filters and splits up GeometryCollection into target types. This is
    necessary because, after clipping and/or reprojecting, the geometry type
    of a source geometry can change in some edge cases (e.g. a Polygon
    becomes a LineString, or a LineString becomes a Point).
Parameters
----------
geometry : ``shapely.geometry``
target_type : string
target geometry type
allow_multipart : bool
allow multipart geometries (default: True)
Returns
-------
cleaned geometry : ``shapely.geometry``
returns None if input geometry type differs from target type
Raises
------
GeometryTypeError : if geometry type does not match target_type
"""
multipart_geoms = {'Point': MultiPoint, 'LineString': MultiLineString, 'Polygon': MultiPolygon, 'MultiPoint': MultiPoint, 'MultiLineString': MultiLineString, 'MultiPolygon': MultiPolygon}
if target_type not in multipart_geoms.keys():
raise TypeError('target type is not supported: %s' % target_type) # depends on [control=['if'], data=['target_type']]
if geometry.geom_type == target_type:
return geometry # depends on [control=['if'], data=[]]
elif allow_multipart:
target_multipart_type = multipart_geoms[target_type]
if geometry.geom_type == 'GeometryCollection':
return target_multipart_type([clean_geometry_type(g, target_type, allow_multipart) for g in geometry]) # depends on [control=['if'], data=[]]
elif any([isinstance(geometry, target_multipart_type), multipart_geoms[geometry.geom_type] == target_multipart_type]):
return geometry # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
raise GeometryTypeError('geometry type does not match: %s, %s' % (geometry.geom_type, target_type)) |
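# Usage sketch for clean_geometry_type() above, with shapely assumed to be
# installed: a mixed GeometryCollection is filtered down to one target type
# and rewrapped as the matching multipart geometry.
from shapely.geometry import GeometryCollection, LineString, MultiLineString, Point

gc = GeometryCollection([LineString([(0, 0), (1, 1)]), Point(2, 2)])
lines = MultiLineString([g for g in gc.geoms if g.geom_type == 'LineString'])
print(lines.geom_type)  # -> MultiLineString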
def attach(zpool, device, new_device, force=False):
'''
Attach specified device to zpool
zpool : string
Name of storage pool
device : string
        Existing device name
new_device : string
New device name (to be attached to ``device``)
force : boolean
Forces use of device
CLI Example:
.. code-block:: bash
salt '*' zpool.attach myzpool /path/to/vdev1 /path/to/vdev2 [...]
'''
## Configure pool
# NOTE: initialize the defaults
flags = []
target = []
# NOTE: set extra config
if force:
flags.append('-f')
# NOTE: append the pool name and specifications
target.append(zpool)
target.append(device)
target.append(new_device)
## Update storage pool
res = __salt__['cmd.run_all'](
__utils__['zfs.zpool_command'](
command='attach',
flags=flags,
target=target,
),
python_shell=False,
)
ret = __utils__['zfs.parse_command_result'](res, 'attached')
if ret['attached']:
## NOTE: lookup zpool status for vdev config
ret['vdevs'] = _clean_vdev_config(
__salt__['zpool.status'](zpool=zpool)[zpool]['config'][zpool],
)
return ret | def function[attach, parameter[zpool, device, new_device, force]]:
constant[
Attach specified device to zpool
zpool : string
Name of storage pool
device : string
        Existing device name
new_device : string
New device name (to be attached to ``device``)
force : boolean
Forces use of device
CLI Example:
.. code-block:: bash
salt '*' zpool.attach myzpool /path/to/vdev1 /path/to/vdev2 [...]
]
variable[flags] assign[=] list[[]]
variable[target] assign[=] list[[]]
if name[force] begin[:]
call[name[flags].append, parameter[constant[-f]]]
call[name[target].append, parameter[name[zpool]]]
call[name[target].append, parameter[name[device]]]
call[name[target].append, parameter[name[new_device]]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[call[name[__utils__]][constant[zfs.zpool_command]], parameter[]]]]
variable[ret] assign[=] call[call[name[__utils__]][constant[zfs.parse_command_result]], parameter[name[res], constant[attached]]]
if call[name[ret]][constant[attached]] begin[:]
call[name[ret]][constant[vdevs]] assign[=] call[name[_clean_vdev_config], parameter[call[call[call[call[call[name[__salt__]][constant[zpool.status]], parameter[]]][name[zpool]]][constant[config]]][name[zpool]]]]
return[name[ret]] | keyword[def] identifier[attach] ( identifier[zpool] , identifier[device] , identifier[new_device] , identifier[force] = keyword[False] ):
literal[string]
identifier[flags] =[]
identifier[target] =[]
keyword[if] identifier[force] :
identifier[flags] . identifier[append] ( literal[string] )
identifier[target] . identifier[append] ( identifier[zpool] )
identifier[target] . identifier[append] ( identifier[device] )
identifier[target] . identifier[append] ( identifier[new_device] )
identifier[res] = identifier[__salt__] [ literal[string] ](
identifier[__utils__] [ literal[string] ](
identifier[command] = literal[string] ,
identifier[flags] = identifier[flags] ,
identifier[target] = identifier[target] ,
),
identifier[python_shell] = keyword[False] ,
)
identifier[ret] = identifier[__utils__] [ literal[string] ]( identifier[res] , literal[string] )
keyword[if] identifier[ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_clean_vdev_config] (
identifier[__salt__] [ literal[string] ]( identifier[zpool] = identifier[zpool] )[ identifier[zpool] ][ literal[string] ][ identifier[zpool] ],
)
keyword[return] identifier[ret] | def attach(zpool, device, new_device, force=False):
"""
Attach specified device to zpool
zpool : string
Name of storage pool
device : string
        Existing device name
new_device : string
New device name (to be attached to ``device``)
force : boolean
Forces use of device
CLI Example:
.. code-block:: bash
salt '*' zpool.attach myzpool /path/to/vdev1 /path/to/vdev2 [...]
"""
## Configure pool
# NOTE: initialize the defaults
flags = []
target = []
# NOTE: set extra config
if force:
flags.append('-f') # depends on [control=['if'], data=[]]
# NOTE: append the pool name and specifications
target.append(zpool)
target.append(device)
target.append(new_device)
## Update storage pool
res = __salt__['cmd.run_all'](__utils__['zfs.zpool_command'](command='attach', flags=flags, target=target), python_shell=False)
ret = __utils__['zfs.parse_command_result'](res, 'attached')
if ret['attached']:
## NOTE: lookup zpool status for vdev config
ret['vdevs'] = _clean_vdev_config(__salt__['zpool.status'](zpool=zpool)[zpool]['config'][zpool]) # depends on [control=['if'], data=[]]
return ret |
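# Usage sketch for attach() above: the command assembly in isolation, with
# no salt runtime required. The pool and vdev names are taken from the
# docstring's CLI example.
flags, target = [], []
force = True
if force:
    flags.append('-f')
target += ['myzpool', '/path/to/vdev1', '/path/to/vdev2']
print(['zpool', 'attach'] + flags + target)
# -> ['zpool', 'attach', '-f', 'myzpool', '/path/to/vdev1', '/path/to/vdev2']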
def get_thread(self, email, mailinglist):
"""Group messages by thread looking for similar subjects"""
subject_slug = slugify(email.subject_clean)
thread = self.THREAD_CACHE.get(subject_slug, {}).get(mailinglist.id)
if thread is None:
thread = Thread.all_objects.get_or_create(
mailinglist=mailinglist,
subject_token=subject_slug
)[0]
if self.THREAD_CACHE.get(subject_slug) is None:
self.THREAD_CACHE[subject_slug] = dict()
self.THREAD_CACHE[subject_slug][mailinglist.id] = thread
thread.latest_message = email
thread.update_keywords()
thread.save()
return thread | def function[get_thread, parameter[self, email, mailinglist]]:
constant[Group messages by thread looking for similar subjects]
variable[subject_slug] assign[=] call[name[slugify], parameter[name[email].subject_clean]]
variable[thread] assign[=] call[call[name[self].THREAD_CACHE.get, parameter[name[subject_slug], dictionary[[], []]]].get, parameter[name[mailinglist].id]]
if compare[name[thread] is constant[None]] begin[:]
variable[thread] assign[=] call[call[name[Thread].all_objects.get_or_create, parameter[]]][constant[0]]
if compare[call[name[self].THREAD_CACHE.get, parameter[name[subject_slug]]] is constant[None]] begin[:]
call[name[self].THREAD_CACHE][name[subject_slug]] assign[=] call[name[dict], parameter[]]
call[call[name[self].THREAD_CACHE][name[subject_slug]]][name[mailinglist].id] assign[=] name[thread]
name[thread].latest_message assign[=] name[email]
call[name[thread].update_keywords, parameter[]]
call[name[thread].save, parameter[]]
return[name[thread]] | keyword[def] identifier[get_thread] ( identifier[self] , identifier[email] , identifier[mailinglist] ):
literal[string]
identifier[subject_slug] = identifier[slugify] ( identifier[email] . identifier[subject_clean] )
identifier[thread] = identifier[self] . identifier[THREAD_CACHE] . identifier[get] ( identifier[subject_slug] ,{}). identifier[get] ( identifier[mailinglist] . identifier[id] )
keyword[if] identifier[thread] keyword[is] keyword[None] :
identifier[thread] = identifier[Thread] . identifier[all_objects] . identifier[get_or_create] (
identifier[mailinglist] = identifier[mailinglist] ,
identifier[subject_token] = identifier[subject_slug]
)[ literal[int] ]
keyword[if] identifier[self] . identifier[THREAD_CACHE] . identifier[get] ( identifier[subject_slug] ) keyword[is] keyword[None] :
identifier[self] . identifier[THREAD_CACHE] [ identifier[subject_slug] ]= identifier[dict] ()
identifier[self] . identifier[THREAD_CACHE] [ identifier[subject_slug] ][ identifier[mailinglist] . identifier[id] ]= identifier[thread]
identifier[thread] . identifier[latest_message] = identifier[email]
identifier[thread] . identifier[update_keywords] ()
identifier[thread] . identifier[save] ()
keyword[return] identifier[thread] | def get_thread(self, email, mailinglist):
"""Group messages by thread looking for similar subjects"""
subject_slug = slugify(email.subject_clean)
thread = self.THREAD_CACHE.get(subject_slug, {}).get(mailinglist.id)
if thread is None:
thread = Thread.all_objects.get_or_create(mailinglist=mailinglist, subject_token=subject_slug)[0]
if self.THREAD_CACHE.get(subject_slug) is None:
self.THREAD_CACHE[subject_slug] = dict() # depends on [control=['if'], data=[]]
self.THREAD_CACHE[subject_slug][mailinglist.id] = thread # depends on [control=['if'], data=['thread']]
thread.latest_message = email
thread.update_keywords()
thread.save()
return thread |
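# Usage sketch for get_thread() above: the same two-level cache pattern
# (subject slug, then mailing-list id) in a standalone, hypothetical form.
THREAD_CACHE = {}

def cached_thread(subject_slug, list_id, factory):
    per_list = THREAD_CACHE.setdefault(subject_slug, {})
    if list_id not in per_list:
        per_list[list_id] = factory()
    return per_list[list_id]

t1 = cached_thread('re-hello', 1, object)
t2 = cached_thread('re-hello', 1, object)
print(t1 is t2)  # -> True: the second lookup hits the cache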
def post(self, endpoint, d, *args, **kwargs):
"""
**post**
Make a POST call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
* POST data
**NOTE**: Passed POST data will be automatically serialized to a JSON string
if it's not already a string
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
endpoint = self.base_url + endpoint
if not isinstance(d, str):
d = json.dumps(d)
return self._make_request(endpoint, type='POST', data=d) | def function[post, parameter[self, endpoint, d]]:
constant[
**post**
Make a POST call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
* POST data
**NOTE**: Passed POST data will be automatically serialized to a JSON string
if it's not already a string
Output:
* A :py:mod:`pygett.request.GettResponse` object
]
variable[endpoint] assign[=] binary_operation[name[self].base_url + name[endpoint]]
if <ast.UnaryOp object at 0x7da1b14d2710> begin[:]
variable[d] assign[=] call[name[json].dumps, parameter[name[d]]]
return[call[name[self]._make_request, parameter[name[endpoint]]]] | keyword[def] identifier[post] ( identifier[self] , identifier[endpoint] , identifier[d] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[endpoint] = identifier[self] . identifier[base_url] + identifier[endpoint]
keyword[if] keyword[not] identifier[isinstance] ( identifier[d] , identifier[str] ):
identifier[d] = identifier[json] . identifier[dumps] ( identifier[d] )
keyword[return] identifier[self] . identifier[_make_request] ( identifier[endpoint] , identifier[type] = literal[string] , identifier[data] = identifier[d] ) | def post(self, endpoint, d, *args, **kwargs):
"""
**post**
Make a POST call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
* POST data
**NOTE**: Passed POST data will be automatically serialized to a JSON string
if it's not already a string
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
endpoint = self.base_url + endpoint
if not isinstance(d, str):
d = json.dumps(d) # depends on [control=['if'], data=[]]
return self._make_request(endpoint, type='POST', data=d) |
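# Usage sketch for post() above: the serialisation guard in isolation, with
# no network involved.
import json

def to_body(d):
    return d if isinstance(d, str) else json.dumps(d)

print(to_body({'filename': 'a.txt'}))  # -> {"filename": "a.txt"}
print(to_body('{"raw": true}'))        # -> passed through unchanged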
def _get_bin(self, key):
'''
Returns a binned dictionary based on redis zscore
@return: The sorted dict
'''
# keys based on score
sortedDict = {}
        # this doesn't return them in order, so we need to bin first
for item in self.redis_conn.zscan_iter(key):
my_item = ujson.loads(item[0])
# score is negated in redis
my_score = -item[1]
if my_score not in sortedDict:
sortedDict[my_score] = []
sortedDict[my_score].append(my_item)
return sortedDict | def function[_get_bin, parameter[self, key]]:
constant[
Returns a binned dictionary based on redis zscore
@return: The sorted dict
]
variable[sortedDict] assign[=] dictionary[[], []]
for taget[name[item]] in starred[call[name[self].redis_conn.zscan_iter, parameter[name[key]]]] begin[:]
variable[my_item] assign[=] call[name[ujson].loads, parameter[call[name[item]][constant[0]]]]
variable[my_score] assign[=] <ast.UnaryOp object at 0x7da1b1906da0>
if compare[name[my_score] <ast.NotIn object at 0x7da2590d7190> name[sortedDict]] begin[:]
call[name[sortedDict]][name[my_score]] assign[=] list[[]]
call[call[name[sortedDict]][name[my_score]].append, parameter[name[my_item]]]
return[name[sortedDict]] | keyword[def] identifier[_get_bin] ( identifier[self] , identifier[key] ):
literal[string]
identifier[sortedDict] ={}
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[redis_conn] . identifier[zscan_iter] ( identifier[key] ):
identifier[my_item] = identifier[ujson] . identifier[loads] ( identifier[item] [ literal[int] ])
identifier[my_score] =- identifier[item] [ literal[int] ]
keyword[if] identifier[my_score] keyword[not] keyword[in] identifier[sortedDict] :
identifier[sortedDict] [ identifier[my_score] ]=[]
identifier[sortedDict] [ identifier[my_score] ]. identifier[append] ( identifier[my_item] )
keyword[return] identifier[sortedDict] | def _get_bin(self, key):
"""
Returns a binned dictionary based on redis zscore
@return: The sorted dict
"""
# keys based on score
sortedDict = {}
        # this doesn't return them in order, so we need to bin first
for item in self.redis_conn.zscan_iter(key):
my_item = ujson.loads(item[0])
# score is negated in redis
my_score = -item[1]
if my_score not in sortedDict:
sortedDict[my_score] = [] # depends on [control=['if'], data=['my_score', 'sortedDict']]
sortedDict[my_score].append(my_item) # depends on [control=['for'], data=['item']]
return sortedDict |
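# Usage sketch for _get_bin() above: the same score-negating binning over an
# in-memory (member, score) iterable, so no redis connection is required.
import json

items = [('{"id": 1}', -10.0), ('{"id": 2}', -10.0), ('{"id": 3}', -5.0)]
bins = {}
for member, score in items:
    bins.setdefault(-score, []).append(json.loads(member))
print(bins)  # -> {10.0: [{'id': 1}, {'id': 2}], 5.0: [{'id': 3}]}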
def randurl(self):
""" -> a random url-like #str via :prop:randdomain, :prop:randtld,
and :prop:randpath
"""
return "{}://{}.{}/{}".format(
self.random.choice(("http", "https")),
self.randdomain, self.randtld, self.randpath) | def function[randurl, parameter[self]]:
constant[ -> a random url-like #str via :prop:randdomain, :prop:randtld,
and :prop:randpath
]
return[call[constant[{}://{}.{}/{}].format, parameter[call[name[self].random.choice, parameter[tuple[[<ast.Constant object at 0x7da20c76fd00>, <ast.Constant object at 0x7da20c76ead0>]]]], name[self].randdomain, name[self].randtld, name[self].randpath]]] | keyword[def] identifier[randurl] ( identifier[self] ):
literal[string]
keyword[return] literal[string] . identifier[format] (
identifier[self] . identifier[random] . identifier[choice] (( literal[string] , literal[string] )),
identifier[self] . identifier[randdomain] , identifier[self] . identifier[randtld] , identifier[self] . identifier[randpath] ) | def randurl(self):
""" -> a random url-like #str via :prop:randdomain, :prop:randtld,
and :prop:randpath
"""
return '{}://{}.{}/{}'.format(self.random.choice(('http', 'https')), self.randdomain, self.randtld, self.randpath) |
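# Usage sketch for randurl() above, made self-contained: randdomain, randtld
# and randpath are properties defined elsewhere, so the pools below are
# assumptions for illustration only.
import random

def randurl(rng=random):
    domain = ''.join(rng.choice('abcdefgh') for _ in range(8))
    tld = rng.choice(('com', 'org', 'net'))
    path = '/'.join(rng.choice(('a', 'b', 'c')) for _ in range(2))
    return '{}://{}.{}/{}'.format(rng.choice(('http', 'https')), domain, tld, path)

print(randurl())  # e.g. https://cgahbdfe.org/b/c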
def relabel_atoms(self, start=1):
"""Relabels all Atoms in numerical order, offset by the start parameter.
Parameters
----------
start : int, optional
Defines an offset for the labelling.
"""
counter = start
for atom in self.get_atoms(ligands=True):
atom.id = counter
counter += 1
return | def function[relabel_atoms, parameter[self, start]]:
constant[Relabels all Atoms in numerical order, offset by the start parameter.
Parameters
----------
start : int, optional
Defines an offset for the labelling.
]
variable[counter] assign[=] name[start]
for taget[name[atom]] in starred[call[name[self].get_atoms, parameter[]]] begin[:]
name[atom].id assign[=] name[counter]
<ast.AugAssign object at 0x7da1b262ae60>
return[None] | keyword[def] identifier[relabel_atoms] ( identifier[self] , identifier[start] = literal[int] ):
literal[string]
identifier[counter] = identifier[start]
keyword[for] identifier[atom] keyword[in] identifier[self] . identifier[get_atoms] ( identifier[ligands] = keyword[True] ):
identifier[atom] . identifier[id] = identifier[counter]
identifier[counter] += literal[int]
keyword[return] | def relabel_atoms(self, start=1):
"""Relabels all Atoms in numerical order, offset by the start parameter.
Parameters
----------
start : int, optional
Defines an offset for the labelling.
"""
counter = start
for atom in self.get_atoms(ligands=True):
atom.id = counter
counter += 1 # depends on [control=['for'], data=['atom']]
return |
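# Usage sketch for relabel_atoms() above: the same renumbering written with
# enumerate() on a stand-in atom type (the Atom class here is hypothetical).
class Atom:
    def __init__(self):
        self.id = None

atoms = [Atom() for _ in range(3)]
for i, atom in enumerate(atoms, start=5):
    atom.id = i
print([a.id for a in atoms])  # -> [5, 6, 7]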
def _Struct_set_Poly(Poly, pos=None, extent=None, arrayorder='C',
Type='Tor', Clock=False):
""" Compute geometrical attributes of a Struct object """
# Make Poly closed, counter-clockwise, with '(cc,N)' layout and arrayorder
Poly = _GG.Poly_Order(Poly, order='C', Clock=False,
close=True, layout='(cc,N)', Test=True)
assert Poly.shape[0]==2, "Arg Poly must be a 2D polygon !"
fPfmt = np.ascontiguousarray if arrayorder=='C' else np.asfortranarray
# Get all remarkable points and moments
NP = Poly.shape[1]-1
P1Max = Poly[:,np.argmax(Poly[0,:])]
P1Min = Poly[:,np.argmin(Poly[0,:])]
P2Max = Poly[:,np.argmax(Poly[1,:])]
P2Min = Poly[:,np.argmin(Poly[1,:])]
BaryP = np.sum(Poly[:,:-1],axis=1,keepdims=False)/(Poly.shape[1]-1)
BaryL = np.array([(P1Max[0]+P1Min[0])/2., (P2Max[1]+P2Min[1])/2.])
TorP = plg.Polygon(Poly.T)
Surf = TorP.area()
BaryS = np.array(TorP.center()).flatten()
# Get lim-related indicators
noccur = int(pos.size)
Multi = noccur>1
# Get Tor-related quantities
if Type.lower()=='lin':
Vol, BaryV = None, None
else:
Vol, BaryV = _GG.Poly_VolAngTor(Poly)
msg = "Pb. with volume computation for Ves object of type 'Tor' !"
assert Vol>0., msg
# Compute the non-normalized vector of each side of the Poly
Vect = np.diff(Poly,n=1,axis=1)
Vect = fPfmt(Vect)
# Compute the normalised vectors directed inwards
Vin = np.array([Vect[1,:],-Vect[0,:]])
if not _GG.Poly_isClockwise(Poly):
Vin = -Vin
Vin = Vin/np.hypot(Vin[0,:],Vin[1,:])[np.newaxis,:]
Vin = fPfmt(Vin)
poly = _GG.Poly_Order(Poly, order=arrayorder, Clock=Clock,
close=False, layout='(cc,N)', Test=True)
# Get bounding circle
circC = BaryS
r = np.sqrt(np.sum((poly-circC[:,np.newaxis])**2,axis=0))
circr = np.max(r)
dout = {'Poly':poly, 'pos':pos, 'extent':extent,
'noccur':noccur, 'Multi':Multi, 'nP':NP,
'P1Max':P1Max, 'P1Min':P1Min, 'P2Max':P2Max, 'P2Min':P2Min,
'BaryP':BaryP, 'BaryL':BaryL, 'BaryS':BaryS, 'BaryV':BaryV,
'Surf':Surf, 'VolAng':Vol, 'Vect':Vect, 'VIn':Vin,
'circ-C':circC, 'circ-r':circr, 'Clock':Clock}
return dout | def function[_Struct_set_Poly, parameter[Poly, pos, extent, arrayorder, Type, Clock]]:
constant[ Compute geometrical attributes of a Struct object ]
variable[Poly] assign[=] call[name[_GG].Poly_Order, parameter[name[Poly]]]
assert[compare[call[name[Poly].shape][constant[0]] equal[==] constant[2]]]
variable[fPfmt] assign[=] <ast.IfExp object at 0x7da18fe93ac0>
variable[NP] assign[=] binary_operation[call[name[Poly].shape][constant[1]] - constant[1]]
variable[P1Max] assign[=] call[name[Poly]][tuple[[<ast.Slice object at 0x7da18fe90c10>, <ast.Call object at 0x7da18fe93e20>]]]
variable[P1Min] assign[=] call[name[Poly]][tuple[[<ast.Slice object at 0x7da18fe90fa0>, <ast.Call object at 0x7da18fe91ab0>]]]
variable[P2Max] assign[=] call[name[Poly]][tuple[[<ast.Slice object at 0x7da18fe92710>, <ast.Call object at 0x7da18fe917e0>]]]
variable[P2Min] assign[=] call[name[Poly]][tuple[[<ast.Slice object at 0x7da18fe92b60>, <ast.Call object at 0x7da18fe918a0>]]]
variable[BaryP] assign[=] binary_operation[call[name[np].sum, parameter[call[name[Poly]][tuple[[<ast.Slice object at 0x7da18fe92fb0>, <ast.Slice object at 0x7da18fe90430>]]]]] / binary_operation[call[name[Poly].shape][constant[1]] - constant[1]]]
variable[BaryL] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da18fe93af0>, <ast.BinOp object at 0x7da18fe927d0>]]]]
variable[TorP] assign[=] call[name[plg].Polygon, parameter[name[Poly].T]]
variable[Surf] assign[=] call[name[TorP].area, parameter[]]
variable[BaryS] assign[=] call[call[name[np].array, parameter[call[name[TorP].center, parameter[]]]].flatten, parameter[]]
variable[noccur] assign[=] call[name[int], parameter[name[pos].size]]
variable[Multi] assign[=] compare[name[noccur] greater[>] constant[1]]
if compare[call[name[Type].lower, parameter[]] equal[==] constant[lin]] begin[:]
<ast.Tuple object at 0x7da18fe91090> assign[=] tuple[[<ast.Constant object at 0x7da18fe90760>, <ast.Constant object at 0x7da18fe925c0>]]
variable[Vect] assign[=] call[name[np].diff, parameter[name[Poly]]]
variable[Vect] assign[=] call[name[fPfmt], parameter[name[Vect]]]
variable[Vin] assign[=] call[name[np].array, parameter[list[[<ast.Subscript object at 0x7da18fe928f0>, <ast.UnaryOp object at 0x7da18fe91450>]]]]
if <ast.UnaryOp object at 0x7da1b0c0a8c0> begin[:]
variable[Vin] assign[=] <ast.UnaryOp object at 0x7da1b0c0a5c0>
variable[Vin] assign[=] binary_operation[name[Vin] / call[call[name[np].hypot, parameter[call[name[Vin]][tuple[[<ast.Constant object at 0x7da1b0c0b4c0>, <ast.Slice object at 0x7da1b0c09930>]]], call[name[Vin]][tuple[[<ast.Constant object at 0x7da1b0c09630>, <ast.Slice object at 0x7da1b0c08d90>]]]]]][tuple[[<ast.Attribute object at 0x7da1b0c0b610>, <ast.Slice object at 0x7da1b0c0b970>]]]]
variable[Vin] assign[=] call[name[fPfmt], parameter[name[Vin]]]
variable[poly] assign[=] call[name[_GG].Poly_Order, parameter[name[Poly]]]
variable[circC] assign[=] name[BaryS]
variable[r] assign[=] call[name[np].sqrt, parameter[call[name[np].sum, parameter[binary_operation[binary_operation[name[poly] - call[name[circC]][tuple[[<ast.Slice object at 0x7da1b0c09f60>, <ast.Attribute object at 0x7da1b0c0a110>]]]] ** constant[2]]]]]]
variable[circr] assign[=] call[name[np].max, parameter[name[r]]]
variable[dout] assign[=] dictionary[[<ast.Constant object at 0x7da2041d85e0>, <ast.Constant object at 0x7da2041db070>, <ast.Constant object at 0x7da2041d9f30>, <ast.Constant object at 0x7da2041dac80>, <ast.Constant object at 0x7da2041d85b0>, <ast.Constant object at 0x7da2041d8580>, <ast.Constant object at 0x7da2041d80a0>, <ast.Constant object at 0x7da2041db820>, <ast.Constant object at 0x7da2041daf50>, <ast.Constant object at 0x7da2041d99f0>, <ast.Constant object at 0x7da2041db520>, <ast.Constant object at 0x7da2041da6e0>, <ast.Constant object at 0x7da2041d94e0>, <ast.Constant object at 0x7da2041d8070>, <ast.Constant object at 0x7da2041daa40>, <ast.Constant object at 0x7da2041db370>, <ast.Constant object at 0x7da2041db400>, <ast.Constant object at 0x7da2041dbb80>, <ast.Constant object at 0x7da2041d9990>, <ast.Constant object at 0x7da2041dbeb0>, <ast.Constant object at 0x7da2041d8790>], [<ast.Name object at 0x7da2041d8640>, <ast.Name object at 0x7da2041d9960>, <ast.Name object at 0x7da2041d91b0>, <ast.Name object at 0x7da2041d9180>, <ast.Name object at 0x7da2041da3e0>, <ast.Name object at 0x7da2041d8fd0>, <ast.Name object at 0x7da2041d8100>, <ast.Name object at 0x7da2041d9210>, <ast.Name object at 0x7da2041d82e0>, <ast.Name object at 0x7da2041da7a0>, <ast.Name object at 0x7da2041d9510>, <ast.Name object at 0x7da2041db130>, <ast.Name object at 0x7da2041da8c0>, <ast.Name object at 0x7da2041dbdf0>, <ast.Name object at 0x7da2041dbc70>, <ast.Name object at 0x7da2041d99c0>, <ast.Name object at 0x7da2041d8940>, <ast.Name object at 0x7da2041da950>, <ast.Name object at 0x7da2041d8d60>, <ast.Name object at 0x7da2041d84f0>, <ast.Name object at 0x7da2041d8eb0>]]
return[name[dout]] | keyword[def] identifier[_Struct_set_Poly] ( identifier[Poly] , identifier[pos] = keyword[None] , identifier[extent] = keyword[None] , identifier[arrayorder] = literal[string] ,
identifier[Type] = literal[string] , identifier[Clock] = keyword[False] ):
literal[string]
identifier[Poly] = identifier[_GG] . identifier[Poly_Order] ( identifier[Poly] , identifier[order] = literal[string] , identifier[Clock] = keyword[False] ,
identifier[close] = keyword[True] , identifier[layout] = literal[string] , identifier[Test] = keyword[True] )
keyword[assert] identifier[Poly] . identifier[shape] [ literal[int] ]== literal[int] , literal[string]
identifier[fPfmt] = identifier[np] . identifier[ascontiguousarray] keyword[if] identifier[arrayorder] == literal[string] keyword[else] identifier[np] . identifier[asfortranarray]
identifier[NP] = identifier[Poly] . identifier[shape] [ literal[int] ]- literal[int]
identifier[P1Max] = identifier[Poly] [:, identifier[np] . identifier[argmax] ( identifier[Poly] [ literal[int] ,:])]
identifier[P1Min] = identifier[Poly] [:, identifier[np] . identifier[argmin] ( identifier[Poly] [ literal[int] ,:])]
identifier[P2Max] = identifier[Poly] [:, identifier[np] . identifier[argmax] ( identifier[Poly] [ literal[int] ,:])]
identifier[P2Min] = identifier[Poly] [:, identifier[np] . identifier[argmin] ( identifier[Poly] [ literal[int] ,:])]
identifier[BaryP] = identifier[np] . identifier[sum] ( identifier[Poly] [:,:- literal[int] ], identifier[axis] = literal[int] , identifier[keepdims] = keyword[False] )/( identifier[Poly] . identifier[shape] [ literal[int] ]- literal[int] )
identifier[BaryL] = identifier[np] . identifier[array] ([( identifier[P1Max] [ literal[int] ]+ identifier[P1Min] [ literal[int] ])/ literal[int] ,( identifier[P2Max] [ literal[int] ]+ identifier[P2Min] [ literal[int] ])/ literal[int] ])
identifier[TorP] = identifier[plg] . identifier[Polygon] ( identifier[Poly] . identifier[T] )
identifier[Surf] = identifier[TorP] . identifier[area] ()
identifier[BaryS] = identifier[np] . identifier[array] ( identifier[TorP] . identifier[center] ()). identifier[flatten] ()
identifier[noccur] = identifier[int] ( identifier[pos] . identifier[size] )
identifier[Multi] = identifier[noccur] > literal[int]
keyword[if] identifier[Type] . identifier[lower] ()== literal[string] :
identifier[Vol] , identifier[BaryV] = keyword[None] , keyword[None]
keyword[else] :
identifier[Vol] , identifier[BaryV] = identifier[_GG] . identifier[Poly_VolAngTor] ( identifier[Poly] )
identifier[msg] = literal[string]
keyword[assert] identifier[Vol] > literal[int] , identifier[msg]
identifier[Vect] = identifier[np] . identifier[diff] ( identifier[Poly] , identifier[n] = literal[int] , identifier[axis] = literal[int] )
identifier[Vect] = identifier[fPfmt] ( identifier[Vect] )
identifier[Vin] = identifier[np] . identifier[array] ([ identifier[Vect] [ literal[int] ,:],- identifier[Vect] [ literal[int] ,:]])
keyword[if] keyword[not] identifier[_GG] . identifier[Poly_isClockwise] ( identifier[Poly] ):
identifier[Vin] =- identifier[Vin]
identifier[Vin] = identifier[Vin] / identifier[np] . identifier[hypot] ( identifier[Vin] [ literal[int] ,:], identifier[Vin] [ literal[int] ,:])[ identifier[np] . identifier[newaxis] ,:]
identifier[Vin] = identifier[fPfmt] ( identifier[Vin] )
identifier[poly] = identifier[_GG] . identifier[Poly_Order] ( identifier[Poly] , identifier[order] = identifier[arrayorder] , identifier[Clock] = identifier[Clock] ,
identifier[close] = keyword[False] , identifier[layout] = literal[string] , identifier[Test] = keyword[True] )
identifier[circC] = identifier[BaryS]
identifier[r] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] (( identifier[poly] - identifier[circC] [:, identifier[np] . identifier[newaxis] ])** literal[int] , identifier[axis] = literal[int] ))
identifier[circr] = identifier[np] . identifier[max] ( identifier[r] )
identifier[dout] ={ literal[string] : identifier[poly] , literal[string] : identifier[pos] , literal[string] : identifier[extent] ,
literal[string] : identifier[noccur] , literal[string] : identifier[Multi] , literal[string] : identifier[NP] ,
literal[string] : identifier[P1Max] , literal[string] : identifier[P1Min] , literal[string] : identifier[P2Max] , literal[string] : identifier[P2Min] ,
literal[string] : identifier[BaryP] , literal[string] : identifier[BaryL] , literal[string] : identifier[BaryS] , literal[string] : identifier[BaryV] ,
literal[string] : identifier[Surf] , literal[string] : identifier[Vol] , literal[string] : identifier[Vect] , literal[string] : identifier[Vin] ,
literal[string] : identifier[circC] , literal[string] : identifier[circr] , literal[string] : identifier[Clock] }
keyword[return] identifier[dout] | def _Struct_set_Poly(Poly, pos=None, extent=None, arrayorder='C', Type='Tor', Clock=False):
""" Compute geometrical attributes of a Struct object """
# Make Poly closed, counter-clockwise, with '(cc,N)' layout and arrayorder
Poly = _GG.Poly_Order(Poly, order='C', Clock=False, close=True, layout='(cc,N)', Test=True)
assert Poly.shape[0] == 2, 'Arg Poly must be a 2D polygon !'
fPfmt = np.ascontiguousarray if arrayorder == 'C' else np.asfortranarray
# Get all remarkable points and moments
NP = Poly.shape[1] - 1
P1Max = Poly[:, np.argmax(Poly[0, :])]
P1Min = Poly[:, np.argmin(Poly[0, :])]
P2Max = Poly[:, np.argmax(Poly[1, :])]
P2Min = Poly[:, np.argmin(Poly[1, :])]
BaryP = np.sum(Poly[:, :-1], axis=1, keepdims=False) / (Poly.shape[1] - 1)
BaryL = np.array([(P1Max[0] + P1Min[0]) / 2.0, (P2Max[1] + P2Min[1]) / 2.0])
TorP = plg.Polygon(Poly.T)
Surf = TorP.area()
BaryS = np.array(TorP.center()).flatten()
# Get lim-related indicators
noccur = int(pos.size)
Multi = noccur > 1
# Get Tor-related quantities
if Type.lower() == 'lin':
(Vol, BaryV) = (None, None) # depends on [control=['if'], data=[]]
else:
(Vol, BaryV) = _GG.Poly_VolAngTor(Poly)
msg = "Pb. with volume computation for Ves object of type 'Tor' !"
assert Vol > 0.0, msg
# Compute the non-normalized vector of each side of the Poly
Vect = np.diff(Poly, n=1, axis=1)
Vect = fPfmt(Vect)
# Compute the normalised vectors directed inwards
Vin = np.array([Vect[1, :], -Vect[0, :]])
if not _GG.Poly_isClockwise(Poly):
Vin = -Vin # depends on [control=['if'], data=[]]
Vin = Vin / np.hypot(Vin[0, :], Vin[1, :])[np.newaxis, :]
Vin = fPfmt(Vin)
poly = _GG.Poly_Order(Poly, order=arrayorder, Clock=Clock, close=False, layout='(cc,N)', Test=True)
# Get bounding circle
circC = BaryS
r = np.sqrt(np.sum((poly - circC[:, np.newaxis]) ** 2, axis=0))
circr = np.max(r)
dout = {'Poly': poly, 'pos': pos, 'extent': extent, 'noccur': noccur, 'Multi': Multi, 'nP': NP, 'P1Max': P1Max, 'P1Min': P1Min, 'P2Max': P2Max, 'P2Min': P2Min, 'BaryP': BaryP, 'BaryL': BaryL, 'BaryS': BaryS, 'BaryV': BaryV, 'Surf': Surf, 'VolAng': Vol, 'Vect': Vect, 'VIn': Vin, 'circ-C': circC, 'circ-r': circr, 'Clock': Clock}
return dout |
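# Usage sketch for the inward-normal step of _Struct_set_Poly() above, in
# isolation: edge vectors of a closed 2D polygon, rotated and normalised.
# The flip mirrors the Poly_isClockwise branch for a counter-clockwise input.
import numpy as np

poly = np.array([[0., 1., 1., 0., 0.],   # closed, counter-clockwise unit square
                 [0., 0., 1., 1., 0.]])
vect = np.diff(poly, n=1, axis=1)        # one column per edge
vin = np.array([vect[1, :], -vect[0, :]])
vin = -vin                               # polygon is counter-clockwise, so flip
vin = vin / np.hypot(vin[0, :], vin[1, :])[np.newaxis, :]
print(vin.T)  # rows (0,1), (-1,0), (0,-1), (1,0): all pointing inwards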
def _project_perturbation(perturbation, epsilon, input_image, clip_min=None,
clip_max=None):
"""Project `perturbation` onto L-infinity ball of radius `epsilon`.
Also project into hypercube such that the resulting adversarial example
is between clip_min and clip_max, if applicable.
"""
if clip_min is None or clip_max is None:
raise NotImplementedError("_project_perturbation currently has clipping "
"hard-coded in.")
# Ensure inputs are in the correct range
with tf.control_dependencies([
utils_tf.assert_less_equal(input_image,
tf.cast(clip_max, input_image.dtype)),
utils_tf.assert_greater_equal(input_image,
tf.cast(clip_min, input_image.dtype))
]):
clipped_perturbation = utils_tf.clip_by_value(
perturbation, -epsilon, epsilon)
new_image = utils_tf.clip_by_value(
input_image + clipped_perturbation, clip_min, clip_max)
return new_image - input_image | def function[_project_perturbation, parameter[perturbation, epsilon, input_image, clip_min, clip_max]]:
constant[Project `perturbation` onto L-infinity ball of radius `epsilon`.
Also project into hypercube such that the resulting adversarial example
is between clip_min and clip_max, if applicable.
]
if <ast.BoolOp object at 0x7da18dc990f0> begin[:]
<ast.Raise object at 0x7da18dc98670>
with call[name[tf].control_dependencies, parameter[list[[<ast.Call object at 0x7da18dc98280>, <ast.Call object at 0x7da18dc9ab30>]]]] begin[:]
variable[clipped_perturbation] assign[=] call[name[utils_tf].clip_by_value, parameter[name[perturbation], <ast.UnaryOp object at 0x7da18dc98fd0>, name[epsilon]]]
variable[new_image] assign[=] call[name[utils_tf].clip_by_value, parameter[binary_operation[name[input_image] + name[clipped_perturbation]], name[clip_min], name[clip_max]]]
return[binary_operation[name[new_image] - name[input_image]]] | keyword[def] identifier[_project_perturbation] ( identifier[perturbation] , identifier[epsilon] , identifier[input_image] , identifier[clip_min] = keyword[None] ,
identifier[clip_max] = keyword[None] ):
literal[string]
keyword[if] identifier[clip_min] keyword[is] keyword[None] keyword[or] identifier[clip_max] keyword[is] keyword[None] :
keyword[raise] identifier[NotImplementedError] ( literal[string]
literal[string] )
keyword[with] identifier[tf] . identifier[control_dependencies] ([
identifier[utils_tf] . identifier[assert_less_equal] ( identifier[input_image] ,
identifier[tf] . identifier[cast] ( identifier[clip_max] , identifier[input_image] . identifier[dtype] )),
identifier[utils_tf] . identifier[assert_greater_equal] ( identifier[input_image] ,
identifier[tf] . identifier[cast] ( identifier[clip_min] , identifier[input_image] . identifier[dtype] ))
]):
identifier[clipped_perturbation] = identifier[utils_tf] . identifier[clip_by_value] (
identifier[perturbation] ,- identifier[epsilon] , identifier[epsilon] )
identifier[new_image] = identifier[utils_tf] . identifier[clip_by_value] (
identifier[input_image] + identifier[clipped_perturbation] , identifier[clip_min] , identifier[clip_max] )
keyword[return] identifier[new_image] - identifier[input_image] | def _project_perturbation(perturbation, epsilon, input_image, clip_min=None, clip_max=None):
"""Project `perturbation` onto L-infinity ball of radius `epsilon`.
Also project into hypercube such that the resulting adversarial example
is between clip_min and clip_max, if applicable.
"""
if clip_min is None or clip_max is None:
raise NotImplementedError('_project_perturbation currently has clipping hard-coded in.') # depends on [control=['if'], data=[]]
# Ensure inputs are in the correct range
with tf.control_dependencies([utils_tf.assert_less_equal(input_image, tf.cast(clip_max, input_image.dtype)), utils_tf.assert_greater_equal(input_image, tf.cast(clip_min, input_image.dtype))]):
clipped_perturbation = utils_tf.clip_by_value(perturbation, -epsilon, epsilon)
new_image = utils_tf.clip_by_value(input_image + clipped_perturbation, clip_min, clip_max)
return new_image - input_image # depends on [control=['with'], data=[]] |
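# Usage sketch for _project_perturbation() above, rewritten with numpy
# instead of TensorFlow (an assumption for illustration): clip the
# perturbation into the L-infinity ball, clip the perturbed image into
# [clip_min, clip_max], and return the effective perturbation.
import numpy as np

def project_perturbation(perturbation, epsilon, image, clip_min=0.0, clip_max=1.0):
    clipped = np.clip(perturbation, -epsilon, epsilon)
    new_image = np.clip(image + clipped, clip_min, clip_max)
    return new_image - image

img = np.array([0.1, 0.95])
print(project_perturbation(np.array([0.5, 0.5]), 0.3, img))  # -> [0.3 0.05]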
def next_sequence_id(session, sequence_ids, parent_vid, table_class, force_query = False):
"""
    Return the next sequence id for an object, identified by the vid of the parent object, and the database prefix
    for the child object. On the first call, this will load the max sequence number
    from the database, but subsequent calls will run in process, so this isn't suitable for
multi-process operation -- all of the tables in a dataset should be created by one process
The child table must have a sequence_id value.
:param session: Database session or connection ( must have an execute() method )
:param sequence_ids: A dict for caching sequence ids
:param parent_vid: The VID of the parent object, which sets the namespace for the sequence
:param table_class: Table class of the child object, the one getting a number
:return:
"""
from sqlalchemy import text
seq_col = table_class.sequence_id.property.columns[0].name
try:
parent_col = table_class._parent_col
except AttributeError:
parent_col = table_class.d_vid.property.columns[0].name
assert bool(parent_vid)
key = (parent_vid, table_class.__name__)
number = sequence_ids.get(key, None)
if (not number and session) or force_query:
sql = text("SELECT max({seq_col})+1 FROM {table} WHERE {parent_col} = '{vid}'"
.format(table=table_class.__tablename__, parent_col=parent_col,
seq_col=seq_col, vid=parent_vid))
max_id, = session.execute(sql).fetchone()
if not max_id:
max_id = 1
sequence_ids[key] = int(max_id)
elif not session:
# There was no session set. This should only happen when the parent object is new, and therefore,
        # there are no child numbers, so the appropriate starting number is 1. If the object is not new,
# there will be conflicts.
sequence_ids[key] = 1
else:
        # There is already a cached number for this key, so just increment it
sequence_ids[key] += 1
return sequence_ids[key] | def function[next_sequence_id, parameter[session, sequence_ids, parent_vid, table_class, force_query]]:
constant[
    Return the next sequence id for an object, identified by the vid of the parent object, and the database prefix
    for the child object. On the first call, this will load the max sequence number
    from the database, but subsequent calls will run in process, so this isn't suitable for
multi-process operation -- all of the tables in a dataset should be created by one process
The child table must have a sequence_id value.
:param session: Database session or connection ( must have an execute() method )
:param sequence_ids: A dict for caching sequence ids
:param parent_vid: The VID of the parent object, which sets the namespace for the sequence
:param table_class: Table class of the child object, the one getting a number
:return:
]
from relative_module[sqlalchemy] import module[text]
variable[seq_col] assign[=] call[name[table_class].sequence_id.property.columns][constant[0]].name
<ast.Try object at 0x7da18dc041f0>
assert[call[name[bool], parameter[name[parent_vid]]]]
variable[key] assign[=] tuple[[<ast.Name object at 0x7da20e963160>, <ast.Attribute object at 0x7da20e963370>]]
variable[number] assign[=] call[name[sequence_ids].get, parameter[name[key], constant[None]]]
if <ast.BoolOp object at 0x7da20e961b40> begin[:]
variable[sql] assign[=] call[name[text], parameter[call[constant[SELECT max({seq_col})+1 FROM {table} WHERE {parent_col} = '{vid}'].format, parameter[]]]]
<ast.Tuple object at 0x7da20e963dc0> assign[=] call[call[name[session].execute, parameter[name[sql]]].fetchone, parameter[]]
if <ast.UnaryOp object at 0x7da18f58faf0> begin[:]
variable[max_id] assign[=] constant[1]
call[name[sequence_ids]][name[key]] assign[=] call[name[int], parameter[name[max_id]]]
return[call[name[sequence_ids]][name[key]]] | keyword[def] identifier[next_sequence_id] ( identifier[session] , identifier[sequence_ids] , identifier[parent_vid] , identifier[table_class] , identifier[force_query] = keyword[False] ):
literal[string]
keyword[from] identifier[sqlalchemy] keyword[import] identifier[text]
identifier[seq_col] = identifier[table_class] . identifier[sequence_id] . identifier[property] . identifier[columns] [ literal[int] ]. identifier[name]
keyword[try] :
identifier[parent_col] = identifier[table_class] . identifier[_parent_col]
keyword[except] identifier[AttributeError] :
identifier[parent_col] = identifier[table_class] . identifier[d_vid] . identifier[property] . identifier[columns] [ literal[int] ]. identifier[name]
keyword[assert] identifier[bool] ( identifier[parent_vid] )
identifier[key] =( identifier[parent_vid] , identifier[table_class] . identifier[__name__] )
identifier[number] = identifier[sequence_ids] . identifier[get] ( identifier[key] , keyword[None] )
keyword[if] ( keyword[not] identifier[number] keyword[and] identifier[session] ) keyword[or] identifier[force_query] :
identifier[sql] = identifier[text] ( literal[string]
. identifier[format] ( identifier[table] = identifier[table_class] . identifier[__tablename__] , identifier[parent_col] = identifier[parent_col] ,
identifier[seq_col] = identifier[seq_col] , identifier[vid] = identifier[parent_vid] ))
identifier[max_id] ,= identifier[session] . identifier[execute] ( identifier[sql] ). identifier[fetchone] ()
keyword[if] keyword[not] identifier[max_id] :
identifier[max_id] = literal[int]
identifier[sequence_ids] [ identifier[key] ]= identifier[int] ( identifier[max_id] )
keyword[elif] keyword[not] identifier[session] :
identifier[sequence_ids] [ identifier[key] ]= literal[int]
keyword[else] :
identifier[sequence_ids] [ identifier[key] ]+= literal[int]
keyword[return] identifier[sequence_ids] [ identifier[key] ] | def next_sequence_id(session, sequence_ids, parent_vid, table_class, force_query=False):
"""
Return the next sequence id for a object, identified by the vid of the parent object, and the database prefix
for the child object. On the first call, will load the max sequence number
from the database, but subsequence calls will run in process, so this isn't suitable for
multi-process operation -- all of the tables in a dataset should be created by one process
The child table must have a sequence_id value.
:param session: Database session or connection ( must have an execute() method )
:param sequence_ids: A dict for caching sequence ids
:param parent_vid: The VID of the parent object, which sets the namespace for the sequence
:param table_class: Table class of the child object, the one getting a number
:return:
"""
from sqlalchemy import text
seq_col = table_class.sequence_id.property.columns[0].name
try:
parent_col = table_class._parent_col # depends on [control=['try'], data=[]]
except AttributeError:
parent_col = table_class.d_vid.property.columns[0].name # depends on [control=['except'], data=[]]
assert bool(parent_vid)
key = (parent_vid, table_class.__name__)
number = sequence_ids.get(key, None)
if not number and session or force_query:
sql = text("SELECT max({seq_col})+1 FROM {table} WHERE {parent_col} = '{vid}'".format(table=table_class.__tablename__, parent_col=parent_col, seq_col=seq_col, vid=parent_vid))
(max_id,) = session.execute(sql).fetchone()
if not max_id:
max_id = 1 # depends on [control=['if'], data=[]]
sequence_ids[key] = int(max_id) # depends on [control=['if'], data=[]]
elif not session:
        # There was no session set. This should only happen when the parent object is new, and
        # therefore there are no child numbers, so the appropriate starting number is 1. If the
        # object is not new, there will be conflicts.
sequence_ids[key] = 1 # depends on [control=['if'], data=[]]
else:
        # A number was previously cached for this parent, so just increment it
sequence_ids[key] += 1
return sequence_ids[key] |
def api_routes(self, callsign: str) -> Tuple[Airport, ...]:
"""Returns the route associated to a callsign."""
from .. import airports
c = requests.get(
f"https://opensky-network.org/api/routes?callsign={callsign}"
)
if c.status_code == 404:
raise ValueError("Unknown callsign")
if c.status_code != 200:
raise ValueError(c.content.decode())
json = c.json()
return tuple(airports[a] for a in json["route"]) | def function[api_routes, parameter[self, callsign]]:
constant[Returns the route associated to a callsign.]
from relative_module[None] import module[airports]
variable[c] assign[=] call[name[requests].get, parameter[<ast.JoinedStr object at 0x7da204346a10>]]
if compare[name[c].status_code equal[==] constant[404]] begin[:]
<ast.Raise object at 0x7da204345ab0>
if compare[name[c].status_code not_equal[!=] constant[200]] begin[:]
<ast.Raise object at 0x7da204346110>
variable[json] assign[=] call[name[c].json, parameter[]]
return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da204344b20>]]] | keyword[def] identifier[api_routes] ( identifier[self] , identifier[callsign] : identifier[str] )-> identifier[Tuple] [ identifier[Airport] ,...]:
literal[string]
keyword[from] .. keyword[import] identifier[airports]
identifier[c] = identifier[requests] . identifier[get] (
literal[string]
)
keyword[if] identifier[c] . identifier[status_code] == literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[c] . identifier[status_code] != literal[int] :
keyword[raise] identifier[ValueError] ( identifier[c] . identifier[content] . identifier[decode] ())
identifier[json] = identifier[c] . identifier[json] ()
keyword[return] identifier[tuple] ( identifier[airports] [ identifier[a] ] keyword[for] identifier[a] keyword[in] identifier[json] [ literal[string] ]) | def api_routes(self, callsign: str) -> Tuple[Airport, ...]:
"""Returns the route associated to a callsign."""
from .. import airports
c = requests.get(f'https://opensky-network.org/api/routes?callsign={callsign}')
if c.status_code == 404:
raise ValueError('Unknown callsign') # depends on [control=['if'], data=[]]
if c.status_code != 200:
raise ValueError(c.content.decode()) # depends on [control=['if'], data=[]]
json = c.json()
return tuple((airports[a] for a in json['route'])) |
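
For illustration, the same OpenSky routes endpoint can be queried with plain requests, outside the surrounding class; this is a sketch, and the callsign below is only a placeholder.

import requests

def fetch_route(callsign):
    # Same public endpoint as api_routes above; needs network access.
    r = requests.get("https://opensky-network.org/api/routes?callsign=" + callsign)
    if r.status_code == 404:
        raise ValueError("Unknown callsign")
    r.raise_for_status()
    return r.json()["route"]  # list of airport codes, per the method above

# print(fetch_route("AFR291"))  # placeholder callsign
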
def GetFormatStringAttributeNames(self):
"""Retrieves the attribute names in the format string.
Returns:
set(str): attribute names.
"""
if self._format_string_attribute_names is None:
self._format_string_attribute_names = []
for format_string_piece in self.FORMAT_STRING_PIECES:
attribute_names = self._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall(
format_string_piece)
if attribute_names:
self._format_string_attribute_names.extend(attribute_names)
return set(self._format_string_attribute_names) | def function[GetFormatStringAttributeNames, parameter[self]]:
constant[Retrieves the attribute names in the format string.
Returns:
set(str): attribute names.
]
if compare[name[self]._format_string_attribute_names is constant[None]] begin[:]
name[self]._format_string_attribute_names assign[=] list[[]]
for taget[name[format_string_piece]] in starred[name[self].FORMAT_STRING_PIECES] begin[:]
variable[attribute_names] assign[=] call[name[self]._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall, parameter[name[format_string_piece]]]
if name[attribute_names] begin[:]
call[name[self]._format_string_attribute_names.extend, parameter[name[attribute_names]]]
return[call[name[set], parameter[name[self]._format_string_attribute_names]]] | keyword[def] identifier[GetFormatStringAttributeNames] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_format_string_attribute_names] keyword[is] keyword[None] :
identifier[self] . identifier[_format_string_attribute_names] =[]
keyword[for] identifier[format_string_piece] keyword[in] identifier[self] . identifier[FORMAT_STRING_PIECES] :
identifier[attribute_names] = identifier[self] . identifier[_FORMAT_STRING_ATTRIBUTE_NAME_RE] . identifier[findall] (
identifier[format_string_piece] )
keyword[if] identifier[attribute_names] :
identifier[self] . identifier[_format_string_attribute_names] . identifier[extend] ( identifier[attribute_names] )
keyword[return] identifier[set] ( identifier[self] . identifier[_format_string_attribute_names] ) | def GetFormatStringAttributeNames(self):
"""Retrieves the attribute names in the format string.
Returns:
set(str): attribute names.
"""
if self._format_string_attribute_names is None:
self._format_string_attribute_names = []
for format_string_piece in self.FORMAT_STRING_PIECES:
attribute_names = self._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall(format_string_piece)
if attribute_names:
self._format_string_attribute_names.extend(attribute_names) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['format_string_piece']] # depends on [control=['if'], data=[]]
return set(self._format_string_attribute_names) |
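
A minimal standalone sketch of the same extraction; the regex is an assumption standing in for _FORMAT_STRING_ATTRIBUTE_NAME_RE, which is not shown in this record.

import re

# Assumed pattern: attributes appear as {name} placeholders in format strings.
ATTRIBUTE_NAME_RE = re.compile(r'\{(\w+)\}')

def get_attribute_names(format_string_pieces):
    names = []
    for piece in format_string_pieces:
        names.extend(ATTRIBUTE_NAME_RE.findall(piece))
    return set(names)

print(get_attribute_names(['{timestamp} {hostname}', 'user: {username}']))
# e.g. {'timestamp', 'hostname', 'username'}
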
def haversine(self, other):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
        lon1, lat1, lon2, lat2 = map(radians,
                                     [self.lon.low, self['lat'].low,
                                      other.lon.low, other['lat'].low])
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2) ** 2
c = 2 * asin(sqrt(a))
# multiply by radius of the earth
# km = 6367 * c
miles = 3961 * c
return miles | def function[haversine, parameter[self, other]]:
constant[
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
]
<ast.Tuple object at 0x7da1b13d5d50> assign[=] call[name[map], parameter[name[radians], list[[<ast.Attribute object at 0x7da1b13d5810>, <ast.Attribute object at 0x7da1b13d6980>, <ast.Attribute object at 0x7da1b13d5a50>, <ast.Attribute object at 0x7da1b13d4220>]]]]
variable[dlon] assign[=] binary_operation[name[lon2] - name[lon1]]
variable[dlat] assign[=] binary_operation[name[lat2] - name[lat1]]
variable[a] assign[=] binary_operation[binary_operation[call[name[sin], parameter[binary_operation[name[dlat] / constant[2]]]] ** constant[2]] + binary_operation[binary_operation[call[name[cos], parameter[name[lat1]]] * call[name[cos], parameter[name[lat2]]]] * binary_operation[call[name[sin], parameter[binary_operation[name[dlon] / constant[2]]]] ** constant[2]]]]
variable[c] assign[=] binary_operation[constant[2] * call[name[asin], parameter[call[name[sqrt], parameter[name[a]]]]]]
variable[miles] assign[=] binary_operation[constant[3961] * name[c]]
return[name[miles]] | keyword[def] identifier[haversine] ( identifier[self] , identifier[other] ):
literal[string]
identifier[lon1] , identifier[lat1] , identifier[lon2] , identifier[lat2] = identifier[map] ( identifier[radians] ,[ identifier[self] . identifier[lon] . identifier[low] , identifier[self] [ literal[string] ]. identifier[low] , identifier[other] . identifier[lon] . identifier[low] ,
identifier[other] [ literal[string] ]. identifier[low] ])
identifier[dlon] = identifier[lon2] - identifier[lon1]
identifier[dlat] = identifier[lat2] - identifier[lat1]
identifier[a] = identifier[sin] ( identifier[dlat] / literal[int] )** literal[int] + identifier[cos] ( identifier[lat1] )* identifier[cos] ( identifier[lat2] )* identifier[sin] ( identifier[dlon] / literal[int] )** literal[int]
identifier[c] = literal[int] * identifier[asin] ( identifier[sqrt] ( identifier[a] ))
identifier[miles] = literal[int] * identifier[c]
keyword[return] identifier[miles] | def haversine(self, other):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
(lon1, lat1, lon2, lat2) = map(radians, [self.lon.low, self['lat'].low, other.lon.low, other['lat'].low])
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
c = 2 * asin(sqrt(a))
# multiply by radius of the earth
# km = 6367 * c
miles = 3961 * c
return miles |
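
The same formula as a free function, runnable without the container class; the coordinates below are illustrative only.

from math import asin, cos, radians, sin, sqrt

def haversine_miles(lon1, lat1, lon2, lat2):
    # Great-circle distance between two (lon, lat) points in decimal degrees.
    lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    return 3961 * 2 * asin(sqrt(a))  # 3961 is the Earth radius in miles used above

print(haversine_miles(-87.63, 41.88, -73.98, 40.75))  # Chicago -> New York, roughly 710
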
def delete(self, option=None):
"""Delete the current document in the Firestore database.
Args:
option (Optional[~.firestore_v1beta1.client.WriteOption]): A
write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
google.protobuf.timestamp_pb2.Timestamp: The time that the delete
request was received by the server. If the document did not exist
when the delete was sent (i.e. nothing was deleted), this method
will still succeed and will still return the time that the
request was received by the server.
"""
write_pb = _helpers.pb_for_delete(self._document_path, option)
commit_response = self._client._firestore_api.commit(
self._client._database_string,
[write_pb],
transaction=None,
metadata=self._client._rpc_metadata,
)
return commit_response.commit_time | def function[delete, parameter[self, option]]:
constant[Delete the current document in the Firestore database.
Args:
option (Optional[~.firestore_v1beta1.client.WriteOption]): A
write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
google.protobuf.timestamp_pb2.Timestamp: The time that the delete
request was received by the server. If the document did not exist
when the delete was sent (i.e. nothing was deleted), this method
will still succeed and will still return the time that the
request was received by the server.
]
variable[write_pb] assign[=] call[name[_helpers].pb_for_delete, parameter[name[self]._document_path, name[option]]]
variable[commit_response] assign[=] call[name[self]._client._firestore_api.commit, parameter[name[self]._client._database_string, list[[<ast.Name object at 0x7da2047e90f0>]]]]
return[name[commit_response].commit_time] | keyword[def] identifier[delete] ( identifier[self] , identifier[option] = keyword[None] ):
literal[string]
identifier[write_pb] = identifier[_helpers] . identifier[pb_for_delete] ( identifier[self] . identifier[_document_path] , identifier[option] )
identifier[commit_response] = identifier[self] . identifier[_client] . identifier[_firestore_api] . identifier[commit] (
identifier[self] . identifier[_client] . identifier[_database_string] ,
[ identifier[write_pb] ],
identifier[transaction] = keyword[None] ,
identifier[metadata] = identifier[self] . identifier[_client] . identifier[_rpc_metadata] ,
)
keyword[return] identifier[commit_response] . identifier[commit_time] | def delete(self, option=None):
"""Delete the current document in the Firestore database.
Args:
option (Optional[~.firestore_v1beta1.client.WriteOption]): A
write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
google.protobuf.timestamp_pb2.Timestamp: The time that the delete
request was received by the server. If the document did not exist
when the delete was sent (i.e. nothing was deleted), this method
will still succeed and will still return the time that the
request was received by the server.
"""
write_pb = _helpers.pb_for_delete(self._document_path, option)
commit_response = self._client._firestore_api.commit(self._client._database_string, [write_pb], transaction=None, metadata=self._client._rpc_metadata)
return commit_response.commit_time |
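
A typical call site for this method with the google-cloud-firestore client; the project, collection, and document ids are placeholders, and valid credentials are required.

from google.cloud import firestore

client = firestore.Client(project="my-project")             # placeholder project id
doc_ref = client.collection("users").document("alovelace")  # placeholder path
delete_time = doc_ref.delete()  # returns the server commit timestamp, as documented above
print(delete_time)
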
def execute_request(server_url, creds, namespace, classname):
""" Open a connection with the server_url and creds, and
    enumerate instances defined by the function's namespace and
classname arguments.
Displays either the error return or the mof for instances
returned.
"""
    print('Requesting url=%s, ns=%s, class=%s' %
          (server_url, namespace, classname))
try:
# Create a connection
CONN = WBEMConnection(server_url, creds,
default_namespace=namespace,
no_verification=True)
        # Issue the request to EnumerateInstances on the defined class
        INSTANCES = CONN.EnumerateInstances(classname)
        # Display characteristics of the result object
        print('instances type=%s len=%s' % (type(INSTANCES),
                                            len(INSTANCES)))
        # Display the MOF output
for inst in INSTANCES:
print('path=%s\n' % inst.path)
print(inst.tomof())
# handle any exception
except Error as err:
# If CIMError, display CIMError attributes
if isinstance(err, CIMError):
            print('Operation Failed: CIMError: code=%s, Description=%s' %
                  (err.status_code_name, err.status_description))
else:
print ("Operation failed: %s" % err)
sys.exit(1) | def function[execute_request, parameter[server_url, creds, namespace, classname]]:
constant[ Open a connection with the server_url and creds, and
    enumerate instances defined by the function's namespace and
classname arguments.
Displays either the error return or the mof for instances
returned.
]
call[name[print], parameter[binary_operation[constant[Requesting url=%s, ns=%s, class=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204344880>, <ast.Name object at 0x7da2043475b0>, <ast.Name object at 0x7da204344190>]]]]]
<ast.Try object at 0x7da204345960> | keyword[def] identifier[execute_request] ( identifier[server_url] , identifier[creds] , identifier[namespace] , identifier[classname] ):
literal[string]
identifier[print] ( literal[string] %( identifier[server_url] , identifier[namespace] , identifier[classname] ))
keyword[try] :
identifier[CONN] = identifier[WBEMConnection] ( identifier[server_url] , identifier[creds] ,
identifier[default_namespace] = identifier[namespace] ,
identifier[no_verification] = keyword[True] )
identifier[INSTANCES] = identifier[CONN] . identifier[EnumerateInstances] ( identifier[classname] )
identifier[print] ( literal[string] %( identifier[type] ( identifier[INSTANCES] ),
identifier[len] ( identifier[INSTANCES] )))
keyword[for] identifier[inst] keyword[in] identifier[INSTANCES] :
identifier[print] ( literal[string] % identifier[inst] . identifier[path] )
identifier[print] ( identifier[inst] . identifier[tomof] ())
keyword[except] identifier[Error] keyword[as] identifier[err] :
keyword[if] identifier[isinstance] ( identifier[err] , identifier[CIMError] ):
identifier[print] ( literal[string] %( identifier[err] . identifier[status_code_name] , identifier[err] . identifier[status_description] ))
keyword[else] :
identifier[print] ( literal[string] % identifier[err] )
identifier[sys] . identifier[exit] ( literal[int] ) | def execute_request(server_url, creds, namespace, classname):
""" Open a connection with the server_url and creds, and
    enumerate instances defined by the function's namespace and
classname arguments.
Displays either the error return or the mof for instances
returned.
"""
print('Requesting url=%s, ns=%s, class=%s' % (server_url, namespace, classname))
try:
# Create a connection
CONN = WBEMConnection(server_url, creds, default_namespace=namespace, no_verification=True)
#Issue the request to EnumerateInstances on the defined class
INSTANCES = CONN.EnumerateInstances(classname)
#Display of characteristics of the result object
print('instances type=%s len=%s' % (type(INSTANCES), len(INSTANCES)))
#display the mof output
for inst in INSTANCES:
print('path=%s\n' % inst.path)
print(inst.tomof()) # depends on [control=['for'], data=['inst']] # depends on [control=['try'], data=[]]
# handle any exception
except Error as err:
# If CIMError, display CIMError attributes
if isinstance(err, CIMError):
print('Operation Failed: CIMError: code=%s, Description=%s' % (err.status_code_name, err.status_description)) # depends on [control=['if'], data=[]]
else:
print('Operation failed: %s' % err)
sys.exit(1) # depends on [control=['except'], data=['err']] |
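
An example invocation of execute_request; the server URL, credentials, and class name are placeholders for a reachable WBEM server.

if __name__ == '__main__':
    SERVER_URL = 'https://localhost:5989'   # placeholder CIM server
    CREDS = ('user', 'password')            # placeholder credentials
    execute_request(SERVER_URL, CREDS, 'root/cimv2', 'CIM_ComputerSystem')
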
def get_model(self):
"""
Returns the fitted bayesian model
Example
----------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_model()
<pgmpy.models.BayesianModel.BayesianModel object at 0x7f20af154320>
"""
try:
model = BayesianModel()
model.add_nodes_from(self.variable_names)
model.add_edges_from(self.variable_edges)
model.name = self.network_name
tabular_cpds = []
for var in sorted(self.variable_cpds.keys()):
values = self.variable_cpds[var]
cpd = TabularCPD(var, len(self.variable_states[var]), values,
evidence=self.variable_parents[var],
evidence_card=[len(self.variable_states[evidence_var])
for evidence_var in self.variable_parents[var]])
tabular_cpds.append(cpd)
model.add_cpds(*tabular_cpds)
for node, properties in self.variable_properties.items():
for prop in properties:
prop_name, prop_value = map(lambda t: t.strip(), prop.split('='))
model.node[node][prop_name] = prop_value
return model
except AttributeError:
raise AttributeError('First get states of variables, edges, parents and network name') | def function[get_model, parameter[self]]:
constant[
Returns the fitted bayesian model
Example
----------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_model()
<pgmpy.models.BayesianModel.BayesianModel object at 0x7f20af154320>
]
<ast.Try object at 0x7da20c6aa5f0> | keyword[def] identifier[get_model] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[model] = identifier[BayesianModel] ()
identifier[model] . identifier[add_nodes_from] ( identifier[self] . identifier[variable_names] )
identifier[model] . identifier[add_edges_from] ( identifier[self] . identifier[variable_edges] )
identifier[model] . identifier[name] = identifier[self] . identifier[network_name]
identifier[tabular_cpds] =[]
keyword[for] identifier[var] keyword[in] identifier[sorted] ( identifier[self] . identifier[variable_cpds] . identifier[keys] ()):
identifier[values] = identifier[self] . identifier[variable_cpds] [ identifier[var] ]
identifier[cpd] = identifier[TabularCPD] ( identifier[var] , identifier[len] ( identifier[self] . identifier[variable_states] [ identifier[var] ]), identifier[values] ,
identifier[evidence] = identifier[self] . identifier[variable_parents] [ identifier[var] ],
identifier[evidence_card] =[ identifier[len] ( identifier[self] . identifier[variable_states] [ identifier[evidence_var] ])
keyword[for] identifier[evidence_var] keyword[in] identifier[self] . identifier[variable_parents] [ identifier[var] ]])
identifier[tabular_cpds] . identifier[append] ( identifier[cpd] )
identifier[model] . identifier[add_cpds] (* identifier[tabular_cpds] )
keyword[for] identifier[node] , identifier[properties] keyword[in] identifier[self] . identifier[variable_properties] . identifier[items] ():
keyword[for] identifier[prop] keyword[in] identifier[properties] :
identifier[prop_name] , identifier[prop_value] = identifier[map] ( keyword[lambda] identifier[t] : identifier[t] . identifier[strip] (), identifier[prop] . identifier[split] ( literal[string] ))
identifier[model] . identifier[node] [ identifier[node] ][ identifier[prop_name] ]= identifier[prop_value]
keyword[return] identifier[model]
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[AttributeError] ( literal[string] ) | def get_model(self):
"""
Returns the fitted bayesian model
Example
----------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_model()
<pgmpy.models.BayesianModel.BayesianModel object at 0x7f20af154320>
"""
try:
model = BayesianModel()
model.add_nodes_from(self.variable_names)
model.add_edges_from(self.variable_edges)
model.name = self.network_name
tabular_cpds = []
for var in sorted(self.variable_cpds.keys()):
values = self.variable_cpds[var]
cpd = TabularCPD(var, len(self.variable_states[var]), values, evidence=self.variable_parents[var], evidence_card=[len(self.variable_states[evidence_var]) for evidence_var in self.variable_parents[var]])
tabular_cpds.append(cpd) # depends on [control=['for'], data=['var']]
model.add_cpds(*tabular_cpds)
for (node, properties) in self.variable_properties.items():
for prop in properties:
(prop_name, prop_value) = map(lambda t: t.strip(), prop.split('='))
model.node[node][prop_name] = prop_value # depends on [control=['for'], data=['prop']] # depends on [control=['for'], data=[]]
return model # depends on [control=['try'], data=[]]
except AttributeError:
raise AttributeError('First get states of variables, edges, parents and network name') # depends on [control=['except'], data=[]] |
def scroll_deck(self, decknum, scroll_x, scroll_y):
"""Move a deck."""
self.scroll_deck_x(decknum, scroll_x)
self.scroll_deck_y(decknum, scroll_y) | def function[scroll_deck, parameter[self, decknum, scroll_x, scroll_y]]:
constant[Move a deck.]
call[name[self].scroll_deck_x, parameter[name[decknum], name[scroll_x]]]
call[name[self].scroll_deck_y, parameter[name[decknum], name[scroll_y]]] | keyword[def] identifier[scroll_deck] ( identifier[self] , identifier[decknum] , identifier[scroll_x] , identifier[scroll_y] ):
literal[string]
identifier[self] . identifier[scroll_deck_x] ( identifier[decknum] , identifier[scroll_x] )
identifier[self] . identifier[scroll_deck_y] ( identifier[decknum] , identifier[scroll_y] ) | def scroll_deck(self, decknum, scroll_x, scroll_y):
"""Move a deck."""
self.scroll_deck_x(decknum, scroll_x)
self.scroll_deck_y(decknum, scroll_y) |
def btc_make_p2wpkh_address( pubkey_hex ):
"""
Make a p2wpkh address from a hex pubkey
"""
pubkey_hex = keylib.key_formatting.compress(pubkey_hex)
hash160_bin = hashing.bin_hash160(pubkey_hex.decode('hex'))
return segwit_addr_encode(hash160_bin) | def function[btc_make_p2wpkh_address, parameter[pubkey_hex]]:
constant[
Make a p2wpkh address from a hex pubkey
]
variable[pubkey_hex] assign[=] call[name[keylib].key_formatting.compress, parameter[name[pubkey_hex]]]
variable[hash160_bin] assign[=] call[name[hashing].bin_hash160, parameter[call[name[pubkey_hex].decode, parameter[constant[hex]]]]]
return[call[name[segwit_addr_encode], parameter[name[hash160_bin]]]] | keyword[def] identifier[btc_make_p2wpkh_address] ( identifier[pubkey_hex] ):
literal[string]
identifier[pubkey_hex] = identifier[keylib] . identifier[key_formatting] . identifier[compress] ( identifier[pubkey_hex] )
identifier[hash160_bin] = identifier[hashing] . identifier[bin_hash160] ( identifier[pubkey_hex] . identifier[decode] ( literal[string] ))
keyword[return] identifier[segwit_addr_encode] ( identifier[hash160_bin] ) | def btc_make_p2wpkh_address(pubkey_hex):
"""
Make a p2wpkh address from a hex pubkey
"""
pubkey_hex = keylib.key_formatting.compress(pubkey_hex)
hash160_bin = hashing.bin_hash160(pubkey_hex.decode('hex'))
return segwit_addr_encode(hash160_bin) |
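
Note that pubkey_hex.decode('hex') is Python 2 only; below is a Python 3 sketch of the hash160 step. The bech32 step (segwit_addr_encode) is not reproduced here, and ripemd160 support depends on the local OpenSSL build.

import hashlib

def hash160(pubkey_hex):
    # RIPEMD160(SHA256(pubkey)): the witness program hashed for a p2wpkh address.
    sha = hashlib.sha256(bytes.fromhex(pubkey_hex)).digest()
    return hashlib.new('ripemd160', sha).digest()
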
def __generate_actor(self, instance_id, operator, input, output):
"""Generates an actor that will execute a particular instance of
the logical operator
Attributes:
instance_id (UUID): The id of the instance the actor will execute.
operator (Operator): The metadata of the logical operator.
input (DataInput): The input gate that manages input channels of
the instance (see: DataInput in communication.py).
            output (DataOutput): The output gate that manages output channels
of the instance (see: DataOutput in communication.py).
"""
actor_id = (operator.id, instance_id)
# Record the physical dataflow graph (for debugging purposes)
self.__add_channel(actor_id, input, output)
# Select actor to construct
if operator.type == OpType.Source:
source = operator_instance.Source.remote(actor_id, operator, input,
output)
source.register_handle.remote(source)
return source.start.remote()
elif operator.type == OpType.Map:
map = operator_instance.Map.remote(actor_id, operator, input,
output)
map.register_handle.remote(map)
return map.start.remote()
elif operator.type == OpType.FlatMap:
flatmap = operator_instance.FlatMap.remote(actor_id, operator,
input, output)
flatmap.register_handle.remote(flatmap)
return flatmap.start.remote()
elif operator.type == OpType.Filter:
filter = operator_instance.Filter.remote(actor_id, operator, input,
output)
filter.register_handle.remote(filter)
return filter.start.remote()
elif operator.type == OpType.Reduce:
reduce = operator_instance.Reduce.remote(actor_id, operator, input,
output)
reduce.register_handle.remote(reduce)
return reduce.start.remote()
elif operator.type == OpType.TimeWindow:
pass
elif operator.type == OpType.KeyBy:
keyby = operator_instance.KeyBy.remote(actor_id, operator, input,
output)
keyby.register_handle.remote(keyby)
return keyby.start.remote()
elif operator.type == OpType.Sum:
sum = operator_instance.Reduce.remote(actor_id, operator, input,
output)
# Register target handle at state actor
state_actor = operator.state_actor
if state_actor is not None:
state_actor.register_target.remote(sum)
# Register own handle
sum.register_handle.remote(sum)
return sum.start.remote()
elif operator.type == OpType.Sink:
pass
elif operator.type == OpType.Inspect:
inspect = operator_instance.Inspect.remote(actor_id, operator,
input, output)
inspect.register_handle.remote(inspect)
return inspect.start.remote()
elif operator.type == OpType.ReadTextFile:
# TODO (john): Colocate the source with the input file
read = operator_instance.ReadTextFile.remote(
actor_id, operator, input, output)
read.register_handle.remote(read)
return read.start.remote()
else: # TODO (john): Add support for other types of operators
sys.exit("Unrecognized or unsupported {} operator type.".format(
operator.type)) | def function[__generate_actor, parameter[self, instance_id, operator, input, output]]:
constant[Generates an actor that will execute a particular instance of
the logical operator
Attributes:
instance_id (UUID): The id of the instance the actor will execute.
operator (Operator): The metadata of the logical operator.
input (DataInput): The input gate that manages input channels of
the instance (see: DataInput in communication.py).
    output (DataOutput): The output gate that manages output channels
of the instance (see: DataOutput in communication.py).
]
variable[actor_id] assign[=] tuple[[<ast.Attribute object at 0x7da2044c1d20>, <ast.Name object at 0x7da2044c1270>]]
call[name[self].__add_channel, parameter[name[actor_id], name[input], name[output]]]
if compare[name[operator].type equal[==] name[OpType].Source] begin[:]
variable[source] assign[=] call[name[operator_instance].Source.remote, parameter[name[actor_id], name[operator], name[input], name[output]]]
call[name[source].register_handle.remote, parameter[name[source]]]
return[call[name[source].start.remote, parameter[]]] | keyword[def] identifier[__generate_actor] ( identifier[self] , identifier[instance_id] , identifier[operator] , identifier[input] , identifier[output] ):
literal[string]
identifier[actor_id] =( identifier[operator] . identifier[id] , identifier[instance_id] )
identifier[self] . identifier[__add_channel] ( identifier[actor_id] , identifier[input] , identifier[output] )
keyword[if] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Source] :
identifier[source] = identifier[operator_instance] . identifier[Source] . identifier[remote] ( identifier[actor_id] , identifier[operator] , identifier[input] ,
identifier[output] )
identifier[source] . identifier[register_handle] . identifier[remote] ( identifier[source] )
keyword[return] identifier[source] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Map] :
identifier[map] = identifier[operator_instance] . identifier[Map] . identifier[remote] ( identifier[actor_id] , identifier[operator] , identifier[input] ,
identifier[output] )
identifier[map] . identifier[register_handle] . identifier[remote] ( identifier[map] )
keyword[return] identifier[map] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[FlatMap] :
identifier[flatmap] = identifier[operator_instance] . identifier[FlatMap] . identifier[remote] ( identifier[actor_id] , identifier[operator] ,
identifier[input] , identifier[output] )
identifier[flatmap] . identifier[register_handle] . identifier[remote] ( identifier[flatmap] )
keyword[return] identifier[flatmap] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Filter] :
identifier[filter] = identifier[operator_instance] . identifier[Filter] . identifier[remote] ( identifier[actor_id] , identifier[operator] , identifier[input] ,
identifier[output] )
identifier[filter] . identifier[register_handle] . identifier[remote] ( identifier[filter] )
keyword[return] identifier[filter] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Reduce] :
identifier[reduce] = identifier[operator_instance] . identifier[Reduce] . identifier[remote] ( identifier[actor_id] , identifier[operator] , identifier[input] ,
identifier[output] )
identifier[reduce] . identifier[register_handle] . identifier[remote] ( identifier[reduce] )
keyword[return] identifier[reduce] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[TimeWindow] :
keyword[pass]
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[KeyBy] :
identifier[keyby] = identifier[operator_instance] . identifier[KeyBy] . identifier[remote] ( identifier[actor_id] , identifier[operator] , identifier[input] ,
identifier[output] )
identifier[keyby] . identifier[register_handle] . identifier[remote] ( identifier[keyby] )
keyword[return] identifier[keyby] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Sum] :
identifier[sum] = identifier[operator_instance] . identifier[Reduce] . identifier[remote] ( identifier[actor_id] , identifier[operator] , identifier[input] ,
identifier[output] )
identifier[state_actor] = identifier[operator] . identifier[state_actor]
keyword[if] identifier[state_actor] keyword[is] keyword[not] keyword[None] :
identifier[state_actor] . identifier[register_target] . identifier[remote] ( identifier[sum] )
identifier[sum] . identifier[register_handle] . identifier[remote] ( identifier[sum] )
keyword[return] identifier[sum] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Sink] :
keyword[pass]
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[Inspect] :
identifier[inspect] = identifier[operator_instance] . identifier[Inspect] . identifier[remote] ( identifier[actor_id] , identifier[operator] ,
identifier[input] , identifier[output] )
identifier[inspect] . identifier[register_handle] . identifier[remote] ( identifier[inspect] )
keyword[return] identifier[inspect] . identifier[start] . identifier[remote] ()
keyword[elif] identifier[operator] . identifier[type] == identifier[OpType] . identifier[ReadTextFile] :
identifier[read] = identifier[operator_instance] . identifier[ReadTextFile] . identifier[remote] (
identifier[actor_id] , identifier[operator] , identifier[input] , identifier[output] )
identifier[read] . identifier[register_handle] . identifier[remote] ( identifier[read] )
keyword[return] identifier[read] . identifier[start] . identifier[remote] ()
keyword[else] :
identifier[sys] . identifier[exit] ( literal[string] . identifier[format] (
identifier[operator] . identifier[type] )) | def __generate_actor(self, instance_id, operator, input, output):
"""Generates an actor that will execute a particular instance of
the logical operator
Attributes:
instance_id (UUID): The id of the instance the actor will execute.
operator (Operator): The metadata of the logical operator.
input (DataInput): The input gate that manages input channels of
the instance (see: DataInput in communication.py).
    output (DataOutput): The output gate that manages output channels
of the instance (see: DataOutput in communication.py).
"""
actor_id = (operator.id, instance_id)
# Record the physical dataflow graph (for debugging purposes)
self.__add_channel(actor_id, input, output)
# Select actor to construct
if operator.type == OpType.Source:
source = operator_instance.Source.remote(actor_id, operator, input, output)
source.register_handle.remote(source)
return source.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.Map:
map = operator_instance.Map.remote(actor_id, operator, input, output)
map.register_handle.remote(map)
return map.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.FlatMap:
flatmap = operator_instance.FlatMap.remote(actor_id, operator, input, output)
flatmap.register_handle.remote(flatmap)
return flatmap.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.Filter:
filter = operator_instance.Filter.remote(actor_id, operator, input, output)
filter.register_handle.remote(filter)
return filter.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.Reduce:
reduce = operator_instance.Reduce.remote(actor_id, operator, input, output)
reduce.register_handle.remote(reduce)
return reduce.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.TimeWindow:
pass # depends on [control=['if'], data=[]]
elif operator.type == OpType.KeyBy:
keyby = operator_instance.KeyBy.remote(actor_id, operator, input, output)
keyby.register_handle.remote(keyby)
return keyby.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.Sum:
sum = operator_instance.Reduce.remote(actor_id, operator, input, output)
# Register target handle at state actor
state_actor = operator.state_actor
if state_actor is not None:
state_actor.register_target.remote(sum) # depends on [control=['if'], data=['state_actor']]
# Register own handle
sum.register_handle.remote(sum)
return sum.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.Sink:
pass # depends on [control=['if'], data=[]]
elif operator.type == OpType.Inspect:
inspect = operator_instance.Inspect.remote(actor_id, operator, input, output)
inspect.register_handle.remote(inspect)
return inspect.start.remote() # depends on [control=['if'], data=[]]
elif operator.type == OpType.ReadTextFile:
# TODO (john): Colocate the source with the input file
read = operator_instance.ReadTextFile.remote(actor_id, operator, input, output)
read.register_handle.remote(read)
return read.start.remote() # depends on [control=['if'], data=[]]
else: # TODO (john): Add support for other types of operators
sys.exit('Unrecognized or unsupported {} operator type.'.format(operator.type)) |
def symbol(name: str=None, symbol_type: Type[Symbol]=Symbol) -> 'SymbolWildcard':
"""Create a `SymbolWildcard` that matches a single `Symbol` argument.
Args:
name:
Optional variable name for the wildcard.
symbol_type:
An optional subclass of `Symbol` to further limit which kind of symbols are
matched by the wildcard.
Returns:
A `SymbolWildcard` that matches the *symbol_type*.
"""
if isinstance(name, type) and issubclass(name, Symbol) and symbol_type is Symbol:
return SymbolWildcard(name)
return SymbolWildcard(symbol_type, variable_name=name) | def function[symbol, parameter[name, symbol_type]]:
constant[Create a `SymbolWildcard` that matches a single `Symbol` argument.
Args:
name:
Optional variable name for the wildcard.
symbol_type:
An optional subclass of `Symbol` to further limit which kind of symbols are
matched by the wildcard.
Returns:
A `SymbolWildcard` that matches the *symbol_type*.
]
if <ast.BoolOp object at 0x7da204623df0> begin[:]
return[call[name[SymbolWildcard], parameter[name[name]]]]
return[call[name[SymbolWildcard], parameter[name[symbol_type]]]] | keyword[def] identifier[symbol] ( identifier[name] : identifier[str] = keyword[None] , identifier[symbol_type] : identifier[Type] [ identifier[Symbol] ]= identifier[Symbol] )-> literal[string] :
literal[string]
keyword[if] identifier[isinstance] ( identifier[name] , identifier[type] ) keyword[and] identifier[issubclass] ( identifier[name] , identifier[Symbol] ) keyword[and] identifier[symbol_type] keyword[is] identifier[Symbol] :
keyword[return] identifier[SymbolWildcard] ( identifier[name] )
keyword[return] identifier[SymbolWildcard] ( identifier[symbol_type] , identifier[variable_name] = identifier[name] ) | def symbol(name: str=None, symbol_type: Type[Symbol]=Symbol) -> 'SymbolWildcard':
"""Create a `SymbolWildcard` that matches a single `Symbol` argument.
Args:
name:
Optional variable name for the wildcard.
symbol_type:
An optional subclass of `Symbol` to further limit which kind of symbols are
matched by the wildcard.
Returns:
A `SymbolWildcard` that matches the *symbol_type*.
"""
if isinstance(name, type) and issubclass(name, Symbol) and (symbol_type is Symbol):
return SymbolWildcard(name) # depends on [control=['if'], data=[]]
return SymbolWildcard(symbol_type, variable_name=name) |
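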
def free_slave(**connection_args):
'''
Frees a slave from its master. This is a WIP, do not use.
CLI Example:
.. code-block:: bash
salt '*' mysql.free_slave
'''
slave_db = _connect(**connection_args)
if slave_db is None:
return ''
slave_cur = slave_db.cursor(MySQLdb.cursors.DictCursor)
slave_cur.execute('show slave status')
slave_status = slave_cur.fetchone()
master = {'host': slave_status['Master_Host']}
try:
# Try to connect to the master and flush logs before promoting to
# master. This may fail if the master is no longer available.
# I am also assuming that the admin password is the same on both
# servers here, and only overriding the host option in the connect
# function.
master_db = _connect(**master)
if master_db is None:
return ''
master_cur = master_db.cursor()
master_cur.execute('flush logs')
master_db.close()
except MySQLdb.OperationalError:
pass
slave_cur.execute('stop slave')
slave_cur.execute('reset master')
    slave_cur.execute("change master to MASTER_HOST=''")
slave_cur.execute('show slave status')
results = slave_cur.fetchone()
if results is None:
return 'promoted'
else:
return 'failed' | def function[free_slave, parameter[]]:
constant[
Frees a slave from its master. This is a WIP, do not use.
CLI Example:
.. code-block:: bash
salt '*' mysql.free_slave
]
variable[slave_db] assign[=] call[name[_connect], parameter[]]
if compare[name[slave_db] is constant[None]] begin[:]
return[constant[]]
variable[slave_cur] assign[=] call[name[slave_db].cursor, parameter[name[MySQLdb].cursors.DictCursor]]
call[name[slave_cur].execute, parameter[constant[show slave status]]]
variable[slave_status] assign[=] call[name[slave_cur].fetchone, parameter[]]
variable[master] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b1ae0>], [<ast.Subscript object at 0x7da20e9b3100>]]
<ast.Try object at 0x7da20e9b00a0>
call[name[slave_cur].execute, parameter[constant[stop slave]]]
call[name[slave_cur].execute, parameter[constant[reset master]]]
call[name[slave_cur].execute, parameter[constant[change master to MASTER_HOST=]]]
call[name[slave_cur].execute, parameter[constant[show slave status]]]
variable[results] assign[=] call[name[slave_cur].fetchone, parameter[]]
if compare[name[results] is constant[None]] begin[:]
return[constant[promoted]] | keyword[def] identifier[free_slave] (** identifier[connection_args] ):
literal[string]
identifier[slave_db] = identifier[_connect] (** identifier[connection_args] )
keyword[if] identifier[slave_db] keyword[is] keyword[None] :
keyword[return] literal[string]
identifier[slave_cur] = identifier[slave_db] . identifier[cursor] ( identifier[MySQLdb] . identifier[cursors] . identifier[DictCursor] )
identifier[slave_cur] . identifier[execute] ( literal[string] )
identifier[slave_status] = identifier[slave_cur] . identifier[fetchone] ()
identifier[master] ={ literal[string] : identifier[slave_status] [ literal[string] ]}
keyword[try] :
identifier[master_db] = identifier[_connect] (** identifier[master] )
keyword[if] identifier[master_db] keyword[is] keyword[None] :
keyword[return] literal[string]
identifier[master_cur] = identifier[master_db] . identifier[cursor] ()
identifier[master_cur] . identifier[execute] ( literal[string] )
identifier[master_db] . identifier[close] ()
keyword[except] identifier[MySQLdb] . identifier[OperationalError] :
keyword[pass]
identifier[slave_cur] . identifier[execute] ( literal[string] )
identifier[slave_cur] . identifier[execute] ( literal[string] )
identifier[slave_cur] . identifier[execute] ( literal[string] )
identifier[slave_cur] . identifier[execute] ( literal[string] )
identifier[results] = identifier[slave_cur] . identifier[fetchone] ()
keyword[if] identifier[results] keyword[is] keyword[None] :
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string] | def free_slave(**connection_args):
"""
Frees a slave from its master. This is a WIP, do not use.
CLI Example:
.. code-block:: bash
salt '*' mysql.free_slave
"""
slave_db = _connect(**connection_args)
if slave_db is None:
return '' # depends on [control=['if'], data=[]]
slave_cur = slave_db.cursor(MySQLdb.cursors.DictCursor)
slave_cur.execute('show slave status')
slave_status = slave_cur.fetchone()
master = {'host': slave_status['Master_Host']}
try:
# Try to connect to the master and flush logs before promoting to
# master. This may fail if the master is no longer available.
# I am also assuming that the admin password is the same on both
# servers here, and only overriding the host option in the connect
# function.
master_db = _connect(**master)
if master_db is None:
return '' # depends on [control=['if'], data=[]]
master_cur = master_db.cursor()
master_cur.execute('flush logs')
master_db.close() # depends on [control=['try'], data=[]]
except MySQLdb.OperationalError:
pass # depends on [control=['except'], data=[]]
slave_cur.execute('stop slave')
slave_cur.execute('reset master')
    slave_cur.execute("change master to MASTER_HOST=''")
slave_cur.execute('show slave status')
results = slave_cur.fetchone()
if results is None:
return 'promoted' # depends on [control=['if'], data=[]]
else:
return 'failed' |
def full_split(text, regex):
"""
Split the text by the regex, keeping all parts.
The parts should re-join back into the original text.
>>> list(full_split('word', re.compile('&.*?')))
['word']
"""
while text:
m = regex.search(text)
if not m:
yield text
break
left = text[:m.start()]
middle = text[m.start():m.end()]
right = text[m.end():]
if left:
yield left
if middle:
yield middle
text = right | def function[full_split, parameter[text, regex]]:
constant[
Split the text by the regex, keeping all parts.
The parts should re-join back into the original text.
>>> list(full_split('word', re.compile('&.*?')))
['word']
]
while name[text] begin[:]
variable[m] assign[=] call[name[regex].search, parameter[name[text]]]
if <ast.UnaryOp object at 0x7da2041da770> begin[:]
<ast.Yield object at 0x7da2041d8160>
break
variable[left] assign[=] call[name[text]][<ast.Slice object at 0x7da2041d93c0>]
variable[middle] assign[=] call[name[text]][<ast.Slice object at 0x7da2041da4d0>]
variable[right] assign[=] call[name[text]][<ast.Slice object at 0x7da2041d9e70>]
if name[left] begin[:]
<ast.Yield object at 0x7da2041db820>
if name[middle] begin[:]
<ast.Yield object at 0x7da2041da4a0>
variable[text] assign[=] name[right] | keyword[def] identifier[full_split] ( identifier[text] , identifier[regex] ):
literal[string]
keyword[while] identifier[text] :
identifier[m] = identifier[regex] . identifier[search] ( identifier[text] )
keyword[if] keyword[not] identifier[m] :
keyword[yield] identifier[text]
keyword[break]
identifier[left] = identifier[text] [: identifier[m] . identifier[start] ()]
identifier[middle] = identifier[text] [ identifier[m] . identifier[start] (): identifier[m] . identifier[end] ()]
identifier[right] = identifier[text] [ identifier[m] . identifier[end] ():]
keyword[if] identifier[left] :
keyword[yield] identifier[left]
keyword[if] identifier[middle] :
keyword[yield] identifier[middle]
identifier[text] = identifier[right] | def full_split(text, regex):
"""
Split the text by the regex, keeping all parts.
The parts should re-join back into the original text.
>>> list(full_split('word', re.compile('&.*?')))
['word']
"""
while text:
m = regex.search(text)
if not m:
yield text
break # depends on [control=['if'], data=[]]
left = text[:m.start()]
middle = text[m.start():m.end()]
right = text[m.end():]
if left:
yield left # depends on [control=['if'], data=[]]
if middle:
yield middle # depends on [control=['if'], data=[]]
text = right # depends on [control=['while'], data=[]] |
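
A quick check of the round-trip property promised by the docstring, assuming the full_split generator above is in scope; the pattern and input are arbitrary.

import re

parts = list(full_split('a, b,  c', re.compile(r',\s*')))
print(parts)                          # ['a', ', ', 'b', ',  ', 'c']
assert ''.join(parts) == 'a, b,  c'   # the parts re-join into the original text
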
def select_location(self):
"""Select directory."""
location = osp.normpath(getexistingdirectory(self,
_("Select directory"),
self.location))
if location:
if is_writable(location):
self.location = location
self.update_location() | def function[select_location, parameter[self]]:
constant[Select directory.]
variable[location] assign[=] call[name[osp].normpath, parameter[call[name[getexistingdirectory], parameter[name[self], call[name[_], parameter[constant[Select directory]]], name[self].location]]]]
if name[location] begin[:]
if call[name[is_writable], parameter[name[location]]] begin[:]
name[self].location assign[=] name[location]
call[name[self].update_location, parameter[]] | keyword[def] identifier[select_location] ( identifier[self] ):
literal[string]
identifier[location] = identifier[osp] . identifier[normpath] ( identifier[getexistingdirectory] ( identifier[self] ,
identifier[_] ( literal[string] ),
identifier[self] . identifier[location] ))
keyword[if] identifier[location] :
keyword[if] identifier[is_writable] ( identifier[location] ):
identifier[self] . identifier[location] = identifier[location]
identifier[self] . identifier[update_location] () | def select_location(self):
"""Select directory."""
location = osp.normpath(getexistingdirectory(self, _('Select directory'), self.location))
if location:
if is_writable(location):
self.location = location
self.update_location() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def chownr(path, owner, group, follow_links=True, chowntopdir=False):
"""Recursively change user and group ownership of files and directories
in given path. Doesn't chown path itself by default, only its children.
:param str path: The string path to start changing ownership.
:param str owner: The owner string to use when looking up the uid.
:param str group: The group string to use when looking up the gid.
:param bool follow_links: Also follow and chown links if True
:param bool chowntopdir: Also chown path itself if True
"""
uid = pwd.getpwnam(owner).pw_uid
gid = grp.getgrnam(group).gr_gid
if follow_links:
chown = os.chown
else:
chown = os.lchown
if chowntopdir:
broken_symlink = os.path.lexists(path) and not os.path.exists(path)
if not broken_symlink:
chown(path, uid, gid)
for root, dirs, files in os.walk(path, followlinks=follow_links):
for name in dirs + files:
full = os.path.join(root, name)
broken_symlink = os.path.lexists(full) and not os.path.exists(full)
if not broken_symlink:
chown(full, uid, gid) | def function[chownr, parameter[path, owner, group, follow_links, chowntopdir]]:
constant[Recursively change user and group ownership of files and directories
in given path. Doesn't chown path itself by default, only its children.
:param str path: The string path to start changing ownership.
:param str owner: The owner string to use when looking up the uid.
:param str group: The group string to use when looking up the gid.
:param bool follow_links: Also follow and chown links if True
:param bool chowntopdir: Also chown path itself if True
]
variable[uid] assign[=] call[name[pwd].getpwnam, parameter[name[owner]]].pw_uid
variable[gid] assign[=] call[name[grp].getgrnam, parameter[name[group]]].gr_gid
if name[follow_links] begin[:]
variable[chown] assign[=] name[os].chown
if name[chowntopdir] begin[:]
variable[broken_symlink] assign[=] <ast.BoolOp object at 0x7da18f00c2e0>
if <ast.UnaryOp object at 0x7da18bc717e0> begin[:]
call[name[chown], parameter[name[path], name[uid], name[gid]]]
for taget[tuple[[<ast.Name object at 0x7da18bc73af0>, <ast.Name object at 0x7da18bc703a0>, <ast.Name object at 0x7da18bc71e70>]]] in starred[call[name[os].walk, parameter[name[path]]]] begin[:]
for taget[name[name]] in starred[binary_operation[name[dirs] + name[files]]] begin[:]
variable[full] assign[=] call[name[os].path.join, parameter[name[root], name[name]]]
variable[broken_symlink] assign[=] <ast.BoolOp object at 0x7da20c6aac20>
if <ast.UnaryOp object at 0x7da20c6a9c60> begin[:]
call[name[chown], parameter[name[full], name[uid], name[gid]]] | keyword[def] identifier[chownr] ( identifier[path] , identifier[owner] , identifier[group] , identifier[follow_links] = keyword[True] , identifier[chowntopdir] = keyword[False] ):
literal[string]
identifier[uid] = identifier[pwd] . identifier[getpwnam] ( identifier[owner] ). identifier[pw_uid]
identifier[gid] = identifier[grp] . identifier[getgrnam] ( identifier[group] ). identifier[gr_gid]
keyword[if] identifier[follow_links] :
identifier[chown] = identifier[os] . identifier[chown]
keyword[else] :
identifier[chown] = identifier[os] . identifier[lchown]
keyword[if] identifier[chowntopdir] :
identifier[broken_symlink] = identifier[os] . identifier[path] . identifier[lexists] ( identifier[path] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] )
keyword[if] keyword[not] identifier[broken_symlink] :
identifier[chown] ( identifier[path] , identifier[uid] , identifier[gid] )
keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[path] , identifier[followlinks] = identifier[follow_links] ):
keyword[for] identifier[name] keyword[in] identifier[dirs] + identifier[files] :
identifier[full] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[name] )
identifier[broken_symlink] = identifier[os] . identifier[path] . identifier[lexists] ( identifier[full] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[full] )
keyword[if] keyword[not] identifier[broken_symlink] :
identifier[chown] ( identifier[full] , identifier[uid] , identifier[gid] ) | def chownr(path, owner, group, follow_links=True, chowntopdir=False):
"""Recursively change user and group ownership of files and directories
in given path. Doesn't chown path itself by default, only its children.
:param str path: The string path to start changing ownership.
:param str owner: The owner string to use when looking up the uid.
:param str group: The group string to use when looking up the gid.
:param bool follow_links: Also follow and chown links if True
:param bool chowntopdir: Also chown path itself if True
"""
uid = pwd.getpwnam(owner).pw_uid
gid = grp.getgrnam(group).gr_gid
if follow_links:
chown = os.chown # depends on [control=['if'], data=[]]
else:
chown = os.lchown
if chowntopdir:
broken_symlink = os.path.lexists(path) and (not os.path.exists(path))
if not broken_symlink:
chown(path, uid, gid) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
for (root, dirs, files) in os.walk(path, followlinks=follow_links):
for name in dirs + files:
full = os.path.join(root, name)
broken_symlink = os.path.lexists(full) and (not os.path.exists(full))
if not broken_symlink:
chown(full, uid, gid) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=[]] |
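
A self-contained dry run of chownr that chowns a temporary tree to the current user and group (ownership is unchanged, so no root is needed); the owner and group names are looked up from the running process.

import grp, os, pwd, tempfile

owner = pwd.getpwuid(os.getuid()).pw_name   # current user
group = grp.getgrgid(os.getgid()).gr_name   # current group

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'a', 'b'))
open(os.path.join(root, 'a', 'b', 'f.txt'), 'w').close()

chownr(root, owner, group, chowntopdir=True)  # uses the chownr defined above
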
def predict(self, dt=None, UT=None, fx=None, **fx_args):
r"""
Performs the predict step of the UKF. On return, self.x and
self.P contain the predicted state (x) and covariance (P). '
Important: this MUST be called before update() is called for the first
time.
Parameters
----------
dt : double, optional
If specified, the time step to be used for this prediction.
self._dt is used if this is not provided.
fx : callable f(x, **fx_args), optional
State transition function. If not provided, the default
function passed in during construction will be used.
UT : function(sigmas, Wm, Wc, noise_cov), optional
Optional function to compute the unscented transform for the sigma
points passed through hx. Typically the default function will
work - you can use x_mean_fn and z_mean_fn to alter the behavior
of the unscented transform.
**fx_args : keyword arguments
optional keyword arguments to be passed into f(x).
"""
if dt is None:
dt = self._dt
if UT is None:
UT = unscented_transform
# calculate sigma points for given mean and covariance
self.compute_process_sigmas(dt, fx, **fx_args)
#and pass sigmas through the unscented transform to compute prior
self.x, self.P = UT(self.sigmas_f, self.Wm, self.Wc, self.Q,
self.x_mean, self.residual_x)
# save prior
self.x_prior = np.copy(self.x)
self.P_prior = np.copy(self.P) | def function[predict, parameter[self, dt, UT, fx]]:
constant[
Performs the predict step of the UKF. On return, self.x and
self.P contain the predicted state (x) and covariance (P). '
Important: this MUST be called before update() is called for the first
time.
Parameters
----------
dt : double, optional
If specified, the time step to be used for this prediction.
self._dt is used if this is not provided.
fx : callable f(x, **fx_args), optional
State transition function. If not provided, the default
function passed in during construction will be used.
UT : function(sigmas, Wm, Wc, noise_cov), optional
Optional function to compute the unscented transform for the sigma
points passed through hx. Typically the default function will
work - you can use x_mean_fn and z_mean_fn to alter the behavior
of the unscented transform.
**fx_args : keyword arguments
optional keyword arguments to be passed into f(x).
]
if compare[name[dt] is constant[None]] begin[:]
variable[dt] assign[=] name[self]._dt
if compare[name[UT] is constant[None]] begin[:]
variable[UT] assign[=] name[unscented_transform]
call[name[self].compute_process_sigmas, parameter[name[dt], name[fx]]]
<ast.Tuple object at 0x7da18ede5600> assign[=] call[name[UT], parameter[name[self].sigmas_f, name[self].Wm, name[self].Wc, name[self].Q, name[self].x_mean, name[self].residual_x]]
name[self].x_prior assign[=] call[name[np].copy, parameter[name[self].x]]
name[self].P_prior assign[=] call[name[np].copy, parameter[name[self].P]] | keyword[def] identifier[predict] ( identifier[self] , identifier[dt] = keyword[None] , identifier[UT] = keyword[None] , identifier[fx] = keyword[None] ,** identifier[fx_args] ):
literal[string]
keyword[if] identifier[dt] keyword[is] keyword[None] :
identifier[dt] = identifier[self] . identifier[_dt]
keyword[if] identifier[UT] keyword[is] keyword[None] :
identifier[UT] = identifier[unscented_transform]
identifier[self] . identifier[compute_process_sigmas] ( identifier[dt] , identifier[fx] ,** identifier[fx_args] )
identifier[self] . identifier[x] , identifier[self] . identifier[P] = identifier[UT] ( identifier[self] . identifier[sigmas_f] , identifier[self] . identifier[Wm] , identifier[self] . identifier[Wc] , identifier[self] . identifier[Q] ,
identifier[self] . identifier[x_mean] , identifier[self] . identifier[residual_x] )
identifier[self] . identifier[x_prior] = identifier[np] . identifier[copy] ( identifier[self] . identifier[x] )
identifier[self] . identifier[P_prior] = identifier[np] . identifier[copy] ( identifier[self] . identifier[P] ) | def predict(self, dt=None, UT=None, fx=None, **fx_args):
"""
Performs the predict step of the UKF. On return, self.x and
self.P contain the predicted state (x) and covariance (P).
Important: this MUST be called before update() is called for the first
time.
Parameters
----------
dt : double, optional
If specified, the time step to be used for this prediction.
self._dt is used if this is not provided.
fx : callable f(x, **fx_args), optional
State transition function. If not provided, the default
function passed in during construction will be used.
UT : function(sigmas, Wm, Wc, noise_cov), optional
Optional function to compute the unscented transform for the sigma
points passed through hx. Typically the default function will
work - you can use x_mean_fn and z_mean_fn to alter the behavior
of the unscented transform.
**fx_args : keyword arguments
optional keyword arguments to be passed into f(x).
"""
if dt is None:
dt = self._dt # depends on [control=['if'], data=['dt']]
if UT is None:
UT = unscented_transform # depends on [control=['if'], data=['UT']]
# calculate sigma points for given mean and covariance
self.compute_process_sigmas(dt, fx, **fx_args)
# and pass sigmas through the unscented transform to compute prior
(self.x, self.P) = UT(self.sigmas_f, self.Wm, self.Wc, self.Q, self.x_mean, self.residual_x)
# save prior
self.x_prior = np.copy(self.x)
self.P_prior = np.copy(self.P) |
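A short usage sketch of the predict()/update() cycle documented above, assuming the class is FilterPy's UnscentedKalmanFilter; the constant-velocity model and the numbers are illustrative only:
import numpy as np
from filterpy.kalman import MerweScaledSigmaPoints, UnscentedKalmanFilter

def fx(x, dt):
    # constant-velocity transition: position advances by velocity * dt
    return np.array([x[0] + dt * x[1], x[1]])

def hx(x):
    # only the position is observed
    return np.array([x[0]])

points = MerweScaledSigmaPoints(n=2, alpha=0.1, beta=2.0, kappa=0.0)
ukf = UnscentedKalmanFilter(dim_x=2, dim_z=1, dt=0.1, fx=fx, hx=hx, points=points)
ukf.x = np.array([0.0, 1.0])
ukf.predict()                    # must run before the first update()
ukf.update(np.array([0.12]))     # fold in one position measurement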
def from_dict(cls, context_options_dict):
"""Return a context job from a dict output by Context.to_dict."""
import copy
context_options = copy.deepcopy(context_options_dict)
tasks_inserted = context_options.pop('_tasks_inserted', False)
insert_tasks = context_options.pop('insert_tasks', None)
if insert_tasks:
context_options['insert_tasks'] = path_to_reference(insert_tasks)
# The constructor expects a reference to the persistence engine.
persistence_engine = context_options.pop('persistence_engine', None)
if persistence_engine:
context_options['persistence_engine'] = path_to_reference(
persistence_engine)
# If there are callbacks, reconstitute them.
callbacks = context_options.pop('callbacks', None)
if callbacks:
context_options['callbacks'] = decode_callbacks(callbacks)
context = cls(**context_options)
context._tasks_inserted = tasks_inserted
return context | def function[from_dict, parameter[cls, context_options_dict]]:
constant[Return a context job from a dict output by Context.to_dict.]
import module[copy]
variable[context_options] assign[=] call[name[copy].deepcopy, parameter[name[context_options_dict]]]
variable[tasks_inserted] assign[=] call[name[context_options].pop, parameter[constant[_tasks_inserted], constant[False]]]
variable[insert_tasks] assign[=] call[name[context_options].pop, parameter[constant[insert_tasks], constant[None]]]
if name[insert_tasks] begin[:]
call[name[context_options]][constant[insert_tasks]] assign[=] call[name[path_to_reference], parameter[name[insert_tasks]]]
variable[persistence_engine] assign[=] call[name[context_options].pop, parameter[constant[persistence_engine], constant[None]]]
if name[persistence_engine] begin[:]
call[name[context_options]][constant[persistence_engine]] assign[=] call[name[path_to_reference], parameter[name[persistence_engine]]]
variable[callbacks] assign[=] call[name[context_options].pop, parameter[constant[callbacks], constant[None]]]
if name[callbacks] begin[:]
call[name[context_options]][constant[callbacks]] assign[=] call[name[decode_callbacks], parameter[name[callbacks]]]
variable[context] assign[=] call[name[cls], parameter[]]
name[context]._tasks_inserted assign[=] name[tasks_inserted]
return[name[context]] | keyword[def] identifier[from_dict] ( identifier[cls] , identifier[context_options_dict] ):
literal[string]
keyword[import] identifier[copy]
identifier[context_options] = identifier[copy] . identifier[deepcopy] ( identifier[context_options_dict] )
identifier[tasks_inserted] = identifier[context_options] . identifier[pop] ( literal[string] , keyword[False] )
identifier[insert_tasks] = identifier[context_options] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[insert_tasks] :
identifier[context_options] [ literal[string] ]= identifier[path_to_reference] ( identifier[insert_tasks] )
identifier[persistence_engine] = identifier[context_options] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[persistence_engine] :
identifier[context_options] [ literal[string] ]= identifier[path_to_reference] (
identifier[persistence_engine] )
identifier[callbacks] = identifier[context_options] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[callbacks] :
identifier[context_options] [ literal[string] ]= identifier[decode_callbacks] ( identifier[callbacks] )
identifier[context] = identifier[cls] (** identifier[context_options] )
identifier[context] . identifier[_tasks_inserted] = identifier[tasks_inserted]
keyword[return] identifier[context] | def from_dict(cls, context_options_dict):
"""Return a context job from a dict output by Context.to_dict."""
import copy
context_options = copy.deepcopy(context_options_dict)
tasks_inserted = context_options.pop('_tasks_inserted', False)
insert_tasks = context_options.pop('insert_tasks', None)
if insert_tasks:
context_options['insert_tasks'] = path_to_reference(insert_tasks) # depends on [control=['if'], data=[]]
# The constructor expects a reference to the persistence engine.
persistence_engine = context_options.pop('persistence_engine', None)
if persistence_engine:
context_options['persistence_engine'] = path_to_reference(persistence_engine) # depends on [control=['if'], data=[]]
# If there are callbacks, reconstitute them.
callbacks = context_options.pop('callbacks', None)
if callbacks:
context_options['callbacks'] = decode_callbacks(callbacks) # depends on [control=['if'], data=[]]
context = cls(**context_options)
context._tasks_inserted = tasks_inserted
return context |
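Because from_dict() deep-copies its input before popping keys, the caller's options dict is left intact; a hedged check of that behavior (it assumes the Context constructor accepts an empty option set, and '_tasks_inserted' is the only key used here):
opts = {'_tasks_inserted': True}
ctx = Context.from_dict(dict(opts))        # Context is the class defined above
assert ctx._tasks_inserted is True
assert opts == {'_tasks_inserted': True}   # original mapping untouched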
def scatter(x, y, z=None,
c=None, cmap=None, s=None, discrete=None,
ax=None, legend=None, figsize=None,
xticks=False,
yticks=False,
zticks=False,
xticklabels=True,
yticklabels=True,
zticklabels=True,
label_prefix="PHATE",
xlabel=None,
ylabel=None,
zlabel=None,
title=None,
legend_title="",
legend_loc='best',
filename=None,
dpi=None,
**plot_kwargs):
"""Create a scatter plot
Builds upon `matplotlib.pyplot.scatter` with nice defaults
and handles categorical colors / legends better. For easy access, use
`scatter2d` or `scatter3d`.
Parameters
----------
x : list-like
data for x axis
y : list-like
data for y axis
z : list-like, optional (default: None)
data for z axis
c : list-like or None, optional (default: None)
Color vector. Can be a single color value (RGB, RGBA, or named
matplotlib colors), an array of these of length n_samples, or a list of
discrete or continuous values of any data type. If `c` is not a single
or list of matplotlib colors, the values in `c` will be used to
populate the legend / colorbar with colors from `cmap`
cmap : `matplotlib` colormap, str, dict or None, optional (default: None)
matplotlib colormap. If None, uses `tab20` for discrete data and
`inferno` for continuous data. If a dictionary, expects one key
for every unique value in `c`, where values are valid matplotlib colors
(hsv, rgb, rgba, or named colors)
s : float, optional (default: 1)
Point size.
discrete : bool or None, optional (default: None)
If True, the legend is categorical. If False, the legend is a colorbar.
If None, discreteness is detected automatically. Data containing
non-numeric `c` is always discrete, and numeric data with 20 or fewer
unique values is discrete.
ax : `matplotlib.Axes` or None, optional (default: None)
axis on which to plot. If None, an axis is created
legend : bool, optional (default: True)
States whether or not to create a legend. If data is continuous,
the legend is a colorbar.
figsize : tuple, optional (default: None)
Tuple of floats for creation of new `matplotlib` figure. Only used if
`ax` is None.
xticks : True, False, or list-like (default: False)
If True, keeps default x ticks. If False, removes x ticks.
If a list, sets custom x ticks
yticks : True, False, or list-like (default: False)
If True, keeps default y ticks. If False, removes y ticks.
If a list, sets custom y ticks
zticks : True, False, or list-like (default: False)
If True, keeps default z ticks. If False, removes z ticks.
If a list, sets custom z ticks. Only used for 3D plots.
xticklabels : True, False, or list-like (default: True)
If True, keeps default x tick labels. If False, removes x tick labels.
If a list, sets custom x tick labels
yticklabels : True, False, or list-like (default: True)
If True, keeps default y tick labels. If False, removes y tick labels.
If a list, sets custom y tick labels
zticklabels : True, False, or list-like (default: True)
If True, keeps default z tick labels. If False, removes z tick labels.
If a list, sets custom z tick labels. Only used for 3D plots.
label_prefix : str or None (default: "PHATE")
Prefix for all axis labels. Axes will be labelled `label_prefix`1,
`label_prefix`2, etc. Can be overridden by setting `xlabel`,
`ylabel`, and `zlabel`.
xlabel : str or None (default : None)
Label for the x axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
ylabel : str or None (default : None)
Label for the y axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
zlabel : str or None (default : None)
Label for the z axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
Only used for 3D plots.
title : str or None (default: None)
axis title. If None, no title is set.
legend_title : str (default: "")
title for the colorbar or legend. Only used for discrete data.
legend_loc : int or string or pair of floats, default: 'best'
Matplotlib legend location. Only used for discrete data.
See <https://matplotlib.org/api/_as_gen/matplotlib.pyplot.legend.html>
for details.
filename : str or None (default: None)
file to which the output is saved
dpi : int or None, optional (default: None)
The resolution in dots per inch. If None it will default to the value
savefig.dpi in the matplotlibrc file. If 'figure' it will set the dpi
to be the value of the figure. Only used if filename is not None.
**plot_kwargs : keyword arguments
Extra arguments passed to `matplotlib.pyplot.scatter`.
Returns
-------
ax : `matplotlib.Axes`
axis on which plot was drawn
Examples
--------
>>> import phate
>>> import matplotlib.pyplot as plt
>>> ###
>>> # Running PHATE
>>> ###
>>> tree_data, tree_clusters = phate.tree.gen_dla(n_dim=100, n_branch=20,
... branch_length=100)
>>> tree_data.shape
(2000, 100)
>>> phate_operator = phate.PHATE(k=5, a=20, t=150)
>>> tree_phate = phate_operator.fit_transform(tree_data)
>>> tree_phate.shape
(2000, 2)
>>> ###
>>> # Plotting using phate.plot
>>> ###
>>> phate.plot.scatter2d(tree_phate, c=tree_clusters)
>>> # You can also pass the PHATE operator instead of data
>>> phate.plot.scatter2d(phate_operator, c=tree_clusters)
>>> phate.plot.scatter3d(phate_operator, c=tree_clusters)
>>> ###
>>> # Using a cmap dictionary
>>> ###
>>> import numpy as np
>>> X = np.random.normal(0,1,[1000,2])
>>> c = np.random.choice(['a','b'], 1000, replace=True)
>>> X[c=='a'] += 10
>>> phate.plot.scatter2d(X, c=c, cmap={'a' : [1,0,0,1], 'b' : 'xkcd:sky blue'})
"""
warnings.warn("`phate.plot.scatter` is deprecated. "
"Use `scprep.plot.scatter` instead.",
FutureWarning)
return scprep.plot.scatter(x=x, y=y, z=z,
c=c, cmap=cmap, s=s, discrete=discrete,
ax=ax, legend=legend, figsize=figsize,
xticks=xticks,
yticks=yticks,
zticks=zticks,
xticklabels=xticklabels,
yticklabels=yticklabels,
zticklabels=zticklabels,
label_prefix=label_prefix,
xlabel=xlabel,
ylabel=ylabel,
zlabel=zlabel,
title=title,
legend_title=legend_title,
legend_loc=legend_loc,
filename=filename,
dpi=dpi,
**plot_kwargs) | def function[scatter, parameter[x, y, z, c, cmap, s, discrete, ax, legend, figsize, xticks, yticks, zticks, xticklabels, yticklabels, zticklabels, label_prefix, xlabel, ylabel, zlabel, title, legend_title, legend_loc, filename, dpi]]:
constant[Create a scatter plot
Builds upon `matplotlib.pyplot.scatter` with nice defaults
and handles categorical colors / legends better. For easy access, use
`scatter2d` or `scatter3d`.
Parameters
----------
x : list-like
data for x axis
y : list-like
data for y axis
z : list-like, optional (default: None)
data for z axis
c : list-like or None, optional (default: None)
Color vector. Can be a single color value (RGB, RGBA, or named
matplotlib colors), an array of these of length n_samples, or a list of
discrete or continuous values of any data type. If `c` is not a single
or list of matplotlib colors, the values in `c` will be used to
populate the legend / colorbar with colors from `cmap`
cmap : `matplotlib` colormap, str, dict or None, optional (default: None)
matplotlib colormap. If None, uses `tab20` for discrete data and
`inferno` for continuous data. If a dictionary, expects one key
for every unique value in `c`, where values are valid matplotlib colors
(hsv, rgb, rgba, or named colors)
s : float, optional (default: 1)
Point size.
discrete : bool or None, optional (default: None)
If True, the legend is categorical. If False, the legend is a colorbar.
If None, discreteness is detected automatically. Data containing
non-numeric `c` is always discrete, and numeric data with 20 or fewer
unique values is discrete.
ax : `matplotlib.Axes` or None, optional (default: None)
axis on which to plot. If None, an axis is created
legend : bool, optional (default: True)
States whether or not to create a legend. If data is continuous,
the legend is a colorbar.
figsize : tuple, optional (default: None)
Tuple of floats for creation of new `matplotlib` figure. Only used if
`ax` is None.
xticks : True, False, or list-like (default: False)
If True, keeps default x ticks. If False, removes x ticks.
If a list, sets custom x ticks
yticks : True, False, or list-like (default: False)
If True, keeps default y ticks. If False, removes y ticks.
If a list, sets custom y ticks
zticks : True, False, or list-like (default: False)
If True, keeps default z ticks. If False, removes z ticks.
If a list, sets custom z ticks. Only used for 3D plots.
xticklabels : True, False, or list-like (default: True)
If True, keeps default x tick labels. If False, removes x tick labels.
If a list, sets custom x tick labels
yticklabels : True, False, or list-like (default: True)
If True, keeps default y tick labels. If False, removes y tick labels.
If a list, sets custom y tick labels
zticklabels : True, False, or list-like (default: True)
If True, keeps default z tick labels. If False, removes z tick labels.
If a list, sets custom z tick labels. Only used for 3D plots.
label_prefix : str or None (default: "PHATE")
Prefix for all axis labels. Axes will be labelled `label_prefix`1,
`label_prefix`2, etc. Can be overridden by setting `xlabel`,
`ylabel`, and `zlabel`.
xlabel : str or None (default : None)
Label for the x axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
ylabel : str or None (default : None)
Label for the y axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
zlabel : str or None (default : None)
Label for the z axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
Only used for 3D plots.
title : str or None (default: None)
axis title. If None, no title is set.
legend_title : str (default: "")
title for the colorbar or legend. Only used for discrete data.
legend_loc : int or string or pair of floats, default: 'best'
Matplotlib legend location. Only used for discrete data.
See <https://matplotlib.org/api/_as_gen/matplotlib.pyplot.legend.html>
for details.
filename : str or None (default: None)
file to which the output is saved
dpi : int or None, optional (default: None)
The resolution in dots per inch. If None it will default to the value
savefig.dpi in the matplotlibrc file. If 'figure' it will set the dpi
to be the value of the figure. Only used if filename is not None.
**plot_kwargs : keyword arguments
Extra arguments passed to `matplotlib.pyplot.scatter`.
Returns
-------
ax : `matplotlib.Axes`
axis on which plot was drawn
Examples
--------
>>> import phate
>>> import matplotlib.pyplot as plt
>>> ###
>>> # Running PHATE
>>> ###
>>> tree_data, tree_clusters = phate.tree.gen_dla(n_dim=100, n_branch=20,
... branch_length=100)
>>> tree_data.shape
(2000, 100)
>>> phate_operator = phate.PHATE(k=5, a=20, t=150)
>>> tree_phate = phate_operator.fit_transform(tree_data)
>>> tree_phate.shape
(2000, 2)
>>> ###
>>> # Plotting using phate.plot
>>> ###
>>> phate.plot.scatter2d(tree_phate, c=tree_clusters)
>>> # You can also pass the PHATE operator instead of data
>>> phate.plot.scatter2d(phate_operator, c=tree_clusters)
>>> phate.plot.scatter3d(phate_operator, c=tree_clusters)
>>> ###
>>> # Using a cmap dictionary
>>> ###
>>> import numpy as np
>>> X = np.random.normal(0,1,[1000,2])
>>> c = np.random.choice(['a','b'], 1000, replace=True)
>>> X[c=='a'] += 10
>>> phate.plot.scatter2d(X, c=c, cmap={'a' : [1,0,0,1], 'b' : 'xkcd:sky blue'})
]
call[name[warnings].warn, parameter[constant[`phate.plot.scatter` is deprecated. Use `scprep.plot.scatter` instead.], name[FutureWarning]]]
return[call[name[scprep].plot.scatter, parameter[]]] | keyword[def] identifier[scatter] ( identifier[x] , identifier[y] , identifier[z] = keyword[None] ,
identifier[c] = keyword[None] , identifier[cmap] = keyword[None] , identifier[s] = keyword[None] , identifier[discrete] = keyword[None] ,
identifier[ax] = keyword[None] , identifier[legend] = keyword[None] , identifier[figsize] = keyword[None] ,
identifier[xticks] = keyword[False] ,
identifier[yticks] = keyword[False] ,
identifier[zticks] = keyword[False] ,
identifier[xticklabels] = keyword[True] ,
identifier[yticklabels] = keyword[True] ,
identifier[zticklabels] = keyword[True] ,
identifier[label_prefix] = literal[string] ,
identifier[xlabel] = keyword[None] ,
identifier[ylabel] = keyword[None] ,
identifier[zlabel] = keyword[None] ,
identifier[title] = keyword[None] ,
identifier[legend_title] = literal[string] ,
identifier[legend_loc] = literal[string] ,
identifier[filename] = keyword[None] ,
identifier[dpi] = keyword[None] ,
** identifier[plot_kwargs] ):
literal[string]
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] ,
identifier[FutureWarning] )
keyword[return] identifier[scprep] . identifier[plot] . identifier[scatter] ( identifier[x] = identifier[x] , identifier[y] = identifier[y] , identifier[z] = identifier[z] ,
identifier[c] = identifier[c] , identifier[cmap] = identifier[cmap] , identifier[s] = identifier[s] , identifier[discrete] = identifier[discrete] ,
identifier[ax] = identifier[ax] , identifier[legend] = identifier[legend] , identifier[figsize] = identifier[figsize] ,
identifier[xticks] = identifier[xticks] ,
identifier[yticks] = identifier[yticks] ,
identifier[zticks] = identifier[zticks] ,
identifier[xticklabels] = identifier[xticklabels] ,
identifier[yticklabels] = identifier[yticklabels] ,
identifier[zticklabels] = identifier[zticklabels] ,
identifier[label_prefix] = identifier[label_prefix] ,
identifier[xlabel] = identifier[xlabel] ,
identifier[ylabel] = identifier[ylabel] ,
identifier[zlabel] = identifier[zlabel] ,
identifier[title] = identifier[title] ,
identifier[legend_title] = identifier[legend_title] ,
identifier[legend_loc] = identifier[legend_loc] ,
identifier[filename] = identifier[filename] ,
identifier[dpi] = identifier[dpi] ,
** identifier[plot_kwargs] ) | def scatter(x, y, z=None, c=None, cmap=None, s=None, discrete=None, ax=None, legend=None, figsize=None, xticks=False, yticks=False, zticks=False, xticklabels=True, yticklabels=True, zticklabels=True, label_prefix='PHATE', xlabel=None, ylabel=None, zlabel=None, title=None, legend_title='', legend_loc='best', filename=None, dpi=None, **plot_kwargs):
"""Create a scatter plot
Builds upon `matplotlib.pyplot.scatter` with nice defaults
and handles categorical colors / legends better. For easy access, use
`scatter2d` or `scatter3d`.
Parameters
----------
x : list-like
data for x axis
y : list-like
data for y axis
z : list-like, optional (default: None)
data for z axis
c : list-like or None, optional (default: None)
Color vector. Can be a single color value (RGB, RGBA, or named
matplotlib colors), an array of these of length n_samples, or a list of
discrete or continuous values of any data type. If `c` is not a single
or list of matplotlib colors, the values in `c` will be used to
populate the legend / colorbar with colors from `cmap`
cmap : `matplotlib` colormap, str, dict or None, optional (default: None)
matplotlib colormap. If None, uses `tab20` for discrete data and
`inferno` for continuous data. If a dictionary, expects one key
for every unique value in `c`, where values are valid matplotlib colors
(hsv, rgb, rgba, or named colors)
s : float, optional (default: 1)
Point size.
discrete : bool or None, optional (default: None)
If True, the legend is categorical. If False, the legend is a colorbar.
If None, discreteness is detected automatically. Data containing
non-numeric `c` is always discrete, and numeric data with 20 or fewer
unique values is discrete.
ax : `matplotlib.Axes` or None, optional (default: None)
axis on which to plot. If None, an axis is created
legend : bool, optional (default: True)
States whether or not to create a legend. If data is continuous,
the legend is a colorbar.
figsize : tuple, optional (default: None)
Tuple of floats for creation of new `matplotlib` figure. Only used if
`ax` is None.
xticks : True, False, or list-like (default: False)
If True, keeps default x ticks. If False, removes x ticks.
If a list, sets custom x ticks
yticks : True, False, or list-like (default: False)
If True, keeps default y ticks. If False, removes y ticks.
If a list, sets custom y ticks
zticks : True, False, or list-like (default: False)
If True, keeps default z ticks. If False, removes z ticks.
If a list, sets custom z ticks. Only used for 3D plots.
xticklabels : True, False, or list-like (default: True)
If True, keeps default x tick labels. If False, removes x tick labels.
If a list, sets custom x tick labels
yticklabels : True, False, or list-like (default: True)
If True, keeps default y tick labels. If False, removes y tick labels.
If a list, sets custom y tick labels
zticklabels : True, False, or list-like (default: True)
If True, keeps default z tick labels. If False, removes z tick labels.
If a list, sets custom z tick labels. Only used for 3D plots.
label_prefix : str or None (default: "PHATE")
Prefix for all axis labels. Axes will be labelled `label_prefix`1,
`label_prefix`2, etc. Can be overridden by setting `xlabel`,
`ylabel`, and `zlabel`.
xlabel : str or None (default : None)
Label for the x axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
ylabel : str or None (default : None)
Label for the y axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
zlabel : str or None (default : None)
Label for the z axis. Overrides the automatic label given by
label_prefix. If None and label_prefix is None, no label is set.
Only used for 3D plots.
title : str or None (default: None)
axis title. If None, no title is set.
legend_title : str (default: "")
title for the colorbar or legend. Only used for discrete data.
legend_loc : int or string or pair of floats, default: 'best'
Matplotlib legend location. Only used for discrete data.
See <https://matplotlib.org/api/_as_gen/matplotlib.pyplot.legend.html>
for details.
filename : str or None (default: None)
file to which the output is saved
dpi : int or None, optional (default: None)
The resolution in dots per inch. If None it will default to the value
savefig.dpi in the matplotlibrc file. If 'figure' it will set the dpi
to be the value of the figure. Only used if filename is not None.
**plot_kwargs : keyword arguments
Extra arguments passed to `matplotlib.pyplot.scatter`.
Returns
-------
ax : `matplotlib.Axes`
axis on which plot was drawn
Examples
--------
>>> import phate
>>> import matplotlib.pyplot as plt
>>> ###
>>> # Running PHATE
>>> ###
>>> tree_data, tree_clusters = phate.tree.gen_dla(n_dim=100, n_branch=20,
... branch_length=100)
>>> tree_data.shape
(2000, 100)
>>> phate_operator = phate.PHATE(k=5, a=20, t=150)
>>> tree_phate = phate_operator.fit_transform(tree_data)
>>> tree_phate.shape
(2000, 2)
>>> ###
>>> # Plotting using phate.plot
>>> ###
>>> phate.plot.scatter2d(tree_phate, c=tree_clusters)
>>> # You can also pass the PHATE operator instead of data
>>> phate.plot.scatter2d(phate_operator, c=tree_clusters)
>>> phate.plot.scatter3d(phate_operator, c=tree_clusters)
>>> ###
>>> # Using a cmap dictionary
>>> ###
>>> import numpy as np
>>> X = np.random.normal(0,1,[1000,2])
>>> c = np.random.choice(['a','b'], 1000, replace=True)
>>> X[c=='a'] += 10
>>> phate.plot.scatter2d(X, c=c, cmap={'a' : [1,0,0,1], 'b' : 'xkcd:sky blue'})
"""
warnings.warn('`phate.plot.scatter` is deprecated. Use `scprep.plot.scatter` instead.', FutureWarning)
return scprep.plot.scatter(x=x, y=y, z=z, c=c, cmap=cmap, s=s, discrete=discrete, ax=ax, legend=legend, figsize=figsize, xticks=xticks, yticks=yticks, zticks=zticks, xticklabels=xticklabels, yticklabels=yticklabels, zticklabels=zticklabels, label_prefix=label_prefix, xlabel=xlabel, ylabel=ylabel, zlabel=zlabel, title=title, legend_title=legend_title, legend_loc=legend_loc, filename=filename, dpi=dpi, **plot_kwargs) |
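Since the wrapper above only warns and forwards, new code can call the scprep replacement directly with the same keywords it forwards; random data stands in for PHATE coordinates here:
import numpy as np
import scprep

coords = np.random.normal(0, 1, (200, 2))
labels = np.random.choice(['a', 'b'], 200)
scprep.plot.scatter(x=coords[:, 0], y=coords[:, 1], c=labels,
                    label_prefix='PHATE', legend_title='branch')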
def delete(method, hmc, uri, uri_parms, logon_required):
"""Operation: Delete User."""
try:
user = hmc.lookup_by_uri(uri)
except KeyError:
raise InvalidResourceError(method, uri)
# Check user type
type_ = user.properties['type']
if type_ == 'pattern-based':
raise BadRequestError(
method, uri, reason=312,
message="Cannot delete pattern-based user {!r}".
format(user.name))
# Delete the mocked resource
user.manager.remove(user.oid) | def function[delete, parameter[method, hmc, uri, uri_parms, logon_required]]:
constant[Operation: Delete User.]
<ast.Try object at 0x7da1b05901c0>
variable[type_] assign[=] call[name[user].properties][constant[type]]
if compare[name[type_] equal[==] constant[pattern-based]] begin[:]
<ast.Raise object at 0x7da18bc71de0>
call[name[user].manager.remove, parameter[name[user].oid]] | keyword[def] identifier[delete] ( identifier[method] , identifier[hmc] , identifier[uri] , identifier[uri_parms] , identifier[logon_required] ):
literal[string]
keyword[try] :
identifier[user] = identifier[hmc] . identifier[lookup_by_uri] ( identifier[uri] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[InvalidResourceError] ( identifier[method] , identifier[uri] )
identifier[type_] = identifier[user] . identifier[properties] [ literal[string] ]
keyword[if] identifier[type_] == literal[string] :
keyword[raise] identifier[BadRequestError] (
identifier[method] , identifier[uri] , identifier[reason] = literal[int] ,
identifier[message] = literal[string] .
identifier[format] ( identifier[user] . identifier[name] ))
identifier[user] . identifier[manager] . identifier[remove] ( identifier[user] . identifier[oid] ) | def delete(method, hmc, uri, uri_parms, logon_required):
"""Operation: Delete User."""
try:
user = hmc.lookup_by_uri(uri) # depends on [control=['try'], data=[]]
except KeyError:
raise InvalidResourceError(method, uri) # depends on [control=['except'], data=[]]
# Check user type
type_ = user.properties['type']
if type_ == 'pattern-based':
raise BadRequestError(method, uri, reason=312, message='Cannot delete pattern-based user {!r}'.format(user.name)) # depends on [control=['if'], data=[]]
# Delete the mocked resource
user.manager.remove(user.oid) |
def track(user_id, event, first_name=None, last_name=None, email=None,
phone_number=None, apns_tokens=None, gcm_tokens=None,
user_attributes=None, properties=None, on_error=None, on_success=None, timestamp=None):
""" For any event you want to track, when a user triggers that event you
would call this function.
You can do an identify and track call simultaneously by including all the
identifiable user information in the track call.
:param str | number user_id: the id of your user who triggered the event.
:param str first_name: OPTIONAL the user's first name.
:param str last_name: OPTIONAL the user's last name.
:param str email: OPTIONAL the user's email address.
:param str phone_number: OPTIONAL the user's phone number.
:param str | list apns_tokens: OPTIONAL the device tokens for the user's iOS
devices. If a single string is given it is put into a list.
:param str | list gcm_tokens: OPTIONAL the device tokens for the user's Android
devices. If a single string is given it is put into a list.
:param dict user_attributes: An optional dictionary with any additional
freeform attributes describing the user.
:param dict properties: An optional dictionary with any properties that
describe the event being tracked. Example: if the event were "added item to
cart", you might include a property named "item" that is the name
of the item added to the cart.
:param func on_error: An optional function to call in the event of an error.
on_error callback should take 1 parameter which will be the error message.
:param func on_success: An optional function to call if/when the API call succeeds.
on_success callback takes no parameters.
"""
on_error = on_error or __on_error
on_success = on_success or __on_success
if not __is_init():
on_error(ERROR_INIT, __error_message(ERROR_INIT))
return
if not isinstance(user_id, six.string_types + (Number,)):
on_error(ERROR_USER_ID, __error_message(ERROR_USER_ID))
return
if not isinstance(event, six.string_types):
on_error(ERROR_EVENT_NAME, __error_message(ERROR_EVENT_NAME))
return
data = dict(user_id=user_id, event=event)
user = __user(
first_name,
last_name,
email,
phone_number,
apns_tokens,
gcm_tokens,
user_attributes,
None, None, None)
if user:
data['user'] = user
if properties:
if isinstance(properties, dict):
if len(properties) > 0:
data['properties'] = properties
else:
sys.stderr.write('Invalid event properties given. Expected dictionary. ' +
'Got %s' % type(properties).__name__)
if timestamp:
data['timestamp'] = timestamp
else:
data['timestamp'] = int(time.time())
try:
resp = requests.post(
"%s/track" % __BASE_URL,
data=json.dumps(data),
headers=__HEADERS,
)
if resp.status_code >= 200 and resp.status_code < 400:
on_success()
else:
on_error(ERROR_UNKNOWN, resp.text)
except requests.exceptions.ConnectionError:
on_error(ERROR_CONNECTION, __error_message(ERROR_CONNECTION)) | def function[track, parameter[user_id, event, first_name, last_name, email, phone_number, apns_tokens, gcm_tokens, user_attributes, properties, on_error, on_success, timestamp]]:
constant[ For any event you want to track, when a user triggers that event you
would call this function.
You can do an identify and track call simultaneously by including all the
identifiable user information in the track call.
:param str | number user_id: the id of your user who triggered the event.
:param str first_name: OPTIONAL the user's first name.
:param str last_name: OPTIONAL the user's last name.
:param str email: OPTIONAL the user's email address.
:param str phone_number: OPTIONAL the user's phone number.
:param str | list apns_tokens: OPTIONAL the device tokens for the user's iOS
devices. If a single string is given it is put into a list.
:param str | list gcm_tokens: OPTIONAL the device tokens for the user's Android
devices. If a single string is given it is put into a list.
:param dict user_attributes: An optional dictionary with any additional
freeform attributes describing the user.
:param dict properties: An optional dictionary with any properties that
describe the event being tracked. Example: if the event were "added item to
cart", you might include a property named "item" that is the name
of the item added to the cart.
:param func on_error: An optional function to call in the event of an error.
on_error callback should take 2 parameters: an error code and the error message.
:param func on_success: An optional function to call if/when the API call succeeds.
on_success callback takes no parameters.
]
variable[on_error] assign[=] <ast.BoolOp object at 0x7da1b0912860>
variable[on_success] assign[=] <ast.BoolOp object at 0x7da1b0910a30>
if <ast.UnaryOp object at 0x7da1b09128c0> begin[:]
call[name[on_error], parameter[name[ERROR_INIT], call[name[__error_message], parameter[name[ERROR_INIT]]]]]
return[None]
if <ast.UnaryOp object at 0x7da1b0913f70> begin[:]
call[name[on_error], parameter[name[ERROR_USER_ID], call[name[__error_message], parameter[name[ERROR_USER_ID]]]]]
return[None]
if <ast.UnaryOp object at 0x7da1b09122f0> begin[:]
call[name[on_error], parameter[name[ERROR_EVENT_NAME], call[name[__error_message], parameter[name[ERROR_EVENT_NAME]]]]]
return[None]
variable[data] assign[=] call[name[dict], parameter[]]
variable[user] assign[=] call[name[__user], parameter[name[first_name], name[last_name], name[email], name[phone_number], name[apns_tokens], name[gcm_tokens], name[user_attributes], constant[None], constant[None], constant[None]]]
if name[user] begin[:]
call[name[data]][constant[user]] assign[=] name[user]
if name[properties] begin[:]
if call[name[isinstance], parameter[name[properties], name[dict]]] begin[:]
if compare[call[name[len], parameter[name[properties]]] greater[>] constant[0]] begin[:]
call[name[data]][constant[properties]] assign[=] name[properties]
if name[timestamp] begin[:]
call[name[data]][constant[timestamp]] assign[=] name[timestamp]
<ast.Try object at 0x7da1b0912ad0> | keyword[def] identifier[track] ( identifier[user_id] , identifier[event] , identifier[first_name] = keyword[None] , identifier[last_name] = keyword[None] , identifier[email] = keyword[None] ,
identifier[phone_number] = keyword[None] , identifier[apns_tokens] = keyword[None] , identifier[gcm_tokens] = keyword[None] ,
identifier[user_attributes] = keyword[None] , identifier[properties] = keyword[None] , identifier[on_error] = keyword[None] , identifier[on_success] = keyword[None] , identifier[timestamp] = keyword[None] ):
literal[string]
identifier[on_error] = identifier[on_error] keyword[or] identifier[__on_error]
identifier[on_success] = identifier[on_success] keyword[or] identifier[__on_success]
keyword[if] keyword[not] identifier[__is_init] ():
identifier[on_error] ( identifier[ERROR_INIT] , identifier[__error_message] ( identifier[ERROR_INIT] ))
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[user_id] , identifier[six] . identifier[string_types] +( identifier[Number] ,)):
identifier[on_error] ( identifier[ERROR_USER_ID] , identifier[__error_message] ( identifier[ERROR_USER_ID] ))
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[event] , identifier[six] . identifier[string_types] ):
identifier[on_error] ( identifier[ERROR_EVENT_NAME] , identifier[__error_message] ( identifier[ERROR_EVENT_NAME] ))
keyword[return]
identifier[data] = identifier[dict] ( identifier[user_id] = identifier[user_id] , identifier[event] = identifier[event] )
identifier[user] = identifier[__user] (
identifier[first_name] ,
identifier[last_name] ,
identifier[email] ,
identifier[phone_number] ,
identifier[apns_tokens] ,
identifier[gcm_tokens] ,
identifier[user_attributes] ,
keyword[None] , keyword[None] , keyword[None] )
keyword[if] identifier[user] :
identifier[data] [ literal[string] ]= identifier[user]
keyword[if] identifier[properties] :
keyword[if] identifier[isinstance] ( identifier[properties] , identifier[dict] ):
keyword[if] identifier[len] ( identifier[properties] )> literal[int] :
identifier[data] [ literal[string] ]= identifier[properties]
keyword[else] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] +
literal[string] % identifier[type] ( identifier[properties] ). identifier[__name__] )
keyword[if] identifier[timestamp] :
identifier[data] [ literal[string] ]= identifier[timestamp]
keyword[else] :
identifier[data] [ literal[string] ]= identifier[int] ( identifier[time] . identifier[time] ())
keyword[try] :
identifier[resp] = identifier[requests] . identifier[post] (
literal[string] % identifier[__BASE_URL] ,
identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ),
identifier[headers] = identifier[__HEADERS] ,
)
keyword[if] identifier[resp] . identifier[status_code] >= literal[int] keyword[and] identifier[resp] . identifier[status_code] < literal[int] :
identifier[on_success] ()
keyword[else] :
identifier[on_error] ( identifier[ERROR_UNKNOWN] , identifier[resp] . identifier[text] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] :
identifier[on_error] ( identifier[ERROR_CONNECTION] , identifier[__error_message] ( identifier[ERROR_CONNECTION] )) | def track(user_id, event, first_name=None, last_name=None, email=None, phone_number=None, apns_tokens=None, gcm_tokens=None, user_attributes=None, properties=None, on_error=None, on_success=None, timestamp=None):
""" For any event you want to track, when a user triggers that event you
would call this function.
You can do an identify and track call simultaneously by including all the
identifiable user information in the track call.
:param str | number user_id: the id of your user who triggered the event.
:param str first_name: OPTIONAL the user's first name.
:param str last_name: OPTIONAL the user's last name.
:param str email: OPTIONAL the user's email address.
:param str phone_number: OPTIONAL the user's phone number.
:param str | list apns_tokens: OPTIONAL the device tokens for the user's iOS
devices. If a single string is given it is put into a list.
:param str | list gcm_tokens: OPTIONAL the device tokens for the user's Android
devices. If a single string is given it is put into a list.
:param dict user_attributes: An optional dictionary with any additional
freeform attributes describing the user.
:param dict properties: An optional dictionary with any properties that
describe the event being tracked. Example: if the event were "added item to
cart", you might include a property named "item" that is the name
of the item added to the cart.
:param func on_error: An optional function to call in the event of an error.
on_error callback should take 2 parameters: an error code and the error message.
:param func on_success: An optional function to call if/when the API call succeeds.
on_success callback takes no parameters.
"""
on_error = on_error or __on_error
on_success = on_success or __on_success
if not __is_init():
on_error(ERROR_INIT, __error_message(ERROR_INIT))
return # depends on [control=['if'], data=[]]
if not isinstance(user_id, six.string_types + (Number,)):
on_error(ERROR_USER_ID, __error_message(ERROR_USER_ID))
return # depends on [control=['if'], data=[]]
if not isinstance(event, six.string_types):
on_error(ERROR_EVENT_NAME, __error_message(ERROR_EVENT_NAME))
return # depends on [control=['if'], data=[]]
data = dict(user_id=user_id, event=event)
user = __user(first_name, last_name, email, phone_number, apns_tokens, gcm_tokens, user_attributes, None, None, None)
if user:
data['user'] = user # depends on [control=['if'], data=[]]
if properties:
if isinstance(properties, dict):
if len(properties) > 0:
data['properties'] = properties # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
sys.stderr.write('Invalid event properties given. Expected dictionary. ' + 'Got %s' % type(properties).__name__) # depends on [control=['if'], data=[]]
if timestamp:
data['timestamp'] = timestamp # depends on [control=['if'], data=[]]
else:
data['timestamp'] = int(time.time())
try:
resp = requests.post('%s/track' % __BASE_URL, data=json.dumps(data), headers=__HEADERS)
if resp.status_code >= 200 and resp.status_code < 400:
on_success() # depends on [control=['if'], data=[]]
else:
on_error(ERROR_UNKNOWN, resp.text) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError:
on_error(ERROR_CONNECTION, __error_message(ERROR_CONNECTION)) # depends on [control=['except'], data=[]] |
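An example invocation of track() as defined above; the ids, event name, and properties are made up, and the error hook matches the (code, message) calling convention used in the body:
def on_err(code, message):
    print('track failed:', code, message)

track(user_id='u-123',
      event='added item to cart',
      email='jane@example.com',
      properties={'item': 'red shoes'},
      on_error=on_err)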
def get_todo_items(self, **kwargs):
'''
Returns an iterator that will provide each item in the todo queue. Note that to complete each item you have to call the complete method with the output of this iterator.
That will move the item to the done directory and prevent it from being retrieved in the future.
'''
def inner(self):
for item in self.get_all_as_list():
yield item
self._unlock()
if not self._is_locked():
if self._lock():
return inner(self)
raise RuntimeError("RuntimeError: Index Already Locked") | def function[get_todo_items, parameter[self]]:
constant[
Returns an iterator that will provide each item in the todo queue. Note that to complete each item you have to call the complete method with the output of this iterator.
That will move the item to the done directory and prevent it from being retrieved in the future.
]
def function[inner, parameter[self]]:
for taget[name[item]] in starred[call[name[self].get_all_as_list, parameter[]]] begin[:]
<ast.Yield object at 0x7da20c7c96c0>
call[name[self]._unlock, parameter[]]
if <ast.UnaryOp object at 0x7da1b0f517b0> begin[:]
if call[name[self]._lock, parameter[]] begin[:]
return[call[name[inner], parameter[name[self]]]]
<ast.Raise object at 0x7da1b0f510c0> | keyword[def] identifier[get_todo_items] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[inner] ( identifier[self] ):
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[get_all_as_list] ():
keyword[yield] identifier[item]
identifier[self] . identifier[_unlock] ()
keyword[if] keyword[not] identifier[self] . identifier[_is_locked] ():
keyword[if] identifier[self] . identifier[_lock] ():
keyword[return] identifier[inner] ( identifier[self] )
keyword[raise] identifier[RuntimeError] ( literal[string] ) | def get_todo_items(self, **kwargs):
"""
Returns an iterator that will provide each item in the todo queue. Note that to complete each item you have to run complete method with the output of this iterator.
That will move the item to the done directory and prevent it from being retrieved in the future.
"""
def inner(self):
for item in self.get_all_as_list():
yield item # depends on [control=['for'], data=['item']]
self._unlock()
if not self._is_locked():
if self._lock():
return inner(self) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
raise RuntimeError('RuntimeError: Index Already Locked') |
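A self-contained toy analogue of the lock / iterate / unlock shape above, with a plain list standing in for the file-backed index:
class ToyQueue:
    def __init__(self, items):
        self._items = list(items)
        self._locked = False

    def get_todo_items(self):
        def inner():
            for item in self._items:
                yield item
            self._locked = False          # unlock only once fully drained
        if self._locked:
            raise RuntimeError("Index Already Locked")
        self._locked = True
        return inner()

for item in ToyQueue(['a', 'b']).get_todo_items():
    print(item)                            # a, then b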
def congruent(self, other):
'''
A congruent B
True iff all angles of 'A' equal angles in 'B' and
all side lengths of 'A' equal all side lengths of 'B', boolean.
'''
a = set(self.angles)
b = set(other.angles)
if len(a) != len(b) or len(a.difference(b)) != 0:
return False
a = set(self.sides)
b = set(other.sides)
return len(a) == len(b) and len(a.difference(b)) == 0 | def function[congruent, parameter[self, other]]:
constant[
A congruent B
True iff all angles of 'A' equal angles in 'B' and
all side lengths of 'A' equal all side lengths of 'B', boolean.
]
variable[a] assign[=] call[name[set], parameter[name[self].angles]]
variable[b] assign[=] call[name[set], parameter[name[other].angles]]
if <ast.BoolOp object at 0x7da1b118cb20> begin[:]
return[constant[False]]
variable[a] assign[=] call[name[set], parameter[name[self].sides]]
variable[b] assign[=] call[name[set], parameter[name[other].sides]]
return[<ast.BoolOp object at 0x7da1b102a2c0>] | keyword[def] identifier[congruent] ( identifier[self] , identifier[other] ):
literal[string]
identifier[a] = identifier[set] ( identifier[self] . identifier[angles] )
identifier[b] = identifier[set] ( identifier[other] . identifier[angles] )
keyword[if] identifier[len] ( identifier[a] )!= identifier[len] ( identifier[b] ) keyword[or] identifier[len] ( identifier[a] . identifier[difference] ( identifier[b] ))!= literal[int] :
keyword[return] keyword[False]
identifier[a] = identifier[set] ( identifier[self] . identifier[sides] )
identifier[b] = identifier[set] ( identifier[other] . identifier[sides] )
keyword[return] identifier[len] ( identifier[a] )== identifier[len] ( identifier[b] ) keyword[and] identifier[len] ( identifier[a] . identifier[difference] ( identifier[b] ))== literal[int] | def congruent(self, other):
"""
A congruent B
True iff all angles of 'A' equal angles in 'B' and
all side lengths of 'A' equal all side lengths of 'B', boolean.
"""
a = set(self.angles)
b = set(other.angles)
if len(a) != len(b) or len(a.difference(b)) != 0:
return False # depends on [control=['if'], data=[]]
a = set(self.sides)
b = set(other.sides)
return len(a) == len(b) and len(a.difference(b)) == 0 |
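The method reduces congruence to set equality over angles and sides; the same reduction with plain tuples (note that sets collapse repeated angle/side values, exactly as the method above does):
def congruent_sets(angles_a, sides_a, angles_b, sides_b):
    return set(angles_a) == set(angles_b) and set(sides_a) == set(sides_b)

assert congruent_sets((60, 60, 60), (1, 1, 1), (60, 60, 60), (1, 1, 1))
assert not congruent_sets((90, 45, 45), (1, 1, 2 ** 0.5),
                          (60, 60, 60), (1, 1, 1))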
def finish(self, code=None, message=None, perfdata=None, extdata=None):
"""
exit when using internal functions to add results
automatically generates output, but each parameter can be overridden
all parameters are optional
arguments:
code: exit status code
message: a short, one-line message to display
perfdata: perfdata, if any
extdata: multi-line message to give more details
"""
if code is None:
code = self.get_code()
if message is None:
message = self.get_message(msglevels=[code])
if perfdata is None:
perfdata = self.get_perfdata()
if extdata is None:
extdata = self.get_extdata()
self.exit(code=code, message=message,
perfdata=perfdata, extdata=extdata) | def function[finish, parameter[self, code, message, perfdata, extdata]]:
constant[
exit when using internal functions to add results
automatically generates output, but each parameter can be overridden
all parameters are optional
arguments:
code: exit status code
message: a short, one-line message to display
perfdata: perfdata, if any
extdata: multi-line message to give more details
]
if compare[name[code] is constant[None]] begin[:]
variable[code] assign[=] call[name[self].get_code, parameter[]]
if compare[name[message] is constant[None]] begin[:]
variable[message] assign[=] call[name[self].get_message, parameter[]]
if compare[name[perfdata] is constant[None]] begin[:]
variable[perfdata] assign[=] call[name[self].get_perfdata, parameter[]]
if compare[name[extdata] is constant[None]] begin[:]
variable[extdata] assign[=] call[name[self].get_extdata, parameter[]]
call[name[self].exit, parameter[]] | keyword[def] identifier[finish] ( identifier[self] , identifier[code] = keyword[None] , identifier[message] = keyword[None] , identifier[perfdata] = keyword[None] , identifier[extdata] = keyword[None] ):
literal[string]
keyword[if] identifier[code] keyword[is] keyword[None] :
identifier[code] = identifier[self] . identifier[get_code] ()
keyword[if] identifier[message] keyword[is] keyword[None] :
identifier[message] = identifier[self] . identifier[get_message] ( identifier[msglevels] =[ identifier[code] ])
keyword[if] identifier[perfdata] keyword[is] keyword[None] :
identifier[perfdata] = identifier[self] . identifier[get_perfdata] ()
keyword[if] identifier[extdata] keyword[is] keyword[None] :
identifier[extdata] = identifier[self] . identifier[get_extdata] ()
identifier[self] . identifier[exit] ( identifier[code] = identifier[code] , identifier[message] = identifier[message] ,
identifier[perfdata] = identifier[perfdata] , identifier[extdata] = identifier[extdata] ) | def finish(self, code=None, message=None, perfdata=None, extdata=None):
"""
exit when using internal function to add results
automatically generates output, but each parameter can be overriden
all parameters are optional
arguments:
code: exit status code
message: a short, one-line message to display
perfdata: perfdata, if any
extdata: multi-line message to give more details
"""
if code is None:
code = self.get_code() # depends on [control=['if'], data=['code']]
if message is None:
message = self.get_message(msglevels=[code]) # depends on [control=['if'], data=['message']]
if perfdata is None:
perfdata = self.get_perfdata() # depends on [control=['if'], data=['perfdata']]
if extdata is None:
extdata = self.get_extdata() # depends on [control=['if'], data=['extdata']]
self.exit(code=code, message=message, perfdata=perfdata, extdata=extdata) |
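finish() leans on the None-sentinel idiom so omitted arguments are recomputed from instance state at call time; the idiom in isolation:
class Demo:
    def get_code(self):
        return 0                      # stand-in for accumulated results

    def finish(self, code=None):
        if code is None:              # only recompute what the caller omitted
            code = self.get_code()
        return code

assert Demo().finish() == 0          # derived from state
assert Demo().finish(code=2) == 2    # explicit override wins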
def post_save(self, obj, created=False):
"""indexes the object to ElasticSearch after any save function (POST/PUT)
:param obj: instance of the saved object
:param created: boolean expressing if object is newly created (`False` if updated)
:return: `rest_framework.viewset.ModelViewSet.post_save`
"""
from bulbs.content.tasks import index
index.delay(obj.polymorphic_ctype_id, obj.pk)
message = "Created" if created else "Saved"
LogEntry.objects.log(self.request.user, obj, message)
return super(ContentViewSet, self).post_save(obj, created=created) | def function[post_save, parameter[self, obj, created]]:
constant[indexes the object to ElasticSearch after any save function (POST/PUT)
:param obj: instance of the saved object
:param created: boolean expressing if object is newly created (`False` if updated)
:return: `rest_framework.viewset.ModelViewSet.post_save`
]
from relative_module[bulbs.content.tasks] import module[index]
call[name[index].delay, parameter[name[obj].polymorphic_ctype_id, name[obj].pk]]
variable[message] assign[=] <ast.IfExp object at 0x7da1b0a1d750>
call[name[LogEntry].objects.log, parameter[name[self].request.user, name[obj], name[message]]]
return[call[call[name[super], parameter[name[ContentViewSet], name[self]]].post_save, parameter[name[obj]]]] | keyword[def] identifier[post_save] ( identifier[self] , identifier[obj] , identifier[created] = keyword[False] ):
literal[string]
keyword[from] identifier[bulbs] . identifier[content] . identifier[tasks] keyword[import] identifier[index]
identifier[index] . identifier[delay] ( identifier[obj] . identifier[polymorphic_ctype_id] , identifier[obj] . identifier[pk] )
identifier[message] = literal[string] keyword[if] identifier[created] keyword[else] literal[string]
identifier[LogEntry] . identifier[objects] . identifier[log] ( identifier[self] . identifier[request] . identifier[user] , identifier[obj] , identifier[message] )
keyword[return] identifier[super] ( identifier[ContentViewSet] , identifier[self] ). identifier[post_save] ( identifier[obj] , identifier[created] = identifier[created] ) | def post_save(self, obj, created=False):
"""indexes the object to ElasticSearch after any save function (POST/PUT)
:param obj: instance of the saved object
:param created: boolean expressing if object is newly created (`False` if updated)
:return: `rest_framework.viewset.ModelViewSet.post_save`
"""
from bulbs.content.tasks import index
index.delay(obj.polymorphic_ctype_id, obj.pk)
message = 'Created' if created else 'Saved'
LogEntry.objects.log(self.request.user, obj, message)
return super(ContentViewSet, self).post_save(obj, created=created) |
def enable_death_signal(_warn=True):
"""
Set the "death signal" of the current process, so that
the current process will be cleaned with guarantee
in case the parent dies accidentally.
"""
if platform.system() != 'Linux':
return
try:
import prctl # pip install python-prctl
except ImportError:
if _warn:
log_once('"import prctl" failed! Install python-prctl so that processes can be cleaned with guarantee.',
'warn')
return
else:
assert hasattr(prctl, 'set_pdeathsig'), \
"prctl.set_pdeathsig does not exist! Note that you need to install 'python-prctl' instead of 'prctl'."
# is SIGHUP a good choice?
prctl.set_pdeathsig(signal.SIGHUP) | def function[enable_death_signal, parameter[_warn]]:
constant[
Set the "death signal" of the current process, so that
the current process will be cleaned with guarantee
in case the parent dies accidentally.
]
if compare[call[name[platform].system, parameter[]] not_equal[!=] constant[Linux]] begin[:]
return[None]
<ast.Try object at 0x7da18f00e590> | keyword[def] identifier[enable_death_signal] ( identifier[_warn] = keyword[True] ):
literal[string]
keyword[if] identifier[platform] . identifier[system] ()!= literal[string] :
keyword[return]
keyword[try] :
keyword[import] identifier[prctl]
keyword[except] identifier[ImportError] :
keyword[if] identifier[_warn] :
identifier[log_once] ( literal[string] ,
literal[string] )
keyword[return]
keyword[else] :
keyword[assert] identifier[hasattr] ( identifier[prctl] , literal[string] ), literal[string]
identifier[prctl] . identifier[set_pdeathsig] ( identifier[signal] . identifier[SIGHUP] ) | def enable_death_signal(_warn=True):
"""
Set the "death signal" of the current process, so that
the current process will be cleaned with guarantee
in case the parent dies accidentally.
"""
if platform.system() != 'Linux':
return # depends on [control=['if'], data=[]]
try:
import prctl # pip install python-prctl # depends on [control=['try'], data=[]]
except ImportError:
if _warn:
log_once('"import prctl" failed! Install python-prctl so that processes can be cleaned with guarantee.', 'warn') # depends on [control=['if'], data=[]]
return # depends on [control=['except'], data=[]]
else:
assert hasattr(prctl, 'set_pdeathsig'), "prctl.set_pdeathsig does not exist! Note that you need to install 'python-prctl' instead of 'prctl'."
# is SIGHUP a good choice?
prctl.set_pdeathsig(signal.SIGHUP) |
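Typical call site for the helper above: invoke it first thing inside a forked worker so the child receives SIGHUP if the parent dies; a minimal sketch (the sleep loop stands in for real work):
import multiprocessing as mp
import time

def worker():
    enable_death_signal()        # silently a no-op off Linux / without python-prctl
    while True:
        time.sleep(1)            # stand-in for real work

if __name__ == '__main__':
    p = mp.Process(target=worker, daemon=True)
    p.start()
    time.sleep(2)
    p.terminate()
    p.join()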
def prune(self, dir):
"""Filter out files from 'dir/'."""
match = translate_pattern(os.path.join(dir, '**'))
return self._remove_files(match.match) | def function[prune, parameter[self, dir]]:
constant[Filter out files from 'dir/'.]
variable[match] assign[=] call[name[translate_pattern], parameter[call[name[os].path.join, parameter[name[dir], constant[**]]]]]
return[call[name[self]._remove_files, parameter[name[match].match]]] | keyword[def] identifier[prune] ( identifier[self] , identifier[dir] ):
literal[string]
identifier[match] = identifier[translate_pattern] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , literal[string] ))
keyword[return] identifier[self] . identifier[_remove_files] ( identifier[match] . identifier[match] ) | def prune(self, dir):
"""Filter out files from 'dir/'."""
match = translate_pattern(os.path.join(dir, '**'))
return self._remove_files(match.match) |
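prune('tests') drops every listed file under tests/ via the generated 'tests/**' pattern; a hedged sketch against setuptools' FileList, which is assumed to supply the matching machinery used above:
from setuptools.command.egg_info import FileList

fl = FileList()
fl.files = ['pkg/a.py', 'tests/test_a.py']
fl.prune('tests')                       # removes everything under tests/
assert fl.files == ['pkg/a.py']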
def unsubscribe(self, ssid, max_msgs=0):
"""
Takes a subscription sequence id and removes the subscription
from the client, optionally after receiving more than max_msgs,
and unsubscribes immediately.
"""
if self.is_closed:
raise ErrConnectionClosed
sub = None
try:
sub = self._subs[ssid]
except KeyError:
# Already unsubscribed.
return
# In case subscription has already received enough messages
# then announce to the server that we are unsubscribing and
# remove the callback locally too.
if max_msgs == 0 or sub.received >= max_msgs:
self._subs.pop(ssid, None)
self._remove_subscription(sub)
# We will send these for all subs when we reconnect anyway,
# so we can suppress them here.
if not self.is_reconnecting:
yield self.auto_unsubscribe(ssid, max_msgs) | def function[unsubscribe, parameter[self, ssid, max_msgs]]:
constant[
Takes a subscription sequence id and removes the subscription
from the client, optionally after receiving more than max_msgs,
and unsubscribes immediately.
]
if name[self].is_closed begin[:]
<ast.Raise object at 0x7da1b049a140>
variable[sub] assign[=] constant[None]
<ast.Try object at 0x7da1b0499a50>
if <ast.BoolOp object at 0x7da1b0499c30> begin[:]
call[name[self]._subs.pop, parameter[name[ssid], constant[None]]]
call[name[self]._remove_subscription, parameter[name[sub]]]
if <ast.UnaryOp object at 0x7da1b04a63b0> begin[:]
<ast.Yield object at 0x7da1b04a4f10> | keyword[def] identifier[unsubscribe] ( identifier[self] , identifier[ssid] , identifier[max_msgs] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[is_closed] :
keyword[raise] identifier[ErrConnectionClosed]
identifier[sub] = keyword[None]
keyword[try] :
identifier[sub] = identifier[self] . identifier[_subs] [ identifier[ssid] ]
keyword[except] identifier[KeyError] :
keyword[return]
keyword[if] identifier[max_msgs] == literal[int] keyword[or] identifier[sub] . identifier[received] >= identifier[max_msgs] :
identifier[self] . identifier[_subs] . identifier[pop] ( identifier[ssid] , keyword[None] )
identifier[self] . identifier[_remove_subscription] ( identifier[sub] )
keyword[if] keyword[not] identifier[self] . identifier[is_reconnecting] :
keyword[yield] identifier[self] . identifier[auto_unsubscribe] ( identifier[ssid] , identifier[max_msgs] ) | def unsubscribe(self, ssid, max_msgs=0):
"""
Takes a subscription sequence id and removes the subscription
from the client, optionally after receiving more than max_msgs,
    and unsubscribes immediately.
"""
if self.is_closed:
raise ErrConnectionClosed # depends on [control=['if'], data=[]]
sub = None
try:
sub = self._subs[ssid] # depends on [control=['try'], data=[]]
except KeyError:
# Already unsubscribed.
return # depends on [control=['except'], data=[]]
# In case subscription has already received enough messages
# then announce to the server that we are unsubscribing and
# remove the callback locally too.
if max_msgs == 0 or sub.received >= max_msgs:
self._subs.pop(ssid, None)
self._remove_subscription(sub) # depends on [control=['if'], data=[]]
# We will send these for all subs when we reconnect anyway,
# so we can suppress them here.
if not self.is_reconnecting:
yield self.auto_unsubscribe(ssid, max_msgs) # depends on [control=['if'], data=[]] |
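A rough usage sketch for the unsubscribe coroutine above. The import path, the connect() arguments, and the assumption that subscribe() yields the subscription sequence id are guesses about the Tornado-based NATS client this method belongs to; treat them as illustrative only.

from tornado import gen, ioloop
from nats.io.client import Client as NATS   # assumed import path

@gen.coroutine
def main():
    nc = NATS()
    yield nc.connect(servers=["nats://127.0.0.1:4222"])   # assumed URL
    sid = yield nc.subscribe("updates", cb=lambda msg: print(msg.data))
    yield nc.unsubscribe(sid)   # drop the subscription immediately

ioloop.IOLoop.current().run_sync(main)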
def _get_enterprise_operator_users_batch(self, start, end):
"""
Returns a batched queryset of User objects.
"""
LOGGER.info('Fetching new batch of enterprise operator users from indexes: %s to %s', start, end)
return User.objects.filter(groups__name=ENTERPRISE_DATA_API_ACCESS_GROUP, is_staff=True)[start:end] | def function[_get_enterprise_operator_users_batch, parameter[self, start, end]]:
constant[
Returns a batched queryset of User objects.
]
call[name[LOGGER].info, parameter[constant[Fetching new batch of enterprise operator users from indexes: %s to %s], name[start], name[end]]]
return[call[call[name[User].objects.filter, parameter[]]][<ast.Slice object at 0x7da1b0053a90>]] | keyword[def] identifier[_get_enterprise_operator_users_batch] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[start] , identifier[end] )
keyword[return] identifier[User] . identifier[objects] . identifier[filter] ( identifier[groups__name] = identifier[ENTERPRISE_DATA_API_ACCESS_GROUP] , identifier[is_staff] = keyword[True] )[ identifier[start] : identifier[end] ] | def _get_enterprise_operator_users_batch(self, start, end):
"""
Returns a batched queryset of User objects.
"""
LOGGER.info('Fetching new batch of enterprise operator users from indexes: %s to %s', start, end)
return User.objects.filter(groups__name=ENTERPRISE_DATA_API_ACCESS_GROUP, is_staff=True)[start:end] |
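A hypothetical caller for the batch helper above, written as another method of the same class. Queryset slicing compiles to SQL LIMIT/OFFSET, so each iteration issues one bounded query; BATCH_SIZE and process() are placeholders.

BATCH_SIZE = 500

def handle_all_operator_users(self):
    start = 0
    while True:
        batch = list(self._get_enterprise_operator_users_batch(start, start + BATCH_SIZE))
        if not batch:
            break
        for user in batch:
            process(user)   # hypothetical per-user handler
        start += BATCH_SIZE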
def add_date_facet(self, *args, **kwargs):
"""Add a date factory facet"""
self.facets.append(DateHistogramFacet(*args, **kwargs)) | def function[add_date_facet, parameter[self]]:
    constant[Add a date histogram facet]
call[name[self].facets.append, parameter[call[name[DateHistogramFacet], parameter[<ast.Starred object at 0x7da20e962980>]]]] | keyword[def] identifier[add_date_facet] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[facets] . identifier[append] ( identifier[DateHistogramFacet] (* identifier[args] ,** identifier[kwargs] )) | def add_date_facet(self, *args, **kwargs):
"""Add a date factory facet"""
self.facets.append(DateHistogramFacet(*args, **kwargs)) |
def play(self, wav=None, data=None, rate=16000, channels=1, width=2, block=True, spectrum=None):
"""
play wav file or raw audio (string or generator)
Args:
wav: wav file path
data: raw audio data, str or iterator
rate: sample rate, only for raw audio
channels: channel number, only for raw data
width: raw audio data width, 16 bit is 2, only for raw data
block: if true, block until audio is played.
spectrum: if true, use a spectrum analyzer thread to analyze data
"""
if wav:
f = wave.open(wav, 'rb')
rate = f.getframerate()
channels = f.getnchannels()
width = f.getsampwidth()
def gen(w):
d = w.readframes(CHUNK_SIZE)
while d:
yield d
d = w.readframes(CHUNK_SIZE)
w.close()
data = gen(f)
self.stop_event.clear()
if block:
self._play(data, rate, channels, width, spectrum)
else:
thread = threading.Thread(target=self._play, args=(data, rate, channels, width, spectrum))
thread.start() | def function[play, parameter[self, wav, data, rate, channels, width, block, spectrum]]:
constant[
play wav file or raw audio (string or generator)
Args:
wav: wav file path
data: raw audio data, str or iterator
rate: sample rate, only for raw audio
channels: channel number, only for raw data
width: raw audio data width, 16 bit is 2, only for raw data
block: if true, block until audio is played.
spectrum: if true, use a spectrum analyzer thread to analyze data
]
if name[wav] begin[:]
variable[f] assign[=] call[name[wave].open, parameter[name[wav], constant[rb]]]
variable[rate] assign[=] call[name[f].getframerate, parameter[]]
variable[channels] assign[=] call[name[f].getnchannels, parameter[]]
variable[width] assign[=] call[name[f].getsampwidth, parameter[]]
def function[gen, parameter[w]]:
variable[d] assign[=] call[name[w].readframes, parameter[name[CHUNK_SIZE]]]
while name[d] begin[:]
<ast.Yield object at 0x7da1b06ad3c0>
variable[d] assign[=] call[name[w].readframes, parameter[name[CHUNK_SIZE]]]
call[name[w].close, parameter[]]
variable[data] assign[=] call[name[gen], parameter[name[f]]]
call[name[self].stop_event.clear, parameter[]]
if name[block] begin[:]
call[name[self]._play, parameter[name[data], name[rate], name[channels], name[width], name[spectrum]]] | keyword[def] identifier[play] ( identifier[self] , identifier[wav] = keyword[None] , identifier[data] = keyword[None] , identifier[rate] = literal[int] , identifier[channels] = literal[int] , identifier[width] = literal[int] , identifier[block] = keyword[True] , identifier[spectrum] = keyword[None] ):
literal[string]
keyword[if] identifier[wav] :
identifier[f] = identifier[wave] . identifier[open] ( identifier[wav] , literal[string] )
identifier[rate] = identifier[f] . identifier[getframerate] ()
identifier[channels] = identifier[f] . identifier[getnchannels] ()
identifier[width] = identifier[f] . identifier[getsampwidth] ()
keyword[def] identifier[gen] ( identifier[w] ):
identifier[d] = identifier[w] . identifier[readframes] ( identifier[CHUNK_SIZE] )
keyword[while] identifier[d] :
keyword[yield] identifier[d]
identifier[d] = identifier[w] . identifier[readframes] ( identifier[CHUNK_SIZE] )
identifier[w] . identifier[close] ()
identifier[data] = identifier[gen] ( identifier[f] )
identifier[self] . identifier[stop_event] . identifier[clear] ()
keyword[if] identifier[block] :
identifier[self] . identifier[_play] ( identifier[data] , identifier[rate] , identifier[channels] , identifier[width] , identifier[spectrum] )
keyword[else] :
identifier[thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[_play] , identifier[args] =( identifier[data] , identifier[rate] , identifier[channels] , identifier[width] , identifier[spectrum] ))
identifier[thread] . identifier[start] () | def play(self, wav=None, data=None, rate=16000, channels=1, width=2, block=True, spectrum=None):
"""
play wav file or raw audio (string or generator)
Args:
wav: wav file path
data: raw audio data, str or iterator
rate: sample rate, only for raw audio
channels: channel number, only for raw data
width: raw audio data width, 16 bit is 2, only for raw data
block: if true, block until audio is played.
spectrum: if true, use a spectrum analyzer thread to analyze data
"""
if wav:
f = wave.open(wav, 'rb')
rate = f.getframerate()
channels = f.getnchannels()
width = f.getsampwidth()
def gen(w):
d = w.readframes(CHUNK_SIZE)
while d:
yield d
d = w.readframes(CHUNK_SIZE) # depends on [control=['while'], data=[]]
w.close()
data = gen(f) # depends on [control=['if'], data=[]]
self.stop_event.clear()
if block:
self._play(data, rate, channels, width, spectrum) # depends on [control=['if'], data=[]]
else:
thread = threading.Thread(target=self._play, args=(data, rate, channels, width, spectrum))
thread.start() |
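A hedged usage sketch, assuming player is an instance of the class defining play() above and that hello.wav exists; pcm_bytes stands in for raw 16-bit mono audio.

player.play(wav="hello.wav")                  # blocks until playback ends
player.play(wav="hello.wav", block=False)     # returns immediately, plays on a thread
player.play(data=pcm_bytes, rate=16000, channels=1, width=2)   # raw PCM path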
def save_file(self, data, dfile):
"""
Saves the Instances object in the specified file.
:param data: the data to save
:type data: Instances
:param dfile: the file to save the data to
:type dfile: str
"""
self.enforce_type(self.jobject, "weka.core.converters.FileSourcedConverter")
if not javabridge.is_instance_of(dfile, "Ljava/io/File;"):
dfile = javabridge.make_instance(
"Ljava/io/File;", "(Ljava/lang/String;)V", javabridge.get_env().new_string_utf(str(dfile)))
javabridge.call(self.jobject, "setFile", "(Ljava/io/File;)V", dfile)
javabridge.call(self.jobject, "setInstances", "(Lweka/core/Instances;)V", data.jobject)
javabridge.call(self.jobject, "writeBatch", "()V") | def function[save_file, parameter[self, data, dfile]]:
constant[
Saves the Instances object in the specified file.
:param data: the data to save
:type data: Instances
:param dfile: the file to save the data to
:type dfile: str
]
call[name[self].enforce_type, parameter[name[self].jobject, constant[weka.core.converters.FileSourcedConverter]]]
if <ast.UnaryOp object at 0x7da1b0659e10> begin[:]
variable[dfile] assign[=] call[name[javabridge].make_instance, parameter[constant[Ljava/io/File;], constant[(Ljava/lang/String;)V], call[call[name[javabridge].get_env, parameter[]].new_string_utf, parameter[call[name[str], parameter[name[dfile]]]]]]]
call[name[javabridge].call, parameter[name[self].jobject, constant[setFile], constant[(Ljava/io/File;)V], name[dfile]]]
call[name[javabridge].call, parameter[name[self].jobject, constant[setInstances], constant[(Lweka/core/Instances;)V], name[data].jobject]]
call[name[javabridge].call, parameter[name[self].jobject, constant[writeBatch], constant[()V]]] | keyword[def] identifier[save_file] ( identifier[self] , identifier[data] , identifier[dfile] ):
literal[string]
identifier[self] . identifier[enforce_type] ( identifier[self] . identifier[jobject] , literal[string] )
keyword[if] keyword[not] identifier[javabridge] . identifier[is_instance_of] ( identifier[dfile] , literal[string] ):
identifier[dfile] = identifier[javabridge] . identifier[make_instance] (
literal[string] , literal[string] , identifier[javabridge] . identifier[get_env] (). identifier[new_string_utf] ( identifier[str] ( identifier[dfile] )))
identifier[javabridge] . identifier[call] ( identifier[self] . identifier[jobject] , literal[string] , literal[string] , identifier[dfile] )
identifier[javabridge] . identifier[call] ( identifier[self] . identifier[jobject] , literal[string] , literal[string] , identifier[data] . identifier[jobject] )
identifier[javabridge] . identifier[call] ( identifier[self] . identifier[jobject] , literal[string] , literal[string] ) | def save_file(self, data, dfile):
"""
Saves the Instances object in the specified file.
:param data: the data to save
:type data: Instances
:param dfile: the file to save the data to
:type dfile: str
"""
self.enforce_type(self.jobject, 'weka.core.converters.FileSourcedConverter')
if not javabridge.is_instance_of(dfile, 'Ljava/io/File;'):
dfile = javabridge.make_instance('Ljava/io/File;', '(Ljava/lang/String;)V', javabridge.get_env().new_string_utf(str(dfile))) # depends on [control=['if'], data=[]]
javabridge.call(self.jobject, 'setFile', '(Ljava/io/File;)V', dfile)
javabridge.call(self.jobject, 'setInstances', '(Lweka/core/Instances;)V', data.jobject)
javabridge.call(self.jobject, 'writeBatch', '()V') |
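A usage sketch following python-weka-wrapper conventions, which this save_file() appears to come from. The JVM must be started first; the ArffLoader/ArffSaver classnames and file paths are assumptions.

import weka.core.jvm as jvm
from weka.core.converters import Loader, Saver

jvm.start()
loader = Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file("iris.arff")            # assumed input file
saver = Saver(classname="weka.core.converters.ArffSaver")
saver.save_file(data, "/tmp/iris_copy.arff")    # assumed output path
jvm.stop()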
def _compute_site_scaling(self, vs30, mean):
"""
    Scales the ground motions, increasing them by 40 % on NEHRP class D/E
    sites and decreasing them by 40 % on NEHRP class A/B sites
"""
site_factor = np.ones(len(vs30), dtype=float)
idx = vs30 <= 360.
site_factor[idx] = 1.4
idx = vs30 > 760.0
site_factor[idx] = 0.6
return np.log(np.exp(mean) * site_factor) | def function[_compute_site_scaling, parameter[self, vs30, mean]]:
constant[
    Scales the ground motions, increasing them by 40 % on NEHRP class D/E
    sites and decreasing them by 40 % on NEHRP class A/B sites
]
variable[site_factor] assign[=] call[name[np].ones, parameter[call[name[len], parameter[name[vs30]]]]]
variable[idx] assign[=] compare[name[vs30] less_or_equal[<=] constant[360.0]]
call[name[site_factor]][name[idx]] assign[=] constant[1.4]
variable[idx] assign[=] compare[name[vs30] greater[>] constant[760.0]]
call[name[site_factor]][name[idx]] assign[=] constant[0.6]
return[call[name[np].log, parameter[binary_operation[call[name[np].exp, parameter[name[mean]]] * name[site_factor]]]]] | keyword[def] identifier[_compute_site_scaling] ( identifier[self] , identifier[vs30] , identifier[mean] ):
literal[string]
identifier[site_factor] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[vs30] ), identifier[dtype] = identifier[float] )
identifier[idx] = identifier[vs30] <= literal[int]
identifier[site_factor] [ identifier[idx] ]= literal[int]
identifier[idx] = identifier[vs30] > literal[int]
identifier[site_factor] [ identifier[idx] ]= literal[int]
keyword[return] identifier[np] . identifier[log] ( identifier[np] . identifier[exp] ( identifier[mean] )* identifier[site_factor] ) | def _compute_site_scaling(self, vs30, mean):
"""
    Scales the ground motions, increasing them by 40 % on NEHRP class D/E
    sites and decreasing them by 40 % on NEHRP class A/B sites
"""
site_factor = np.ones(len(vs30), dtype=float)
idx = vs30 <= 360.0
site_factor[idx] = 1.4
idx = vs30 > 760.0
site_factor[idx] = 0.6
return np.log(np.exp(mean) * site_factor) |
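A standalone sketch of the same scaling rule, runnable without the surrounding GMPE class: soft sites (vs30 <= 360 m/s) get a 1.4 factor, hard sites (vs30 > 760 m/s) a 0.6 factor, applied in linear space and returned in log space.

import numpy as np

vs30 = np.array([200.0, 500.0, 800.0])
mean = np.zeros(3)                       # ln of ground motion, here ln(1.0)
factor = np.ones(len(vs30))
factor[vs30 <= 360.0] = 1.4
factor[vs30 > 760.0] = 0.6
print(np.log(np.exp(mean) * factor))     # ~[ 0.3365, 0.0, -0.5108]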
def remove_trunk_ports(self):
"""SDN Controller disable trunk ports
:rtype: list[tuple[str, str]]
"""
ports = self.attributes.get("{}Disable Full Trunk Ports".format(self.namespace_prefix), None)
return self._parse_ports(ports=ports) | def function[remove_trunk_ports, parameter[self]]:
    constant[SDN Controller trunk ports to disable
:rtype: list[tuple[str, str]]
]
variable[ports] assign[=] call[name[self].attributes.get, parameter[call[constant[{}Disable Full Trunk Ports].format, parameter[name[self].namespace_prefix]], constant[None]]]
return[call[name[self]._parse_ports, parameter[]]] | keyword[def] identifier[remove_trunk_ports] ( identifier[self] ):
literal[string]
identifier[ports] = identifier[self] . identifier[attributes] . identifier[get] ( literal[string] . identifier[format] ( identifier[self] . identifier[namespace_prefix] ), keyword[None] )
keyword[return] identifier[self] . identifier[_parse_ports] ( identifier[ports] = identifier[ports] ) | def remove_trunk_ports(self):
"""SDN Controller disable trunk ports
:rtype: list[tuple[str, str]]
"""
ports = self.attributes.get('{}Disable Full Trunk Ports'.format(self.namespace_prefix), None)
return self._parse_ports(ports=ports) |
def note_and_log(cls):
"""
    This will be used as a decorator on a class to activate
    logging and store messages in the variable cls._notes
    This will allow quick access to events in the web app.
    A note can be added to cls._notes without logging by passing
    the argument log=False to function note()
    Something can be logged without adding a note using function log()
"""
if hasattr(cls, "DEBUG_LEVEL"):
if cls.DEBUG_LEVEL == "debug":
file_level = logging.DEBUG
console_level = logging.DEBUG
elif cls.DEBUG_LEVEL == "info":
file_level = logging.INFO
console_level = logging.INFO
else:
file_level = logging.WARNING
console_level = logging.INFO
# Notes object
cls._notes = namedtuple("_notes", ["timestamp", "notes"])
cls._notes.timestamp = []
cls._notes.notes = []
# Defining log object
cls.logname = "{} | {}".format(cls.__module__, cls.__name__)
root_logger = logging.getLogger()
cls._log = logging.getLogger("BAC0")
if not len(root_logger.handlers):
root_logger.addHandler(cls._log)
# Console Handler
ch = logging.StreamHandler()
ch.set_name("stderr")
ch2 = logging.StreamHandler(sys.stdout)
ch2.set_name("stdout")
ch.setLevel(console_level)
ch2.setLevel(logging.CRITICAL)
formatter = logging.Formatter("{asctime} - {levelname:<8}| {message}", style="{")
# File Handler
_PERMISSION_TO_WRITE = True
logUserPath = expanduser("~")
logSaveFilePath = join(logUserPath, ".BAC0")
logFile = join(logSaveFilePath, "BAC0.log")
if not os.path.exists(logSaveFilePath):
try:
os.makedirs(logSaveFilePath)
except:
_PERMISSION_TO_WRITE = False
if _PERMISSION_TO_WRITE:
fh = FileHandler(logFile)
fh.set_name("file_handler")
fh.setLevel(file_level)
fh.setFormatter(formatter)
ch.setFormatter(formatter)
ch2.setFormatter(formatter)
# Add handlers the first time only...
if not len(cls._log.handlers):
if _PERMISSION_TO_WRITE:
cls._log.addHandler(fh)
cls._log.addHandler(ch)
cls._log.addHandler(ch2)
# cls._log.setLevel(logging.CRITICAL)
def log_title(self, title, args=None, width=35):
cls._log.debug("")
cls._log.debug("#" * width)
cls._log.debug("# {}".format(title))
cls._log.debug("#" * width)
if args:
cls._log.debug("{!r}".format(args))
cls._log.debug("#" * 35)
def log_subtitle(self, subtitle, args=None, width=35):
cls._log.debug("")
cls._log.debug("=" * width)
cls._log.debug("{}".format(subtitle))
cls._log.debug("=" * width)
if args:
cls._log.debug("{!r}".format(args))
cls._log.debug("=" * width)
def log(self, note, *, level=logging.DEBUG):
"""
Add a log entry...no note
"""
if not note:
raise ValueError("Provide something to log")
note = "{} | {}".format(cls.logname, note)
cls._log.log(level, note)
def note(self, note, *, level=logging.INFO, log=True):
"""
Add note to the object. By default, the note will also
be logged
:param note: (str) The note itself
:param level: (logging.level)
:param log: (boolean) Enable or disable logging of note
"""
if not note:
raise ValueError("Provide something to log")
note = "{} | {}".format(cls.logname, note)
cls._notes.timestamp.append(datetime.now())
cls._notes.notes.append(note)
if log:
cls.log(level, note)
@property
def notes(self):
"""
Retrieve notes list as a Pandas Series
"""
if not _PANDAS:
return dict(zip(self._notes.timestamp, self._notes.notes))
return pd.Series(self._notes.notes, index=self._notes.timestamp)
def clear_notes(self):
"""
Clear notes object
"""
cls._notes.timestamp = []
cls._notes.notes = []
# Add the functions to the decorated class
cls.clear_notes = clear_notes
cls.note = note
cls.notes = notes
cls.log = log
cls.log_title = log_title
cls.log_subtitle = log_subtitle
return cls | def function[note_and_log, parameter[cls]]:
constant[
    This will be used as a decorator on a class to activate
    logging and store messages in the variable cls._notes
    This will allow quick access to events in the web app.
    A note can be added to cls._notes without logging by passing
    the argument log=False to function note()
    Something can be logged without adding a note using function log()
]
if call[name[hasattr], parameter[name[cls], constant[DEBUG_LEVEL]]] begin[:]
if compare[name[cls].DEBUG_LEVEL equal[==] constant[debug]] begin[:]
variable[file_level] assign[=] name[logging].DEBUG
variable[console_level] assign[=] name[logging].DEBUG
name[cls]._notes assign[=] call[name[namedtuple], parameter[constant[_notes], list[[<ast.Constant object at 0x7da1b0505210>, <ast.Constant object at 0x7da1b0504e20>]]]]
name[cls]._notes.timestamp assign[=] list[[]]
name[cls]._notes.notes assign[=] list[[]]
name[cls].logname assign[=] call[constant[{} | {}].format, parameter[name[cls].__module__, name[cls].__name__]]
variable[root_logger] assign[=] call[name[logging].getLogger, parameter[]]
name[cls]._log assign[=] call[name[logging].getLogger, parameter[constant[BAC0]]]
if <ast.UnaryOp object at 0x7da1b0504ee0> begin[:]
call[name[root_logger].addHandler, parameter[name[cls]._log]]
variable[ch] assign[=] call[name[logging].StreamHandler, parameter[]]
call[name[ch].set_name, parameter[constant[stderr]]]
variable[ch2] assign[=] call[name[logging].StreamHandler, parameter[name[sys].stdout]]
call[name[ch2].set_name, parameter[constant[stdout]]]
call[name[ch].setLevel, parameter[name[console_level]]]
call[name[ch2].setLevel, parameter[name[logging].CRITICAL]]
variable[formatter] assign[=] call[name[logging].Formatter, parameter[constant[{asctime} - {levelname:<8}| {message}]]]
variable[_PERMISSION_TO_WRITE] assign[=] constant[True]
variable[logUserPath] assign[=] call[name[expanduser], parameter[constant[~]]]
variable[logSaveFilePath] assign[=] call[name[join], parameter[name[logUserPath], constant[.BAC0]]]
variable[logFile] assign[=] call[name[join], parameter[name[logSaveFilePath], constant[BAC0.log]]]
if <ast.UnaryOp object at 0x7da1b042ed40> begin[:]
<ast.Try object at 0x7da1b042d3c0>
if name[_PERMISSION_TO_WRITE] begin[:]
variable[fh] assign[=] call[name[FileHandler], parameter[name[logFile]]]
call[name[fh].set_name, parameter[constant[file_handler]]]
call[name[fh].setLevel, parameter[name[file_level]]]
call[name[fh].setFormatter, parameter[name[formatter]]]
call[name[ch].setFormatter, parameter[name[formatter]]]
call[name[ch2].setFormatter, parameter[name[formatter]]]
if <ast.UnaryOp object at 0x7da1b042eef0> begin[:]
if name[_PERMISSION_TO_WRITE] begin[:]
call[name[cls]._log.addHandler, parameter[name[fh]]]
call[name[cls]._log.addHandler, parameter[name[ch]]]
call[name[cls]._log.addHandler, parameter[name[ch2]]]
def function[log_title, parameter[self, title, args, width]]:
call[name[cls]._log.debug, parameter[constant[]]]
call[name[cls]._log.debug, parameter[binary_operation[constant[#] * name[width]]]]
call[name[cls]._log.debug, parameter[call[constant[# {}].format, parameter[name[title]]]]]
call[name[cls]._log.debug, parameter[binary_operation[constant[#] * name[width]]]]
if name[args] begin[:]
call[name[cls]._log.debug, parameter[call[constant[{!r}].format, parameter[name[args]]]]]
call[name[cls]._log.debug, parameter[binary_operation[constant[#] * constant[35]]]]
def function[log_subtitle, parameter[self, subtitle, args, width]]:
call[name[cls]._log.debug, parameter[constant[]]]
call[name[cls]._log.debug, parameter[binary_operation[constant[=] * name[width]]]]
call[name[cls]._log.debug, parameter[call[constant[{}].format, parameter[name[subtitle]]]]]
call[name[cls]._log.debug, parameter[binary_operation[constant[=] * name[width]]]]
if name[args] begin[:]
call[name[cls]._log.debug, parameter[call[constant[{!r}].format, parameter[name[args]]]]]
call[name[cls]._log.debug, parameter[binary_operation[constant[=] * name[width]]]]
def function[log, parameter[self, note]]:
constant[
Add a log entry...no note
]
if <ast.UnaryOp object at 0x7da1b04556c0> begin[:]
<ast.Raise object at 0x7da1b0455960>
variable[note] assign[=] call[constant[{} | {}].format, parameter[name[cls].logname, name[note]]]
call[name[cls]._log.log, parameter[name[level], name[note]]]
def function[note, parameter[self, note]]:
constant[
Add note to the object. By default, the note will also
be logged
:param note: (str) The note itself
:param level: (logging.level)
:param log: (boolean) Enable or disable logging of note
]
if <ast.UnaryOp object at 0x7da1b04557b0> begin[:]
<ast.Raise object at 0x7da1b0454fa0>
variable[note] assign[=] call[constant[{} | {}].format, parameter[name[cls].logname, name[note]]]
call[name[cls]._notes.timestamp.append, parameter[call[name[datetime].now, parameter[]]]]
call[name[cls]._notes.notes.append, parameter[name[note]]]
if name[log] begin[:]
call[name[cls].log, parameter[name[level], name[note]]]
def function[notes, parameter[self]]:
constant[
Retrieve notes list as a Pandas Series
]
if <ast.UnaryOp object at 0x7da1b045f160> begin[:]
return[call[name[dict], parameter[call[name[zip], parameter[name[self]._notes.timestamp, name[self]._notes.notes]]]]]
return[call[name[pd].Series, parameter[name[self]._notes.notes]]]
def function[clear_notes, parameter[self]]:
constant[
Clear notes object
]
name[cls]._notes.timestamp assign[=] list[[]]
name[cls]._notes.notes assign[=] list[[]]
name[cls].clear_notes assign[=] name[clear_notes]
name[cls].note assign[=] name[note]
name[cls].notes assign[=] name[notes]
name[cls].log assign[=] name[log]
name[cls].log_title assign[=] name[log_title]
name[cls].log_subtitle assign[=] name[log_subtitle]
return[name[cls]] | keyword[def] identifier[note_and_log] ( identifier[cls] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[cls] , literal[string] ):
keyword[if] identifier[cls] . identifier[DEBUG_LEVEL] == literal[string] :
identifier[file_level] = identifier[logging] . identifier[DEBUG]
identifier[console_level] = identifier[logging] . identifier[DEBUG]
keyword[elif] identifier[cls] . identifier[DEBUG_LEVEL] == literal[string] :
identifier[file_level] = identifier[logging] . identifier[INFO]
identifier[console_level] = identifier[logging] . identifier[INFO]
keyword[else] :
identifier[file_level] = identifier[logging] . identifier[WARNING]
identifier[console_level] = identifier[logging] . identifier[INFO]
identifier[cls] . identifier[_notes] = identifier[namedtuple] ( literal[string] ,[ literal[string] , literal[string] ])
identifier[cls] . identifier[_notes] . identifier[timestamp] =[]
identifier[cls] . identifier[_notes] . identifier[notes] =[]
identifier[cls] . identifier[logname] = literal[string] . identifier[format] ( identifier[cls] . identifier[__module__] , identifier[cls] . identifier[__name__] )
identifier[root_logger] = identifier[logging] . identifier[getLogger] ()
identifier[cls] . identifier[_log] = identifier[logging] . identifier[getLogger] ( literal[string] )
keyword[if] keyword[not] identifier[len] ( identifier[root_logger] . identifier[handlers] ):
identifier[root_logger] . identifier[addHandler] ( identifier[cls] . identifier[_log] )
identifier[ch] = identifier[logging] . identifier[StreamHandler] ()
identifier[ch] . identifier[set_name] ( literal[string] )
identifier[ch2] = identifier[logging] . identifier[StreamHandler] ( identifier[sys] . identifier[stdout] )
identifier[ch2] . identifier[set_name] ( literal[string] )
identifier[ch] . identifier[setLevel] ( identifier[console_level] )
identifier[ch2] . identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[formatter] = identifier[logging] . identifier[Formatter] ( literal[string] , identifier[style] = literal[string] )
identifier[_PERMISSION_TO_WRITE] = keyword[True]
identifier[logUserPath] = identifier[expanduser] ( literal[string] )
identifier[logSaveFilePath] = identifier[join] ( identifier[logUserPath] , literal[string] )
identifier[logFile] = identifier[join] ( identifier[logSaveFilePath] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[logSaveFilePath] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[logSaveFilePath] )
keyword[except] :
identifier[_PERMISSION_TO_WRITE] = keyword[False]
keyword[if] identifier[_PERMISSION_TO_WRITE] :
identifier[fh] = identifier[FileHandler] ( identifier[logFile] )
identifier[fh] . identifier[set_name] ( literal[string] )
identifier[fh] . identifier[setLevel] ( identifier[file_level] )
identifier[fh] . identifier[setFormatter] ( identifier[formatter] )
identifier[ch] . identifier[setFormatter] ( identifier[formatter] )
identifier[ch2] . identifier[setFormatter] ( identifier[formatter] )
keyword[if] keyword[not] identifier[len] ( identifier[cls] . identifier[_log] . identifier[handlers] ):
keyword[if] identifier[_PERMISSION_TO_WRITE] :
identifier[cls] . identifier[_log] . identifier[addHandler] ( identifier[fh] )
identifier[cls] . identifier[_log] . identifier[addHandler] ( identifier[ch] )
identifier[cls] . identifier[_log] . identifier[addHandler] ( identifier[ch2] )
keyword[def] identifier[log_title] ( identifier[self] , identifier[title] , identifier[args] = keyword[None] , identifier[width] = literal[int] ):
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] * identifier[width] )
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[title] ))
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] * identifier[width] )
keyword[if] identifier[args] :
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[args] ))
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] * literal[int] )
keyword[def] identifier[log_subtitle] ( identifier[self] , identifier[subtitle] , identifier[args] = keyword[None] , identifier[width] = literal[int] ):
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] * identifier[width] )
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[subtitle] ))
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] * identifier[width] )
keyword[if] identifier[args] :
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[args] ))
identifier[cls] . identifier[_log] . identifier[debug] ( literal[string] * identifier[width] )
keyword[def] identifier[log] ( identifier[self] , identifier[note] ,*, identifier[level] = identifier[logging] . identifier[DEBUG] ):
literal[string]
keyword[if] keyword[not] identifier[note] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[note] = literal[string] . identifier[format] ( identifier[cls] . identifier[logname] , identifier[note] )
identifier[cls] . identifier[_log] . identifier[log] ( identifier[level] , identifier[note] )
keyword[def] identifier[note] ( identifier[self] , identifier[note] ,*, identifier[level] = identifier[logging] . identifier[INFO] , identifier[log] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[note] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[note] = literal[string] . identifier[format] ( identifier[cls] . identifier[logname] , identifier[note] )
identifier[cls] . identifier[_notes] . identifier[timestamp] . identifier[append] ( identifier[datetime] . identifier[now] ())
identifier[cls] . identifier[_notes] . identifier[notes] . identifier[append] ( identifier[note] )
keyword[if] identifier[log] :
identifier[cls] . identifier[log] ( identifier[level] , identifier[note] )
@ identifier[property]
keyword[def] identifier[notes] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[_PANDAS] :
keyword[return] identifier[dict] ( identifier[zip] ( identifier[self] . identifier[_notes] . identifier[timestamp] , identifier[self] . identifier[_notes] . identifier[notes] ))
keyword[return] identifier[pd] . identifier[Series] ( identifier[self] . identifier[_notes] . identifier[notes] , identifier[index] = identifier[self] . identifier[_notes] . identifier[timestamp] )
keyword[def] identifier[clear_notes] ( identifier[self] ):
literal[string]
identifier[cls] . identifier[_notes] . identifier[timestamp] =[]
identifier[cls] . identifier[_notes] . identifier[notes] =[]
identifier[cls] . identifier[clear_notes] = identifier[clear_notes]
identifier[cls] . identifier[note] = identifier[note]
identifier[cls] . identifier[notes] = identifier[notes]
identifier[cls] . identifier[log] = identifier[log]
identifier[cls] . identifier[log_title] = identifier[log_title]
identifier[cls] . identifier[log_subtitle] = identifier[log_subtitle]
keyword[return] identifier[cls] | def note_and_log(cls):
"""
    This will be used as a decorator on a class to activate
    logging and store messages in the variable cls._notes
    This will allow quick access to events in the web app.
    A note can be added to cls._notes without logging by passing
    the argument log=False to function note()
    Something can be logged without adding a note using function log()
"""
if hasattr(cls, 'DEBUG_LEVEL'):
if cls.DEBUG_LEVEL == 'debug':
file_level = logging.DEBUG
console_level = logging.DEBUG # depends on [control=['if'], data=[]]
elif cls.DEBUG_LEVEL == 'info':
file_level = logging.INFO
console_level = logging.INFO # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
file_level = logging.WARNING
console_level = logging.INFO
# Notes object
cls._notes = namedtuple('_notes', ['timestamp', 'notes'])
cls._notes.timestamp = []
cls._notes.notes = []
# Defining log object
cls.logname = '{} | {}'.format(cls.__module__, cls.__name__)
root_logger = logging.getLogger()
cls._log = logging.getLogger('BAC0')
if not len(root_logger.handlers):
root_logger.addHandler(cls._log) # depends on [control=['if'], data=[]]
# Console Handler
ch = logging.StreamHandler()
ch.set_name('stderr')
ch2 = logging.StreamHandler(sys.stdout)
ch2.set_name('stdout')
ch.setLevel(console_level)
ch2.setLevel(logging.CRITICAL)
formatter = logging.Formatter('{asctime} - {levelname:<8}| {message}', style='{')
# File Handler
_PERMISSION_TO_WRITE = True
logUserPath = expanduser('~')
logSaveFilePath = join(logUserPath, '.BAC0')
logFile = join(logSaveFilePath, 'BAC0.log')
if not os.path.exists(logSaveFilePath):
try:
os.makedirs(logSaveFilePath) # depends on [control=['try'], data=[]]
except:
_PERMISSION_TO_WRITE = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if _PERMISSION_TO_WRITE:
fh = FileHandler(logFile)
fh.set_name('file_handler')
fh.setLevel(file_level)
fh.setFormatter(formatter) # depends on [control=['if'], data=[]]
ch.setFormatter(formatter)
ch2.setFormatter(formatter)
# Add handlers the first time only...
if not len(cls._log.handlers):
if _PERMISSION_TO_WRITE:
cls._log.addHandler(fh) # depends on [control=['if'], data=[]]
cls._log.addHandler(ch)
cls._log.addHandler(ch2) # depends on [control=['if'], data=[]]
# cls._log.setLevel(logging.CRITICAL)
def log_title(self, title, args=None, width=35):
cls._log.debug('')
cls._log.debug('#' * width)
cls._log.debug('# {}'.format(title))
cls._log.debug('#' * width)
if args:
cls._log.debug('{!r}'.format(args))
cls._log.debug('#' * 35) # depends on [control=['if'], data=[]]
def log_subtitle(self, subtitle, args=None, width=35):
cls._log.debug('')
cls._log.debug('=' * width)
cls._log.debug('{}'.format(subtitle))
cls._log.debug('=' * width)
if args:
cls._log.debug('{!r}'.format(args))
cls._log.debug('=' * width) # depends on [control=['if'], data=[]]
def log(self, note, *, level=logging.DEBUG):
"""
Add a log entry...no note
"""
if not note:
raise ValueError('Provide something to log') # depends on [control=['if'], data=[]]
note = '{} | {}'.format(cls.logname, note)
cls._log.log(level, note)
def note(self, note, *, level=logging.INFO, log=True):
"""
Add note to the object. By default, the note will also
be logged
:param note: (str) The note itself
:param level: (logging.level)
:param log: (boolean) Enable or disable logging of note
"""
if not note:
raise ValueError('Provide something to log') # depends on [control=['if'], data=[]]
note = '{} | {}'.format(cls.logname, note)
cls._notes.timestamp.append(datetime.now())
cls._notes.notes.append(note)
if log:
cls.log(level, note) # depends on [control=['if'], data=[]]
@property
def notes(self):
"""
Retrieve notes list as a Pandas Series
"""
if not _PANDAS:
return dict(zip(self._notes.timestamp, self._notes.notes)) # depends on [control=['if'], data=[]]
return pd.Series(self._notes.notes, index=self._notes.timestamp)
def clear_notes(self):
"""
Clear notes object
"""
cls._notes.timestamp = []
cls._notes.notes = []
# Add the functions to the decorated class
cls.clear_notes = clear_notes
cls.note = note
cls.notes = notes
cls.log = log
cls.log_title = log_title
cls.log_subtitle = log_subtitle
return cls |
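A minimal usage sketch for the decorator above. Note that DEBUG_LEVEL must be set on the decorated class: the handler levels (file_level/console_level) are only assigned inside the hasattr() branch and are used unconditionally afterwards. The class name is a placeholder.

@note_and_log
class Sensor:
    DEBUG_LEVEL = "info"   # required: selects file/console handler levels

s = Sensor()
s.note("started")          # stored in Sensor._notes and logged
s.log("probe ok")          # logged only, no note kept
print(s.notes)             # dict (or pandas Series) of timestamp -> note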
def mkdir_p(path):
'''
    Mimic `mkdir -p` since the os module doesn't provide one.
:param str path: directory to create
'''
assert isinstance(path, basestring), ("path must be a string but is %r" % path)
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise | def function[mkdir_p, parameter[path]]:
constant[
    Mimic `mkdir -p` since the os module doesn't provide one.
:param str path: directory to create
]
assert[call[name[isinstance], parameter[name[path], name[basestring]]]]
<ast.Try object at 0x7da2047e86d0> | keyword[def] identifier[mkdir_p] ( identifier[path] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[path] , identifier[basestring] ),( literal[string] % identifier[path] )
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[path] )
keyword[except] identifier[OSError] keyword[as] identifier[exception] :
keyword[if] identifier[exception] . identifier[errno] != identifier[errno] . identifier[EEXIST] :
keyword[raise] | def mkdir_p(path):
"""
    Mimic `mkdir -p` since the os module doesn't provide one.
:param str path: directory to create
"""
assert isinstance(path, basestring), 'path must be a string but is %r' % path
try:
os.makedirs(path) # depends on [control=['try'], data=[]]
except OSError as exception:
if exception.errno != errno.EEXIST:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exception']] |
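On Python 3.2+ the same behaviour is available directly from the standard library, which makes the explicit EEXIST check unnecessary:

import os

os.makedirs("some/nested/dir", exist_ok=True)   # no error if it already exists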
def calldefs(
self,
name=None,
function=None,
return_type=None,
arg_types=None,
header_dir=None,
header_file=None,
recursive=None,
allow_empty=None):
"""returns a set of :class:`calldef_t` declarations, that are matched
defined criteria"""
return (
self._find_multiple(
self._impl_matchers[scopedef_t.calldef],
name=name,
function=function,
decl_type=self._impl_decl_types[
scopedef_t.calldef],
return_type=return_type,
arg_types=arg_types,
header_dir=header_dir,
header_file=header_file,
recursive=recursive,
allow_empty=allow_empty)
) | def function[calldefs, parameter[self, name, function, return_type, arg_types, header_dir, header_file, recursive, allow_empty]]:
    constant[returns a set of :class:`calldef_t` declarations that match the
defined criteria]
return[call[name[self]._find_multiple, parameter[call[name[self]._impl_matchers][name[scopedef_t].calldef]]]] | keyword[def] identifier[calldefs] (
identifier[self] ,
identifier[name] = keyword[None] ,
identifier[function] = keyword[None] ,
identifier[return_type] = keyword[None] ,
identifier[arg_types] = keyword[None] ,
identifier[header_dir] = keyword[None] ,
identifier[header_file] = keyword[None] ,
identifier[recursive] = keyword[None] ,
identifier[allow_empty] = keyword[None] ):
literal[string]
keyword[return] (
identifier[self] . identifier[_find_multiple] (
identifier[self] . identifier[_impl_matchers] [ identifier[scopedef_t] . identifier[calldef] ],
identifier[name] = identifier[name] ,
identifier[function] = identifier[function] ,
identifier[decl_type] = identifier[self] . identifier[_impl_decl_types] [
identifier[scopedef_t] . identifier[calldef] ],
identifier[return_type] = identifier[return_type] ,
identifier[arg_types] = identifier[arg_types] ,
identifier[header_dir] = identifier[header_dir] ,
identifier[header_file] = identifier[header_file] ,
identifier[recursive] = identifier[recursive] ,
identifier[allow_empty] = identifier[allow_empty] )
) | def calldefs(self, name=None, function=None, return_type=None, arg_types=None, header_dir=None, header_file=None, recursive=None, allow_empty=None):
"""returns a set of :class:`calldef_t` declarations, that are matched
defined criteria"""
return self._find_multiple(self._impl_matchers[scopedef_t.calldef], name=name, function=function, decl_type=self._impl_decl_types[scopedef_t.calldef], return_type=return_type, arg_types=arg_types, header_dir=header_dir, header_file=header_file, recursive=recursive, allow_empty=allow_empty) |
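A hedged end-to-end sketch using pygccxml's documented entry points; the header name and the castxml location are assumptions about the local setup.

from pygccxml import parser, declarations

config = parser.xml_generator_configuration_t(
    xml_generator_path="castxml",    # assumed to be on PATH
    xml_generator="castxml")
decls = parser.parse(["my_header.hpp"], config)      # assumed header
global_ns = declarations.get_global_namespace(decls)
for f in global_ns.calldefs(name="do_smth", allow_empty=True):
    print(f)   # every free function/method named do_smth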
def profiler(self):
"""Creates a dictionary from the profile scheme(s)"""
logging.info('Loading profiles')
# Initialise variables
profiledata = defaultdict(make_dict)
reverse_profiledata = dict()
profileset = set()
# Find all the unique profiles to use with a set
for sample in self.runmetadata.samples:
if sample.general.bestassemblyfile != 'NA':
if sample[self.analysistype].profile != 'NA':
profileset.add(sample[self.analysistype].profile)
# Extract the profiles for each set
for sequenceprofile in profileset:
#
if sequenceprofile not in self.meta_dict:
self.meta_dict[sequenceprofile] = dict()
reverse_profiledata[sequenceprofile] = dict()
self.meta_dict[sequenceprofile]['ND'] = dict()
# Clear the list of genes
geneset = set()
# Calculate the total number of genes in the typing scheme
for sample in self.runmetadata.samples:
if sample.general.bestassemblyfile != 'NA':
if sequenceprofile == sample[self.analysistype].profile:
geneset = {allele for allele in sample[self.analysistype].alleles}
try:
# Open the sequence profile file as a dictionary
profile = DictReader(open(sequenceprofile), dialect='excel-tab')
# Revert to standard comma separated values
except KeyError:
# Open the sequence profile file as a dictionary
profile = DictReader(open(sequenceprofile))
# Iterate through the rows
for row in profile:
# Populate the profile dictionary with profile number: {gene: allele}. Use the first field name,
            # which will be either ST or rST, as the key to determine the profile number value
allele_comprehension = {gene: allele for gene, allele in row.items() if gene in geneset}
st = row[profile.fieldnames[0]]
for header, value in row.items():
value = value if value else 'ND'
if header not in geneset and header not in ['ST', 'rST']:
if st not in self.meta_dict[sequenceprofile]:
self.meta_dict[sequenceprofile][st] = dict()
if header == 'CC' or header == 'clonal_complex':
header = 'CC'
self.meta_dict[sequenceprofile][st][header] = value
self.meta_dict[sequenceprofile]['ND'][header] = 'ND'
self.meta_dict[sequenceprofile][st]['PredictedSerogroup'] = 'ND'
if header not in self.meta_headers:
self.meta_headers.append(header)
profiledata[sequenceprofile][st] = allele_comprehension
                # Create a 'reverse' dictionary using the allele comprehension as the key, and
# the sequence type as the value - can be used if exact matches are ever desired
reverse_profiledata[sequenceprofile].update({frozenset(allele_comprehension.items()): st})
# Add the profile data, and gene list to each sample
for sample in self.runmetadata.samples:
if sample.general.bestassemblyfile != 'NA':
if sequenceprofile == sample[self.analysistype].profile:
# Populate the metadata with the profile data
sample[self.analysistype].profiledata = profiledata[sample[self.analysistype].profile]
sample[self.analysistype].reverse_profiledata = reverse_profiledata[sequenceprofile]
sample[self.analysistype].meta_dict = self.meta_dict[sequenceprofile]
else:
sample[self.analysistype].profiledata = 'NA'
sample[self.analysistype].reverse_profiledata = 'NA'
sample[self.analysistype].meta_dict = 'NA' | def function[profiler, parameter[self]]:
constant[Creates a dictionary from the profile scheme(s)]
call[name[logging].info, parameter[constant[Loading profiles]]]
variable[profiledata] assign[=] call[name[defaultdict], parameter[name[make_dict]]]
variable[reverse_profiledata] assign[=] call[name[dict], parameter[]]
variable[profileset] assign[=] call[name[set], parameter[]]
for taget[name[sample]] in starred[name[self].runmetadata.samples] begin[:]
if compare[name[sample].general.bestassemblyfile not_equal[!=] constant[NA]] begin[:]
if compare[call[name[sample]][name[self].analysistype].profile not_equal[!=] constant[NA]] begin[:]
call[name[profileset].add, parameter[call[name[sample]][name[self].analysistype].profile]]
for taget[name[sequenceprofile]] in starred[name[profileset]] begin[:]
if compare[name[sequenceprofile] <ast.NotIn object at 0x7da2590d7190> name[self].meta_dict] begin[:]
call[name[self].meta_dict][name[sequenceprofile]] assign[=] call[name[dict], parameter[]]
call[name[reverse_profiledata]][name[sequenceprofile]] assign[=] call[name[dict], parameter[]]
call[call[name[self].meta_dict][name[sequenceprofile]]][constant[ND]] assign[=] call[name[dict], parameter[]]
variable[geneset] assign[=] call[name[set], parameter[]]
for taget[name[sample]] in starred[name[self].runmetadata.samples] begin[:]
if compare[name[sample].general.bestassemblyfile not_equal[!=] constant[NA]] begin[:]
if compare[name[sequenceprofile] equal[==] call[name[sample]][name[self].analysistype].profile] begin[:]
variable[geneset] assign[=] <ast.SetComp object at 0x7da1b1146b30>
<ast.Try object at 0x7da1b1146950>
for taget[name[row]] in starred[name[profile]] begin[:]
variable[allele_comprehension] assign[=] <ast.DictComp object at 0x7da1b11464a0>
variable[st] assign[=] call[name[row]][call[name[profile].fieldnames][constant[0]]]
for taget[tuple[[<ast.Name object at 0x7da1b1146020>, <ast.Name object at 0x7da1b1145ff0>]]] in starred[call[name[row].items, parameter[]]] begin[:]
variable[value] assign[=] <ast.IfExp object at 0x7da1b1145ed0>
if <ast.BoolOp object at 0x7da1b1145de0> begin[:]
if compare[name[st] <ast.NotIn object at 0x7da2590d7190> call[name[self].meta_dict][name[sequenceprofile]]] begin[:]
call[call[name[self].meta_dict][name[sequenceprofile]]][name[st]] assign[=] call[name[dict], parameter[]]
if <ast.BoolOp object at 0x7da1b11458d0> begin[:]
variable[header] assign[=] constant[CC]
call[call[call[name[self].meta_dict][name[sequenceprofile]]][name[st]]][name[header]] assign[=] name[value]
call[call[call[name[self].meta_dict][name[sequenceprofile]]][constant[ND]]][name[header]] assign[=] constant[ND]
call[call[call[name[self].meta_dict][name[sequenceprofile]]][name[st]]][constant[PredictedSerogroup]] assign[=] constant[ND]
if compare[name[header] <ast.NotIn object at 0x7da2590d7190> name[self].meta_headers] begin[:]
call[name[self].meta_headers.append, parameter[name[header]]]
call[call[name[profiledata]][name[sequenceprofile]]][name[st]] assign[=] name[allele_comprehension]
call[call[name[reverse_profiledata]][name[sequenceprofile]].update, parameter[dictionary[[<ast.Call object at 0x7da1b10d4ee0>], [<ast.Name object at 0x7da1b10d5750>]]]]
for taget[name[sample]] in starred[name[self].runmetadata.samples] begin[:]
if compare[name[sample].general.bestassemblyfile not_equal[!=] constant[NA]] begin[:]
if compare[name[sequenceprofile] equal[==] call[name[sample]][name[self].analysistype].profile] begin[:]
call[name[sample]][name[self].analysistype].profiledata assign[=] call[name[profiledata]][call[name[sample]][name[self].analysistype].profile]
call[name[sample]][name[self].analysistype].reverse_profiledata assign[=] call[name[reverse_profiledata]][name[sequenceprofile]]
call[name[sample]][name[self].analysistype].meta_dict assign[=] call[name[self].meta_dict][name[sequenceprofile]] | keyword[def] identifier[profiler] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] )
identifier[profiledata] = identifier[defaultdict] ( identifier[make_dict] )
identifier[reverse_profiledata] = identifier[dict] ()
identifier[profileset] = identifier[set] ()
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[runmetadata] . identifier[samples] :
keyword[if] identifier[sample] . identifier[general] . identifier[bestassemblyfile] != literal[string] :
keyword[if] identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profile] != literal[string] :
identifier[profileset] . identifier[add] ( identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profile] )
keyword[for] identifier[sequenceprofile] keyword[in] identifier[profileset] :
keyword[if] identifier[sequenceprofile] keyword[not] keyword[in] identifier[self] . identifier[meta_dict] :
identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ]= identifier[dict] ()
identifier[reverse_profiledata] [ identifier[sequenceprofile] ]= identifier[dict] ()
identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ][ literal[string] ]= identifier[dict] ()
identifier[geneset] = identifier[set] ()
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[runmetadata] . identifier[samples] :
keyword[if] identifier[sample] . identifier[general] . identifier[bestassemblyfile] != literal[string] :
keyword[if] identifier[sequenceprofile] == identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profile] :
identifier[geneset] ={ identifier[allele] keyword[for] identifier[allele] keyword[in] identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[alleles] }
keyword[try] :
identifier[profile] = identifier[DictReader] ( identifier[open] ( identifier[sequenceprofile] ), identifier[dialect] = literal[string] )
keyword[except] identifier[KeyError] :
identifier[profile] = identifier[DictReader] ( identifier[open] ( identifier[sequenceprofile] ))
keyword[for] identifier[row] keyword[in] identifier[profile] :
identifier[allele_comprehension] ={ identifier[gene] : identifier[allele] keyword[for] identifier[gene] , identifier[allele] keyword[in] identifier[row] . identifier[items] () keyword[if] identifier[gene] keyword[in] identifier[geneset] }
identifier[st] = identifier[row] [ identifier[profile] . identifier[fieldnames] [ literal[int] ]]
keyword[for] identifier[header] , identifier[value] keyword[in] identifier[row] . identifier[items] ():
identifier[value] = identifier[value] keyword[if] identifier[value] keyword[else] literal[string]
keyword[if] identifier[header] keyword[not] keyword[in] identifier[geneset] keyword[and] identifier[header] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[st] keyword[not] keyword[in] identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ]:
identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ][ identifier[st] ]= identifier[dict] ()
keyword[if] identifier[header] == literal[string] keyword[or] identifier[header] == literal[string] :
identifier[header] = literal[string]
identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ][ identifier[st] ][ identifier[header] ]= identifier[value]
identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ][ literal[string] ][ identifier[header] ]= literal[string]
identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ][ identifier[st] ][ literal[string] ]= literal[string]
keyword[if] identifier[header] keyword[not] keyword[in] identifier[self] . identifier[meta_headers] :
identifier[self] . identifier[meta_headers] . identifier[append] ( identifier[header] )
identifier[profiledata] [ identifier[sequenceprofile] ][ identifier[st] ]= identifier[allele_comprehension]
identifier[reverse_profiledata] [ identifier[sequenceprofile] ]. identifier[update] ({ identifier[frozenset] ( identifier[allele_comprehension] . identifier[items] ()): identifier[st] })
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[runmetadata] . identifier[samples] :
keyword[if] identifier[sample] . identifier[general] . identifier[bestassemblyfile] != literal[string] :
keyword[if] identifier[sequenceprofile] == identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profile] :
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profiledata] = identifier[profiledata] [ identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profile] ]
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[reverse_profiledata] = identifier[reverse_profiledata] [ identifier[sequenceprofile] ]
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[meta_dict] = identifier[self] . identifier[meta_dict] [ identifier[sequenceprofile] ]
keyword[else] :
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[profiledata] = literal[string]
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[reverse_profiledata] = literal[string]
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[meta_dict] = literal[string] | def profiler(self):
"""Creates a dictionary from the profile scheme(s)"""
logging.info('Loading profiles')
# Initialise variables
profiledata = defaultdict(make_dict)
reverse_profiledata = dict()
profileset = set()
# Find all the unique profiles to use with a set
for sample in self.runmetadata.samples:
if sample.general.bestassemblyfile != 'NA':
if sample[self.analysistype].profile != 'NA':
profileset.add(sample[self.analysistype].profile) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']]
# Extract the profiles for each set
for sequenceprofile in profileset:
#
if sequenceprofile not in self.meta_dict:
self.meta_dict[sequenceprofile] = dict() # depends on [control=['if'], data=['sequenceprofile']]
reverse_profiledata[sequenceprofile] = dict()
self.meta_dict[sequenceprofile]['ND'] = dict()
# Clear the list of genes
geneset = set()
# Calculate the total number of genes in the typing scheme
for sample in self.runmetadata.samples:
if sample.general.bestassemblyfile != 'NA':
if sequenceprofile == sample[self.analysistype].profile:
geneset = {allele for allele in sample[self.analysistype].alleles} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']]
try:
# Open the sequence profile file as a dictionary
profile = DictReader(open(sequenceprofile), dialect='excel-tab') # depends on [control=['try'], data=[]]
# Revert to standard comma separated values
except KeyError:
# Open the sequence profile file as a dictionary
profile = DictReader(open(sequenceprofile)) # depends on [control=['except'], data=[]]
# Iterate through the rows
for row in profile:
# Populate the profile dictionary with profile number: {gene: allele}. Use the first field name,
# which will be either ST or rST, as the key to determine the profile number value
allele_comprehension = {gene: allele for (gene, allele) in row.items() if gene in geneset}
st = row[profile.fieldnames[0]]
for (header, value) in row.items():
value = value if value else 'ND'
if header not in geneset and header not in ['ST', 'rST']:
if st not in self.meta_dict[sequenceprofile]:
self.meta_dict[sequenceprofile][st] = dict() # depends on [control=['if'], data=['st']]
if header == 'CC' or header == 'clonal_complex':
header = 'CC' # depends on [control=['if'], data=[]]
self.meta_dict[sequenceprofile][st][header] = value
self.meta_dict[sequenceprofile]['ND'][header] = 'ND'
self.meta_dict[sequenceprofile][st]['PredictedSerogroup'] = 'ND'
if header not in self.meta_headers:
self.meta_headers.append(header) # depends on [control=['if'], data=['header']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
profiledata[sequenceprofile][st] = allele_comprehension
# Create a 'reverse' dictionary using the allele comprehension as the key, and
# the sequence type as the value - can be used if exact matches are ever desired
reverse_profiledata[sequenceprofile].update({frozenset(allele_comprehension.items()): st}) # depends on [control=['for'], data=['row']]
# Add the profile data, and gene list to each sample
for sample in self.runmetadata.samples:
if sample.general.bestassemblyfile != 'NA':
if sequenceprofile == sample[self.analysistype].profile:
# Populate the metadata with the profile data
sample[self.analysistype].profiledata = profiledata[sample[self.analysistype].profile]
sample[self.analysistype].reverse_profiledata = reverse_profiledata[sequenceprofile]
sample[self.analysistype].meta_dict = self.meta_dict[sequenceprofile] # depends on [control=['if'], data=['sequenceprofile']] # depends on [control=['if'], data=[]]
else:
sample[self.analysistype].profiledata = 'NA'
sample[self.analysistype].reverse_profiledata = 'NA'
sample[self.analysistype].meta_dict = 'NA' # depends on [control=['for'], data=['sample']] # depends on [control=['for'], data=['sequenceprofile']] |
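The parsing core of profiler() reduces to csv.DictReader over a tab-separated profile table. A self-contained sketch of that step, using a made-up two-gene scheme:

from csv import DictReader
from io import StringIO

table = "ST\tabcZ\tadk\tCC\n1\t4\t7\tcc-1\n2\t4\t9\tND\n"
geneset = {"abcZ", "adk"}
for row in DictReader(StringIO(table), dialect="excel-tab"):
    st = row["ST"]          # the first field names the profile
    alleles = {gene: allele for gene, allele in row.items() if gene in geneset}
    print(st, alleles)      # e.g. 1 {'abcZ': '4', 'adk': '7'}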
def predict_variant_effect_on_transcript_or_failure(variant, transcript):
"""
Try predicting the effect of a variant on a particular transcript but
suppress raised exceptions by converting them into `Failure` effect
values.
"""
try:
return predict_variant_effect_on_transcript(
variant=variant,
transcript=transcript)
except (AssertionError, ValueError) as error:
logger.warn(
"Encountered error annotating %s for %s: %s",
variant,
transcript,
error)
return Failure(variant, transcript) | def function[predict_variant_effect_on_transcript_or_failure, parameter[variant, transcript]]:
constant[
Try predicting the effect of a variant on a particular transcript but
suppress raised exceptions by converting them into `Failure` effect
values.
]
<ast.Try object at 0x7da1b0404190> | keyword[def] identifier[predict_variant_effect_on_transcript_or_failure] ( identifier[variant] , identifier[transcript] ):
literal[string]
keyword[try] :
keyword[return] identifier[predict_variant_effect_on_transcript] (
identifier[variant] = identifier[variant] ,
identifier[transcript] = identifier[transcript] )
keyword[except] ( identifier[AssertionError] , identifier[ValueError] ) keyword[as] identifier[error] :
identifier[logger] . identifier[warn] (
literal[string] ,
identifier[variant] ,
identifier[transcript] ,
identifier[error] )
keyword[return] identifier[Failure] ( identifier[variant] , identifier[transcript] ) | def predict_variant_effect_on_transcript_or_failure(variant, transcript):
"""
Try predicting the effect of a variant on a particular transcript but
suppress raised exceptions by converting them into `Failure` effect
values.
"""
try:
return predict_variant_effect_on_transcript(variant=variant, transcript=transcript) # depends on [control=['try'], data=[]]
except (AssertionError, ValueError) as error:
logger.warn('Encountered error annotating %s for %s: %s', variant, transcript, error)
return Failure(variant, transcript) # depends on [control=['except'], data=['error']] |
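A minimal sketch of the suppress-and-wrap pattern above. The stub predictor and Failure class below are illustrative stand-ins, not the real varcode API:

import logging

logger = logging.getLogger(__name__)

class Failure:
    # hypothetical stand-in: records the inputs instead of raising
    def __init__(self, variant, transcript):
        self.variant, self.transcript = variant, transcript

def predict_variant_effect_on_transcript(variant, transcript):
    raise ValueError("transcript lacks annotation")  # simulate a prediction error

effect = predict_variant_effect_on_transcript_or_failure("chr1:g.100A>T", "TX-1")
assert isinstance(effect, Failure)  # the exception was logged and wrapped, not raised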
def set_keyboard_focus(self, move_up, move_down, select):
"""
Set the keyboard as the object that controls the menu.
move_up is from the pygame.KEYS enum that defines what button causes the menu selection to move up.
move_down is from the pygame.KEYS enum that defines what button causes the menu selection to move down.
select is from the pygame.KEYS enum that defines what button causes the button to be selected.
"""
self.input_focus = StateTypes.KEYBOARD
self.move_up_button = move_up
self.move_down_button = move_down
self.select_button = select | def function[set_keyboard_focus, parameter[self, move_up, move_down, select]]:
constant[
Set the keyboard as the object that controls the menu.
move_up is from the pygame.KEYS enum that defines what button causes the menu selection to move up.
move_down is from the pygame.KEYS enum that defines what button causes the menu selection to move down.
select is from the pygame.KEYS enum that defines what button causes the button to be selected.
]
name[self].input_focus assign[=] name[StateTypes].KEYBOARD
name[self].move_up_button assign[=] name[move_up]
name[self].move_down_button assign[=] name[move_down]
name[self].select_button assign[=] name[select] | keyword[def] identifier[set_keyboard_focus] ( identifier[self] , identifier[move_up] , identifier[move_down] , identifier[select] ):
literal[string]
identifier[self] . identifier[input_focus] = identifier[StateTypes] . identifier[KEYBOARD]
identifier[self] . identifier[move_up_button] = identifier[move_up]
identifier[self] . identifier[move_down_button] = identifier[move_down]
identifier[self] . identifier[select_button] = identifier[select] | def set_keyboard_focus(self, move_up, move_down, select):
"""
Set the keyboard as the object that controls the menu.
move_up is from the pygame.KEYS enum that defines what button causes the menu selection to move up.
move_down is from the pygame.KEYS enum that defines what button causes the menu selection to move down.
select is from the pygame.KEYS enum that defines what button causes the button to be selected.
"""
self.input_focus = StateTypes.KEYBOARD
self.move_up_button = move_up
self.move_down_button = move_down
self.select_button = select |
def cli():
"""Command line utility to HTTP enable (publish) a dataset."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"dataset_uri",
help="Dtool dataset URI"
)
parser.add_argument(
"-q",
"--quiet",
action="store_true",
help="Only return the http URI"
)
args = parser.parse_args()
access_uri = publish(args.dataset_uri)
if args.quiet:
print(access_uri)
else:
print("Dataset accessible at: {}".format(access_uri)) | def function[cli, parameter[]]:
constant[Command line utility to HTTP enable (publish) a dataset.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[dataset_uri]]]
call[name[parser].add_argument, parameter[constant[-q], constant[--quiet]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
variable[access_uri] assign[=] call[name[publish], parameter[name[args].dataset_uri]]
if name[args].quiet begin[:]
call[name[print], parameter[name[access_uri]]] | keyword[def] identifier[cli] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[__doc__] )
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[help] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[action] = literal[string] ,
identifier[help] = literal[string]
)
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[access_uri] = identifier[publish] ( identifier[args] . identifier[dataset_uri] )
keyword[if] identifier[args] . identifier[quiet] :
identifier[print] ( identifier[access_uri] )
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[access_uri] )) | def cli():
"""Command line utility to HTTP enable (publish) a dataset."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('dataset_uri', help='Dtool dataset URI')
parser.add_argument('-q', '--quiet', action='store_true', help='Only return the http URI')
args = parser.parse_args()
access_uri = publish(args.dataset_uri)
if args.quiet:
print(access_uri) # depends on [control=['if'], data=[]]
else:
print('Dataset accessible at: {}'.format(access_uri)) |
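One way to exercise this entry point from the defining module, patching argv; the dataset URI is a placeholder:

import sys

sys.argv = ["publish", "s3://example-bucket/my-dataset", "--quiet"]
cli()  # with --quiet, prints only the resulting HTTP URI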
def reference_title_header_element(feature, parent):
"""Retrieve reference title header string from definitions."""
_ = feature, parent # NOQA
header = reference_title_header['string_format']
return header.capitalize() | def function[reference_title_header_element, parameter[feature, parent]]:
constant[Retrieve reference title header string from definitions.]
variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0ce3e80>, <ast.Name object at 0x7da1b0ce1510>]]
variable[header] assign[=] call[name[reference_title_header]][constant[string_format]]
return[call[name[header].capitalize, parameter[]]] | keyword[def] identifier[reference_title_header_element] ( identifier[feature] , identifier[parent] ):
literal[string]
identifier[_] = identifier[feature] , identifier[parent]
identifier[header] = identifier[reference_title_header] [ literal[string] ]
keyword[return] identifier[header] . identifier[capitalize] () | def reference_title_header_element(feature, parent):
"""Retrieve reference title header string from definitions."""
_ = (feature, parent) # NOQA
header = reference_title_header['string_format']
return header.capitalize() |
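Because `feature` and `parent` are ignored, the lookup is easy to exercise; the stand-in dict below plays the role of the imported definitions entry, as if set in the defining module:

reference_title_header = {"string_format": "analysis references"}  # stand-in definition
print(reference_title_header_element(None, None))  # -> "Analysis references"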
def _build_hexameter_template(self, stress_positions: str) -> str:
"""
Build a hexameter scansion template from string of 5 binary numbers;
NOTE: Traditionally the fifth foot is dactyl and spondee substitution is rare,
however since it *is* a possible combination, we include it here.
:param stress_positions: 5 binary integers, indicating whether foot is dactyl or spondee
:return: a valid hexameter scansion template, a string representing stressed and
        unstressed syllables with the optional terminal ending.
>>> print(MetricalValidator()._build_hexameter_template("01010"))
-UU---UU---UU-X
"""
hexameter = []
for binary in stress_positions:
if binary == "1":
hexameter.append(self.constants.SPONDEE)
if binary == "0":
hexameter.append(self.constants.DACTYL)
hexameter.append(self.constants.HEXAMETER_ENDING)
return "".join(hexameter) | def function[_build_hexameter_template, parameter[self, stress_positions]]:
constant[
Build a hexameter scansion template from string of 5 binary numbers;
NOTE: Traditionally the fifth foot is dactyl and spondee substitution is rare,
however since it *is* a possible combination, we include it here.
:param stress_positions: 5 binary integers, indicating whether foot is dactyl or spondee
:return: a valid hexameter scansion template, a string representing stressed and
        unstressed syllables with the optional terminal ending.
>>> print(MetricalValidator()._build_hexameter_template("01010"))
-UU---UU---UU-X
]
variable[hexameter] assign[=] list[[]]
for taget[name[binary]] in starred[name[stress_positions]] begin[:]
if compare[name[binary] equal[==] constant[1]] begin[:]
call[name[hexameter].append, parameter[name[self].constants.SPONDEE]]
if compare[name[binary] equal[==] constant[0]] begin[:]
call[name[hexameter].append, parameter[name[self].constants.DACTYL]]
call[name[hexameter].append, parameter[name[self].constants.HEXAMETER_ENDING]]
return[call[constant[].join, parameter[name[hexameter]]]] | keyword[def] identifier[_build_hexameter_template] ( identifier[self] , identifier[stress_positions] : identifier[str] )-> identifier[str] :
literal[string]
identifier[hexameter] =[]
keyword[for] identifier[binary] keyword[in] identifier[stress_positions] :
keyword[if] identifier[binary] == literal[string] :
identifier[hexameter] . identifier[append] ( identifier[self] . identifier[constants] . identifier[SPONDEE] )
keyword[if] identifier[binary] == literal[string] :
identifier[hexameter] . identifier[append] ( identifier[self] . identifier[constants] . identifier[DACTYL] )
identifier[hexameter] . identifier[append] ( identifier[self] . identifier[constants] . identifier[HEXAMETER_ENDING] )
keyword[return] literal[string] . identifier[join] ( identifier[hexameter] ) | def _build_hexameter_template(self, stress_positions: str) -> str:
"""
Build a hexameter scansion template from string of 5 binary numbers;
NOTE: Traditionally the fifth foot is dactyl and spondee substitution is rare,
however since it *is* a possible combination, we include it here.
:param stress_positions: 5 binary integers, indicating whether foot is dactyl or spondee
:return: a valid hexameter scansion template, a string representing stressed and
        unstressed syllables with the optional terminal ending.
>>> print(MetricalValidator()._build_hexameter_template("01010"))
-UU---UU---UU-X
"""
hexameter = []
for binary in stress_positions:
if binary == '1':
hexameter.append(self.constants.SPONDEE) # depends on [control=['if'], data=[]]
if binary == '0':
hexameter.append(self.constants.DACTYL) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['binary']]
hexameter.append(self.constants.HEXAMETER_ENDING)
return ''.join(hexameter) |
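All 32 foot combinations can be generated from the five-bit strings; constructing MetricalValidator with no arguments is an assumption here:

import itertools

validator = MetricalValidator()  # assumed to build without arguments
for bits in itertools.product("01", repeat=5):
    pattern = "".join(bits)
    print(pattern, validator._build_hexameter_template(pattern))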
def getScalars(self, inputData):
"""
Returns a numpy array containing the sub-field scalar value(s) for
each sub-field of the ``inputData``. To get the associated field names for
each of the scalar values, call :meth:`.getScalarNames()`.
For a simple scalar encoder, the scalar value is simply the input unmodified.
For category encoders, it is the scalar representing the category string
    that is passed in. For the datetime encoder, the scalar value is
the number of seconds since epoch.
The intent of the scalar representation of a sub-field is to provide a
baseline for measuring error differences. You can compare the scalar value
of the inputData with the scalar value returned from :meth:`.topDownCompute`
on a top-down representation to evaluate prediction accuracy, for example.
:param inputData: The data from the source. This is typically an object with
members
:return: array of scalar values
"""
retVals = numpy.array([])
if self.encoders is not None:
for (name, encoder, offset) in self.encoders:
values = encoder.getScalars(self._getInputValue(inputData, name))
retVals = numpy.hstack((retVals, values))
else:
retVals = numpy.hstack((retVals, inputData))
return retVals | def function[getScalars, parameter[self, inputData]]:
constant[
Returns a numpy array containing the sub-field scalar value(s) for
each sub-field of the ``inputData``. To get the associated field names for
each of the scalar values, call :meth:`.getScalarNames()`.
For a simple scalar encoder, the scalar value is simply the input unmodified.
For category encoders, it is the scalar representing the category string
    that is passed in. For the datetime encoder, the scalar value is
the number of seconds since epoch.
The intent of the scalar representation of a sub-field is to provide a
baseline for measuring error differences. You can compare the scalar value
of the inputData with the scalar value returned from :meth:`.topDownCompute`
on a top-down representation to evaluate prediction accuracy, for example.
:param inputData: The data from the source. This is typically an object with
members
:return: array of scalar values
]
variable[retVals] assign[=] call[name[numpy].array, parameter[list[[]]]]
if compare[name[self].encoders is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20e9b0670>, <ast.Name object at 0x7da20e9b1420>, <ast.Name object at 0x7da20e9b0e20>]]] in starred[name[self].encoders] begin[:]
variable[values] assign[=] call[name[encoder].getScalars, parameter[call[name[self]._getInputValue, parameter[name[inputData], name[name]]]]]
variable[retVals] assign[=] call[name[numpy].hstack, parameter[tuple[[<ast.Name object at 0x7da20e9b3610>, <ast.Name object at 0x7da20e9b1c90>]]]]
return[name[retVals]] | keyword[def] identifier[getScalars] ( identifier[self] , identifier[inputData] ):
literal[string]
identifier[retVals] = identifier[numpy] . identifier[array] ([])
keyword[if] identifier[self] . identifier[encoders] keyword[is] keyword[not] keyword[None] :
keyword[for] ( identifier[name] , identifier[encoder] , identifier[offset] ) keyword[in] identifier[self] . identifier[encoders] :
identifier[values] = identifier[encoder] . identifier[getScalars] ( identifier[self] . identifier[_getInputValue] ( identifier[inputData] , identifier[name] ))
identifier[retVals] = identifier[numpy] . identifier[hstack] (( identifier[retVals] , identifier[values] ))
keyword[else] :
identifier[retVals] = identifier[numpy] . identifier[hstack] (( identifier[retVals] , identifier[inputData] ))
keyword[return] identifier[retVals] | def getScalars(self, inputData):
"""
Returns a numpy array containing the sub-field scalar value(s) for
each sub-field of the ``inputData``. To get the associated field names for
each of the scalar values, call :meth:`.getScalarNames()`.
For a simple scalar encoder, the scalar value is simply the input unmodified.
For category encoders, it is the scalar representing the category string
    that is passed in. For the datetime encoder, the scalar value is
the number of seconds since epoch.
The intent of the scalar representation of a sub-field is to provide a
baseline for measuring error differences. You can compare the scalar value
of the inputData with the scalar value returned from :meth:`.topDownCompute`
on a top-down representation to evaluate prediction accuracy, for example.
:param inputData: The data from the source. This is typically an object with
members
:return: array of scalar values
"""
retVals = numpy.array([])
if self.encoders is not None:
for (name, encoder, offset) in self.encoders:
values = encoder.getScalars(self._getInputValue(inputData, name))
retVals = numpy.hstack((retVals, values)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
retVals = numpy.hstack((retVals, inputData))
return retVals |
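A usage sketch for a composite encoder; the field names and the pre-built `encoder` instance are illustrative, not the exact nupic API surface:

# `encoder` is a multi-field encoder with sub-encoders already registered (assumed)
scalars = encoder.getScalars({"temperature": 21.5, "weekday": 3})
for name, value in zip(encoder.getScalarNames(), scalars):
    print(name, value)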
def short_form_one_format(jupytext_format):
"""Represent one jupytext format as a string"""
if not isinstance(jupytext_format, dict):
return jupytext_format
fmt = jupytext_format['extension']
if 'suffix' in jupytext_format:
fmt = jupytext_format['suffix'] + fmt
elif fmt.startswith('.'):
fmt = fmt[1:]
if 'prefix' in jupytext_format:
fmt = jupytext_format['prefix'] + '/' + fmt
if jupytext_format.get('format_name'):
if jupytext_format['extension'] not in ['.md', '.Rmd'] or jupytext_format['format_name'] == 'pandoc':
fmt = fmt + ':' + jupytext_format['format_name']
return fmt | def function[short_form_one_format, parameter[jupytext_format]]:
constant[Represent one jupytext format as a string]
if <ast.UnaryOp object at 0x7da20e963250> begin[:]
return[name[jupytext_format]]
variable[fmt] assign[=] call[name[jupytext_format]][constant[extension]]
if compare[constant[suffix] in name[jupytext_format]] begin[:]
variable[fmt] assign[=] binary_operation[call[name[jupytext_format]][constant[suffix]] + name[fmt]]
if compare[constant[prefix] in name[jupytext_format]] begin[:]
variable[fmt] assign[=] binary_operation[binary_operation[call[name[jupytext_format]][constant[prefix]] + constant[/]] + name[fmt]]
if call[name[jupytext_format].get, parameter[constant[format_name]]] begin[:]
if <ast.BoolOp object at 0x7da2054a4670> begin[:]
variable[fmt] assign[=] binary_operation[binary_operation[name[fmt] + constant[:]] + call[name[jupytext_format]][constant[format_name]]]
return[name[fmt]] | keyword[def] identifier[short_form_one_format] ( identifier[jupytext_format] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[jupytext_format] , identifier[dict] ):
keyword[return] identifier[jupytext_format]
identifier[fmt] = identifier[jupytext_format] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[jupytext_format] :
identifier[fmt] = identifier[jupytext_format] [ literal[string] ]+ identifier[fmt]
keyword[elif] identifier[fmt] . identifier[startswith] ( literal[string] ):
identifier[fmt] = identifier[fmt] [ literal[int] :]
keyword[if] literal[string] keyword[in] identifier[jupytext_format] :
identifier[fmt] = identifier[jupytext_format] [ literal[string] ]+ literal[string] + identifier[fmt]
keyword[if] identifier[jupytext_format] . identifier[get] ( literal[string] ):
keyword[if] identifier[jupytext_format] [ literal[string] ] keyword[not] keyword[in] [ literal[string] , literal[string] ] keyword[or] identifier[jupytext_format] [ literal[string] ]== literal[string] :
identifier[fmt] = identifier[fmt] + literal[string] + identifier[jupytext_format] [ literal[string] ]
keyword[return] identifier[fmt] | def short_form_one_format(jupytext_format):
"""Represent one jupytext format as a string"""
if not isinstance(jupytext_format, dict):
return jupytext_format # depends on [control=['if'], data=[]]
fmt = jupytext_format['extension']
if 'suffix' in jupytext_format:
fmt = jupytext_format['suffix'] + fmt # depends on [control=['if'], data=['jupytext_format']]
elif fmt.startswith('.'):
fmt = fmt[1:] # depends on [control=['if'], data=[]]
if 'prefix' in jupytext_format:
fmt = jupytext_format['prefix'] + '/' + fmt # depends on [control=['if'], data=['jupytext_format']]
if jupytext_format.get('format_name'):
if jupytext_format['extension'] not in ['.md', '.Rmd'] or jupytext_format['format_name'] == 'pandoc':
fmt = fmt + ':' + jupytext_format['format_name'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return fmt |
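Two concrete round-trips through the formatter, runnable as-is:

print(short_form_one_format({"extension": ".py", "format_name": "percent"}))
# -> py:percent
print(short_form_one_format({"extension": ".ipynb", "prefix": "notebooks"}))
# -> notebooks/ipynb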
def get_proficiencies_for_objectives(self, objective_ids):
"""Gets a ``ProficiencyList`` relating to the given objectives.
arg: objective_ids (osid.id.IdList): the objective ``Ids``
return: (osid.learning.ProficiencyList) - the returned
``Proficiency`` list
raise: NullArgument - ``objective_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.relationship.RelationshipLookupSession.get_relationships_for_destination
# NOTE: This implementation currently ignores plenary and effective views
collection = JSONClientValidated('learning',
collection='Proficiency',
runtime=self._runtime)
result = collection.find(
dict({'objectiveId': str(objective_ids)},
**self._view_filter())).sort('_id', ASCENDING)
return objects.ProficiencyList(result, runtime=self._runtime) | def function[get_proficiencies_for_objectives, parameter[self, objective_ids]]:
constant[Gets a ``ProficiencyList`` relating to the given objectives.
arg: objective_ids (osid.id.IdList): the objective ``Ids``
return: (osid.learning.ProficiencyList) - the returned
``Proficiency`` list
raise: NullArgument - ``objective_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[learning]]]
variable[result] assign[=] call[call[name[collection].find, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da18bc70eb0>], [<ast.Call object at 0x7da18bc72020>]]]]]].sort, parameter[constant[_id], name[ASCENDING]]]
return[call[name[objects].ProficiencyList, parameter[name[result]]]] | keyword[def] identifier[get_proficiencies_for_objectives] ( identifier[self] , identifier[objective_ids] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
identifier[result] = identifier[collection] . identifier[find] (
identifier[dict] ({ literal[string] : identifier[str] ( identifier[objective_ids] )},
** identifier[self] . identifier[_view_filter] ())). identifier[sort] ( literal[string] , identifier[ASCENDING] )
keyword[return] identifier[objects] . identifier[ProficiencyList] ( identifier[result] , identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_proficiencies_for_objectives(self, objective_ids):
"""Gets a ``ProficiencyList`` relating to the given objectives.
arg: objective_ids (osid.id.IdList): the objective ``Ids``
return: (osid.learning.ProficiencyList) - the returned
``Proficiency`` list
raise: NullArgument - ``objective_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.relationship.RelationshipLookupSession.get_relationships_for_destination
# NOTE: This implementation currently ignores plenary and effective views
collection = JSONClientValidated('learning', collection='Proficiency', runtime=self._runtime)
result = collection.find(dict({'objectiveId': str(objective_ids)}, **self._view_filter())).sort('_id', ASCENDING)
return objects.ProficiencyList(result, runtime=self._runtime) |
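A lookup-session sketch; how the session is constructed and the accessor names on Proficiency are assumptions about the surrounding OSID package:

# `session` is a ProficiencyLookupSession bound to a runtime (assumed)
proficiencies = session.get_proficiencies_for_objectives(objective_ids)
for proficiency in proficiencies:
    print(proficiency.ident)  # accessor name is illustrative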
def shuffle_egg(egg):
""" Shuffle an Egg's recalls"""
from .egg import Egg
pres, rec, features, dist_funcs = parse_egg(egg)
if pres.ndim==1:
pres = pres.reshape(1, pres.shape[0])
rec = rec.reshape(1, rec.shape[0])
features = features.reshape(1, features.shape[0])
for ilist in range(rec.shape[0]):
idx = np.random.permutation(rec.shape[1])
rec[ilist,:] = rec[ilist,idx]
return Egg(pres=pres, rec=rec, features=features, dist_funcs=dist_funcs) | def function[shuffle_egg, parameter[egg]]:
constant[ Shuffle an Egg's recalls]
from relative_module[egg] import module[Egg]
<ast.Tuple object at 0x7da1b10273d0> assign[=] call[name[parse_egg], parameter[name[egg]]]
if compare[name[pres].ndim equal[==] constant[1]] begin[:]
variable[pres] assign[=] call[name[pres].reshape, parameter[constant[1], call[name[pres].shape][constant[0]]]]
variable[rec] assign[=] call[name[rec].reshape, parameter[constant[1], call[name[rec].shape][constant[0]]]]
variable[features] assign[=] call[name[features].reshape, parameter[constant[1], call[name[features].shape][constant[0]]]]
for taget[name[ilist]] in starred[call[name[range], parameter[call[name[rec].shape][constant[0]]]]] begin[:]
variable[idx] assign[=] call[name[np].random.permutation, parameter[call[name[rec].shape][constant[1]]]]
call[name[rec]][tuple[[<ast.Name object at 0x7da1b0fec460>, <ast.Slice object at 0x7da1b0feccd0>]]] assign[=] call[name[rec]][tuple[[<ast.Name object at 0x7da1b0fef7c0>, <ast.Name object at 0x7da1b0fefe50>]]]
return[call[name[Egg], parameter[]]] | keyword[def] identifier[shuffle_egg] ( identifier[egg] ):
literal[string]
keyword[from] . identifier[egg] keyword[import] identifier[Egg]
identifier[pres] , identifier[rec] , identifier[features] , identifier[dist_funcs] = identifier[parse_egg] ( identifier[egg] )
keyword[if] identifier[pres] . identifier[ndim] == literal[int] :
identifier[pres] = identifier[pres] . identifier[reshape] ( literal[int] , identifier[pres] . identifier[shape] [ literal[int] ])
identifier[rec] = identifier[rec] . identifier[reshape] ( literal[int] , identifier[rec] . identifier[shape] [ literal[int] ])
identifier[features] = identifier[features] . identifier[reshape] ( literal[int] , identifier[features] . identifier[shape] [ literal[int] ])
keyword[for] identifier[ilist] keyword[in] identifier[range] ( identifier[rec] . identifier[shape] [ literal[int] ]):
identifier[idx] = identifier[np] . identifier[random] . identifier[permutation] ( identifier[rec] . identifier[shape] [ literal[int] ])
identifier[rec] [ identifier[ilist] ,:]= identifier[rec] [ identifier[ilist] , identifier[idx] ]
keyword[return] identifier[Egg] ( identifier[pres] = identifier[pres] , identifier[rec] = identifier[rec] , identifier[features] = identifier[features] , identifier[dist_funcs] = identifier[dist_funcs] ) | def shuffle_egg(egg):
""" Shuffle an Egg's recalls"""
from .egg import Egg
(pres, rec, features, dist_funcs) = parse_egg(egg)
if pres.ndim == 1:
pres = pres.reshape(1, pres.shape[0])
rec = rec.reshape(1, rec.shape[0])
features = features.reshape(1, features.shape[0]) # depends on [control=['if'], data=[]]
for ilist in range(rec.shape[0]):
idx = np.random.permutation(rec.shape[1])
rec[ilist, :] = rec[ilist, idx] # depends on [control=['for'], data=['ilist']]
return Egg(pres=pres, rec=rec, features=features, dist_funcs=dist_funcs) |
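A quick usage sketch; `egg` is a quail Egg assembled elsewhere, and only the within-list order of recalls changes:

shuffled = shuffle_egg(egg)
# shuffled carries presentations and features over unchanged (attribute names assumed);
# each recall list is a random permutation of the original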
def WriteToPath(obj, filepath):
"""Serializes and writes given Python object to the specified YAML file.
Args:
obj: A Python object to serialize.
filepath: A path to the file into which the object is to be written.
"""
with io.open(filepath, mode="w", encoding="utf-8") as filedesc:
WriteToFile(obj, filedesc) | def function[WriteToPath, parameter[obj, filepath]]:
constant[Serializes and writes given Python object to the specified YAML file.
Args:
obj: A Python object to serialize.
filepath: A path to the file into which the object is to be written.
]
with call[name[io].open, parameter[name[filepath]]] begin[:]
call[name[WriteToFile], parameter[name[obj], name[filedesc]]] | keyword[def] identifier[WriteToPath] ( identifier[obj] , identifier[filepath] ):
literal[string]
keyword[with] identifier[io] . identifier[open] ( identifier[filepath] , identifier[mode] = literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[filedesc] :
identifier[WriteToFile] ( identifier[obj] , identifier[filedesc] ) | def WriteToPath(obj, filepath):
"""Serializes and writes given Python object to the specified YAML file.
Args:
obj: A Python object to serialize.
filepath: A path to the file into which the object is to be written.
"""
with io.open(filepath, mode='w', encoding='utf-8') as filedesc:
WriteToFile(obj, filedesc) # depends on [control=['with'], data=['filedesc']] |
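A round trip with the matching reader, which is assumed to live in the same module:

WriteToPath({"answer": 42, "tags": ["a", "b"]}, "/tmp/example.yaml")
obj = ReadFromPath("/tmp/example.yaml")  # assumed counterpart function
assert obj["answer"] == 42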
def get_info(dstore):
"""
:returns: {'stats': dic, 'loss_types': dic, 'num_rlzs': R}
"""
oq = dstore['oqparam']
stats = {stat: s for s, stat in enumerate(oq.hazard_stats())}
loss_types = {lt: l for l, lt in enumerate(oq.loss_dt().names)}
imt = {imt: i for i, imt in enumerate(oq.imtls)}
num_rlzs = dstore['csm_info'].get_num_rlzs()
return dict(stats=stats, num_rlzs=num_rlzs, loss_types=loss_types,
imtls=oq.imtls, investigation_time=oq.investigation_time,
poes=oq.poes, imt=imt, uhs_dt=oq.uhs_dt()) | def function[get_info, parameter[dstore]]:
constant[
:returns: {'stats': dic, 'loss_types': dic, 'num_rlzs': R}
]
variable[oq] assign[=] call[name[dstore]][constant[oqparam]]
variable[stats] assign[=] <ast.DictComp object at 0x7da2054a7460>
variable[loss_types] assign[=] <ast.DictComp object at 0x7da2054a77c0>
variable[imt] assign[=] <ast.DictComp object at 0x7da2054a65f0>
variable[num_rlzs] assign[=] call[call[name[dstore]][constant[csm_info]].get_num_rlzs, parameter[]]
return[call[name[dict], parameter[]]] | keyword[def] identifier[get_info] ( identifier[dstore] ):
literal[string]
identifier[oq] = identifier[dstore] [ literal[string] ]
identifier[stats] ={ identifier[stat] : identifier[s] keyword[for] identifier[s] , identifier[stat] keyword[in] identifier[enumerate] ( identifier[oq] . identifier[hazard_stats] ())}
identifier[loss_types] ={ identifier[lt] : identifier[l] keyword[for] identifier[l] , identifier[lt] keyword[in] identifier[enumerate] ( identifier[oq] . identifier[loss_dt] (). identifier[names] )}
identifier[imt] ={ identifier[imt] : identifier[i] keyword[for] identifier[i] , identifier[imt] keyword[in] identifier[enumerate] ( identifier[oq] . identifier[imtls] )}
identifier[num_rlzs] = identifier[dstore] [ literal[string] ]. identifier[get_num_rlzs] ()
keyword[return] identifier[dict] ( identifier[stats] = identifier[stats] , identifier[num_rlzs] = identifier[num_rlzs] , identifier[loss_types] = identifier[loss_types] ,
identifier[imtls] = identifier[oq] . identifier[imtls] , identifier[investigation_time] = identifier[oq] . identifier[investigation_time] ,
identifier[poes] = identifier[oq] . identifier[poes] , identifier[imt] = identifier[imt] , identifier[uhs_dt] = identifier[oq] . identifier[uhs_dt] ()) | def get_info(dstore):
"""
:returns: {'stats': dic, 'loss_types': dic, 'num_rlzs': R}
"""
oq = dstore['oqparam']
stats = {stat: s for (s, stat) in enumerate(oq.hazard_stats())}
loss_types = {lt: l for (l, lt) in enumerate(oq.loss_dt().names)}
imt = {imt: i for (i, imt) in enumerate(oq.imtls)}
num_rlzs = dstore['csm_info'].get_num_rlzs()
return dict(stats=stats, num_rlzs=num_rlzs, loss_types=loss_types, imtls=oq.imtls, investigation_time=oq.investigation_time, poes=oq.poes, imt=imt, uhs_dt=oq.uhs_dt()) |
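A usage sketch against an open calculation datastore; how `dstore` is opened is elided here:

info = get_info(dstore)  # `dstore` is an open datastore handle (assumed)
print(info["num_rlzs"], sorted(info["loss_types"]), sorted(info["stats"]))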
def _get_arn_from_idempotency_token(self, token):
"""
        If the token doesn't exist, return None; later it will be
set with an expiry and arn.
If token expiry has passed, delete entry and return None
Else return ARN
:param token: String token
:return: None or ARN
"""
now = datetime.datetime.now()
if token in self._idempotency_tokens:
if self._idempotency_tokens[token]['expires'] < now:
# Token has expired, new request
del self._idempotency_tokens[token]
return None
else:
return self._idempotency_tokens[token]['arn']
return None | def function[_get_arn_from_idempotency_token, parameter[self, token]]:
constant[
        If the token doesn't exist, return None; later it will be
set with an expiry and arn.
If token expiry has passed, delete entry and return None
Else return ARN
:param token: String token
:return: None or ARN
]
variable[now] assign[=] call[name[datetime].datetime.now, parameter[]]
if compare[name[token] in name[self]._idempotency_tokens] begin[:]
if compare[call[call[name[self]._idempotency_tokens][name[token]]][constant[expires]] less[<] name[now]] begin[:]
<ast.Delete object at 0x7da18dc05840>
return[constant[None]]
return[constant[None]] | keyword[def] identifier[_get_arn_from_idempotency_token] ( identifier[self] , identifier[token] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[datetime] . identifier[now] ()
keyword[if] identifier[token] keyword[in] identifier[self] . identifier[_idempotency_tokens] :
keyword[if] identifier[self] . identifier[_idempotency_tokens] [ identifier[token] ][ literal[string] ]< identifier[now] :
keyword[del] identifier[self] . identifier[_idempotency_tokens] [ identifier[token] ]
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[self] . identifier[_idempotency_tokens] [ identifier[token] ][ literal[string] ]
keyword[return] keyword[None] | def _get_arn_from_idempotency_token(self, token):
"""
        If the token doesn't exist, return None; later it will be
set with an expiry and arn.
If token expiry has passed, delete entry and return None
Else return ARN
:param token: String token
:return: None or ARN
"""
now = datetime.datetime.now()
if token in self._idempotency_tokens:
if self._idempotency_tokens[token]['expires'] < now:
# Token has expired, new request
del self._idempotency_tokens[token]
return None # depends on [control=['if'], data=[]]
else:
return self._idempotency_tokens[token]['arn'] # depends on [control=['if'], data=['token']]
return None |
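A sketch of the companion setter such a cache needs; the helper name and the one-hour window are assumptions, not necessarily the real implementation:

import datetime

def _set_arn_for_idempotency_token(self, token, arn, hours=1):
    # hypothetical writer for the cache that the reader above consults
    self._idempotency_tokens[token] = {
        "arn": arn,
        "expires": datetime.datetime.now() + datetime.timedelta(hours=hours),
    }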
def map_custom_field(custom_fields, fields):
"""Add extra information for custom fields.
:param custom_fields: set of custom fields with the extra information
:param fields: fields of the issue where to add the extra information
    :returns: a set of items with the extra information mapped
"""
def build_cf(cf, v):
return {'id': cf['id'], 'name': cf['name'], 'value': v}
return {
k: build_cf(custom_fields[k], v)
for k, v in fields.items()
if k in custom_fields
} | def function[map_custom_field, parameter[custom_fields, fields]]:
constant[Add extra information for custom fields.
:param custom_fields: set of custom fields with the extra information
:param fields: fields of the issue where to add the extra information
    :returns: a set of items with the extra information mapped
]
def function[build_cf, parameter[cf, v]]:
return[dictionary[[<ast.Constant object at 0x7da1b0294250>, <ast.Constant object at 0x7da1b0294220>, <ast.Constant object at 0x7da1b02941f0>], [<ast.Subscript object at 0x7da1b02941c0>, <ast.Subscript object at 0x7da1b0294130>, <ast.Name object at 0x7da1b02940a0>]]]
return[<ast.DictComp object at 0x7da1b0297f10>] | keyword[def] identifier[map_custom_field] ( identifier[custom_fields] , identifier[fields] ):
literal[string]
keyword[def] identifier[build_cf] ( identifier[cf] , identifier[v] ):
keyword[return] { literal[string] : identifier[cf] [ literal[string] ], literal[string] : identifier[cf] [ literal[string] ], literal[string] : identifier[v] }
keyword[return] {
identifier[k] : identifier[build_cf] ( identifier[custom_fields] [ identifier[k] ], identifier[v] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[fields] . identifier[items] ()
keyword[if] identifier[k] keyword[in] identifier[custom_fields]
} | def map_custom_field(custom_fields, fields):
"""Add extra information for custom fields.
:param custom_fields: set of custom fields with the extra information
:param fields: fields of the issue where to add the extra information
    :returns: a set of items with the extra information mapped
"""
def build_cf(cf, v):
return {'id': cf['id'], 'name': cf['name'], 'value': v}
return {k: build_cf(custom_fields[k], v) for (k, v) in fields.items() if k in custom_fields} |
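A concrete, runnable example of the mapping:

custom_fields = {"customfield_001": {"id": "customfield_001", "name": "Story Points"}}
fields = {"customfield_001": 5, "summary": "Fix login"}
print(map_custom_field(custom_fields, fields))
# {'customfield_001': {'id': 'customfield_001', 'name': 'Story Points', 'value': 5}}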
def get_net_configuration(self, channel=None, gateway_macs=True):
"""Get network configuration data
Retrieve network configuration from the target
:param channel: Channel to configure, defaults to None for 'autodetect'
:param gateway_macs: Whether to retrieve mac addresses for gateways
:returns: A dictionary of network configuration data
"""
if channel is None:
channel = self.get_network_channel()
retdata = {}
v4addr = self._fetch_lancfg_param(channel, 3)
if v4addr is None:
retdata['ipv4_address'] = None
else:
v4masklen = self._fetch_lancfg_param(channel, 6, prefixlen=True)
retdata['ipv4_address'] = '{0}/{1}'.format(v4addr, v4masklen)
v4cfgmethods = {
0: 'Unspecified',
1: 'Static',
2: 'DHCP',
3: 'BIOS',
4: 'Other',
}
retdata['ipv4_configuration'] = v4cfgmethods[self._fetch_lancfg_param(
channel, 4)]
retdata['mac_address'] = self._fetch_lancfg_param(channel, 5)
retdata['ipv4_gateway'] = self._fetch_lancfg_param(channel, 12)
retdata['ipv4_backup_gateway'] = self._fetch_lancfg_param(channel, 14)
if gateway_macs:
retdata['ipv4_gateway_mac'] = self._fetch_lancfg_param(channel, 13)
retdata['ipv4_backup_gateway_mac'] = self._fetch_lancfg_param(
channel, 15)
self.oem_init()
self._oem.add_extra_net_configuration(retdata)
return retdata | def function[get_net_configuration, parameter[self, channel, gateway_macs]]:
constant[Get network configuration data
Retrieve network configuration from the target
:param channel: Channel to configure, defaults to None for 'autodetect'
:param gateway_macs: Whether to retrieve mac addresses for gateways
:returns: A dictionary of network configuration data
]
if compare[name[channel] is constant[None]] begin[:]
variable[channel] assign[=] call[name[self].get_network_channel, parameter[]]
variable[retdata] assign[=] dictionary[[], []]
variable[v4addr] assign[=] call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[3]]]
if compare[name[v4addr] is constant[None]] begin[:]
call[name[retdata]][constant[ipv4_address]] assign[=] constant[None]
variable[v4cfgmethods] assign[=] dictionary[[<ast.Constant object at 0x7da207f02dd0>, <ast.Constant object at 0x7da207f01030>, <ast.Constant object at 0x7da207f00640>, <ast.Constant object at 0x7da207f01270>, <ast.Constant object at 0x7da207f03670>], [<ast.Constant object at 0x7da207f03340>, <ast.Constant object at 0x7da207f00280>, <ast.Constant object at 0x7da207f01c30>, <ast.Constant object at 0x7da207f01960>, <ast.Constant object at 0x7da207f00610>]]
call[name[retdata]][constant[ipv4_configuration]] assign[=] call[name[v4cfgmethods]][call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[4]]]]
call[name[retdata]][constant[mac_address]] assign[=] call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[5]]]
call[name[retdata]][constant[ipv4_gateway]] assign[=] call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[12]]]
call[name[retdata]][constant[ipv4_backup_gateway]] assign[=] call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[14]]]
if name[gateway_macs] begin[:]
call[name[retdata]][constant[ipv4_gateway_mac]] assign[=] call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[13]]]
call[name[retdata]][constant[ipv4_backup_gateway_mac]] assign[=] call[name[self]._fetch_lancfg_param, parameter[name[channel], constant[15]]]
call[name[self].oem_init, parameter[]]
call[name[self]._oem.add_extra_net_configuration, parameter[name[retdata]]]
return[name[retdata]] | keyword[def] identifier[get_net_configuration] ( identifier[self] , identifier[channel] = keyword[None] , identifier[gateway_macs] = keyword[True] ):
literal[string]
keyword[if] identifier[channel] keyword[is] keyword[None] :
identifier[channel] = identifier[self] . identifier[get_network_channel] ()
identifier[retdata] ={}
identifier[v4addr] = identifier[self] . identifier[_fetch_lancfg_param] ( identifier[channel] , literal[int] )
keyword[if] identifier[v4addr] keyword[is] keyword[None] :
identifier[retdata] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[v4masklen] = identifier[self] . identifier[_fetch_lancfg_param] ( identifier[channel] , literal[int] , identifier[prefixlen] = keyword[True] )
identifier[retdata] [ literal[string] ]= literal[string] . identifier[format] ( identifier[v4addr] , identifier[v4masklen] )
identifier[v4cfgmethods] ={
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
}
identifier[retdata] [ literal[string] ]= identifier[v4cfgmethods] [ identifier[self] . identifier[_fetch_lancfg_param] (
identifier[channel] , literal[int] )]
identifier[retdata] [ literal[string] ]= identifier[self] . identifier[_fetch_lancfg_param] ( identifier[channel] , literal[int] )
identifier[retdata] [ literal[string] ]= identifier[self] . identifier[_fetch_lancfg_param] ( identifier[channel] , literal[int] )
identifier[retdata] [ literal[string] ]= identifier[self] . identifier[_fetch_lancfg_param] ( identifier[channel] , literal[int] )
keyword[if] identifier[gateway_macs] :
identifier[retdata] [ literal[string] ]= identifier[self] . identifier[_fetch_lancfg_param] ( identifier[channel] , literal[int] )
identifier[retdata] [ literal[string] ]= identifier[self] . identifier[_fetch_lancfg_param] (
identifier[channel] , literal[int] )
identifier[self] . identifier[oem_init] ()
identifier[self] . identifier[_oem] . identifier[add_extra_net_configuration] ( identifier[retdata] )
keyword[return] identifier[retdata] | def get_net_configuration(self, channel=None, gateway_macs=True):
"""Get network configuration data
Retrieve network configuration from the target
:param channel: Channel to configure, defaults to None for 'autodetect'
:param gateway_macs: Whether to retrieve mac addresses for gateways
:returns: A dictionary of network configuration data
"""
if channel is None:
channel = self.get_network_channel() # depends on [control=['if'], data=['channel']]
retdata = {}
v4addr = self._fetch_lancfg_param(channel, 3)
if v4addr is None:
retdata['ipv4_address'] = None # depends on [control=['if'], data=[]]
else:
v4masklen = self._fetch_lancfg_param(channel, 6, prefixlen=True)
retdata['ipv4_address'] = '{0}/{1}'.format(v4addr, v4masklen)
v4cfgmethods = {0: 'Unspecified', 1: 'Static', 2: 'DHCP', 3: 'BIOS', 4: 'Other'}
retdata['ipv4_configuration'] = v4cfgmethods[self._fetch_lancfg_param(channel, 4)]
retdata['mac_address'] = self._fetch_lancfg_param(channel, 5)
retdata['ipv4_gateway'] = self._fetch_lancfg_param(channel, 12)
retdata['ipv4_backup_gateway'] = self._fetch_lancfg_param(channel, 14)
if gateway_macs:
retdata['ipv4_gateway_mac'] = self._fetch_lancfg_param(channel, 13)
retdata['ipv4_backup_gateway_mac'] = self._fetch_lancfg_param(channel, 15) # depends on [control=['if'], data=[]]
self.oem_init()
self._oem.add_extra_net_configuration(retdata)
return retdata |
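A call-site sketch; `ipmicmd` stands in for whatever command object exposes this method:

netcfg = ipmicmd.get_net_configuration(gateway_macs=False)
print(netcfg["ipv4_address"], netcfg["ipv4_configuration"], netcfg["mac_address"])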
def pformat(self, prefix=()):
'''
Makes a pretty ASCII format of the data, suitable for
displaying in a console or saving to a text file.
Returns a list of lines.
'''
nan = float("nan")
def sformat(segment, stat):
FMT = "n={0}, mean={1}, p50/95={2}/{3}, max={4}"
line_segs = [segment]
for s in [stat]:
p = s.get_percentiles()
p50, p95 = p.get(0.50, nan), p.get(0.95, nan)
line_segs.append(FMT.format(s.n, s.mean, p50, p95, s.max))
return '{0}: {1}'.format(*line_segs)
lines = []
for path in sorted(self.path_stats.keys()):
lines.append('=====================')
for seg, stat in zip(path, self.path_stats[path]):
lines.append(sformat(seg, stat))
return lines | def function[pformat, parameter[self, prefix]]:
constant[
Makes a pretty ASCII format of the data, suitable for
displaying in a console or saving to a text file.
Returns a list of lines.
]
variable[nan] assign[=] call[name[float], parameter[constant[nan]]]
def function[sformat, parameter[segment, stat]]:
variable[FMT] assign[=] constant[n={0}, mean={1}, p50/95={2}/{3}, max={4}]
variable[line_segs] assign[=] list[[<ast.Name object at 0x7da18f8132b0>]]
for taget[name[s]] in starred[list[[<ast.Name object at 0x7da18f811e10>]]] begin[:]
variable[p] assign[=] call[name[s].get_percentiles, parameter[]]
<ast.Tuple object at 0x7da18f810430> assign[=] tuple[[<ast.Call object at 0x7da18f811270>, <ast.Call object at 0x7da18f8117e0>]]
call[name[line_segs].append, parameter[call[name[FMT].format, parameter[name[s].n, name[s].mean, name[p50], name[p95], name[s].max]]]]
return[call[constant[{0}: {1}].format, parameter[<ast.Starred object at 0x7da18f813970>]]]
variable[lines] assign[=] list[[]]
for taget[name[path]] in starred[call[name[sorted], parameter[call[name[self].path_stats.keys, parameter[]]]]] begin[:]
call[name[lines].append, parameter[constant[=====================]]]
for taget[tuple[[<ast.Name object at 0x7da1b25070a0>, <ast.Name object at 0x7da1b25074f0>]]] in starred[call[name[zip], parameter[name[path], call[name[self].path_stats][name[path]]]]] begin[:]
call[name[lines].append, parameter[call[name[sformat], parameter[name[seg], name[stat]]]]]
return[name[lines]] | keyword[def] identifier[pformat] ( identifier[self] , identifier[prefix] =()):
literal[string]
identifier[nan] = identifier[float] ( literal[string] )
keyword[def] identifier[sformat] ( identifier[segment] , identifier[stat] ):
identifier[FMT] = literal[string]
identifier[line_segs] =[ identifier[segment] ]
keyword[for] identifier[s] keyword[in] [ identifier[stat] ]:
identifier[p] = identifier[s] . identifier[get_percentiles] ()
identifier[p50] , identifier[p95] = identifier[p] . identifier[get] ( literal[int] , identifier[nan] ), identifier[p] . identifier[get] ( literal[int] , identifier[nan] )
identifier[line_segs] . identifier[append] ( identifier[FMT] . identifier[format] ( identifier[s] . identifier[n] , identifier[s] . identifier[mean] , identifier[p50] , identifier[p95] , identifier[s] . identifier[max] ))
keyword[return] literal[string] . identifier[format] (* identifier[line_segs] )
identifier[lines] =[]
keyword[for] identifier[path] keyword[in] identifier[sorted] ( identifier[self] . identifier[path_stats] . identifier[keys] ()):
identifier[lines] . identifier[append] ( literal[string] )
keyword[for] identifier[seg] , identifier[stat] keyword[in] identifier[zip] ( identifier[path] , identifier[self] . identifier[path_stats] [ identifier[path] ]):
identifier[lines] . identifier[append] ( identifier[sformat] ( identifier[seg] , identifier[stat] ))
keyword[return] identifier[lines] | def pformat(self, prefix=()):
"""
Makes a pretty ASCII format of the data, suitable for
displaying in a console or saving to a text file.
Returns a list of lines.
"""
nan = float('nan')
def sformat(segment, stat):
FMT = 'n={0}, mean={1}, p50/95={2}/{3}, max={4}'
line_segs = [segment]
for s in [stat]:
p = s.get_percentiles()
(p50, p95) = (p.get(0.5, nan), p.get(0.95, nan))
line_segs.append(FMT.format(s.n, s.mean, p50, p95, s.max)) # depends on [control=['for'], data=['s']]
return '{0}: {1}'.format(*line_segs)
lines = []
for path in sorted(self.path_stats.keys()):
lines.append('=====================')
for (seg, stat) in zip(path, self.path_stats[path]):
lines.append(sformat(seg, stat)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['path']]
return lines |
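Typical consumption of the formatter; `tracker` is a hypothetical instance holding path_stats:

for line in tracker.pformat():
    print(line)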
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key) | def function[_qualified_key, parameter[self, key]]:
constant[
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
]
variable[pfx] assign[=] <ast.IfExp object at 0x7da18f00cdc0>
return[call[constant[{}{}].format, parameter[name[pfx], name[key]]]] | keyword[def] identifier[_qualified_key] ( identifier[self] , identifier[key] ):
literal[string]
identifier[pfx] = identifier[self] . identifier[key_prefix] keyword[if] identifier[self] . identifier[key_prefix] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[pfx] , identifier[key] ) | def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key) |
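The behaviour in both configurations, assuming `cache` is an instance with the key_prefix attribute used above:

cache.key_prefix = "session:"
assert cache._qualified_key("abc") == "session:abc"
cache.key_prefix = None
assert cache._qualified_key("abc") == "abc"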
def freeze(dest_dir, opt):
"""Iterates over the Secretfile looking for secrets to freeze"""
tmp_dir = ensure_tmpdir()
dest_prefix = "%s/dest" % tmp_dir
ensure_dir(dest_dir)
ensure_dir(dest_prefix)
config = get_secretfile(opt)
Context.load(config, opt) \
.freeze(dest_prefix)
zip_filename = freeze_archive(tmp_dir, dest_prefix)
ice_file = freeze_encrypt(dest_dir, zip_filename, config, opt)
shutil.rmtree(tmp_dir)
LOG.debug("Generated file is %s", ice_file) | def function[freeze, parameter[dest_dir, opt]]:
constant[Iterates over the Secretfile looking for secrets to freeze]
variable[tmp_dir] assign[=] call[name[ensure_tmpdir], parameter[]]
variable[dest_prefix] assign[=] binary_operation[constant[%s/dest] <ast.Mod object at 0x7da2590d6920> name[tmp_dir]]
call[name[ensure_dir], parameter[name[dest_dir]]]
call[name[ensure_dir], parameter[name[dest_prefix]]]
variable[config] assign[=] call[name[get_secretfile], parameter[name[opt]]]
call[call[name[Context].load, parameter[name[config], name[opt]]].freeze, parameter[name[dest_prefix]]]
variable[zip_filename] assign[=] call[name[freeze_archive], parameter[name[tmp_dir], name[dest_prefix]]]
variable[ice_file] assign[=] call[name[freeze_encrypt], parameter[name[dest_dir], name[zip_filename], name[config], name[opt]]]
call[name[shutil].rmtree, parameter[name[tmp_dir]]]
call[name[LOG].debug, parameter[constant[Generated file is %s], name[ice_file]]] | keyword[def] identifier[freeze] ( identifier[dest_dir] , identifier[opt] ):
literal[string]
identifier[tmp_dir] = identifier[ensure_tmpdir] ()
identifier[dest_prefix] = literal[string] % identifier[tmp_dir]
identifier[ensure_dir] ( identifier[dest_dir] )
identifier[ensure_dir] ( identifier[dest_prefix] )
identifier[config] = identifier[get_secretfile] ( identifier[opt] )
identifier[Context] . identifier[load] ( identifier[config] , identifier[opt] ). identifier[freeze] ( identifier[dest_prefix] )
identifier[zip_filename] = identifier[freeze_archive] ( identifier[tmp_dir] , identifier[dest_prefix] )
identifier[ice_file] = identifier[freeze_encrypt] ( identifier[dest_dir] , identifier[zip_filename] , identifier[config] , identifier[opt] )
identifier[shutil] . identifier[rmtree] ( identifier[tmp_dir] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[ice_file] ) | def freeze(dest_dir, opt):
"""Iterates over the Secretfile looking for secrets to freeze"""
tmp_dir = ensure_tmpdir()
dest_prefix = '%s/dest' % tmp_dir
ensure_dir(dest_dir)
ensure_dir(dest_prefix)
config = get_secretfile(opt)
Context.load(config, opt).freeze(dest_prefix)
zip_filename = freeze_archive(tmp_dir, dest_prefix)
ice_file = freeze_encrypt(dest_dir, zip_filename, config, opt)
shutil.rmtree(tmp_dir)
LOG.debug('Generated file is %s', ice_file) |
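A call-site sketch; `opt` is the parsed option namespace the surrounding CLI builds, and the destination path is a placeholder:

freeze("/tmp/icefiles", opt)  # writes the encrypted archive under /tmp/icefiles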
def _add_hook(self, socket, callback):
"""Generic hook. The passed socket has to be "receive only".
"""
self._hooks.append(socket)
self._hooks_cb[socket] = callback
if self.poller:
self.poller.register(socket, POLLIN) | def function[_add_hook, parameter[self, socket, callback]]:
constant[Generic hook. The passed socket has to be "receive only".
]
call[name[self]._hooks.append, parameter[name[socket]]]
call[name[self]._hooks_cb][name[socket]] assign[=] name[callback]
if name[self].poller begin[:]
call[name[self].poller.register, parameter[name[socket], name[POLLIN]]] | keyword[def] identifier[_add_hook] ( identifier[self] , identifier[socket] , identifier[callback] ):
literal[string]
identifier[self] . identifier[_hooks] . identifier[append] ( identifier[socket] )
identifier[self] . identifier[_hooks_cb] [ identifier[socket] ]= identifier[callback]
keyword[if] identifier[self] . identifier[poller] :
identifier[self] . identifier[poller] . identifier[register] ( identifier[socket] , identifier[POLLIN] ) | def _add_hook(self, socket, callback):
"""Generic hook. The passed socket has to be "receive only".
"""
self._hooks.append(socket)
self._hooks_cb[socket] = callback
if self.poller:
self.poller.register(socket, POLLIN) # depends on [control=['if'], data=[]] |
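One way such a hook might be registered, using a receive-only ZeroMQ SUB socket; the endpoint, the `receiver` instance, and the callback signature are illustrative:

import zmq

ctx = zmq.Context()
sub = ctx.socket(zmq.SUB)                  # receive-only, as the docstring requires
sub.connect("tcp://localhost:5556")        # hypothetical endpoint
sub.setsockopt_string(zmq.SUBSCRIBE, "")
receiver._add_hook(sub, lambda sock: print("hook fired on", sock))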
def convert_to_string(self, block):
"""
Takes a list of SeqRecordExpanded objects corresponding to a gene_code
and produces the gene_block as string.
:param block:
:return: str.
"""
if self.aminoacids:
molecule_type = "protein"
else:
molecule_type = "dna"
out = None
for seq_record in block:
if not out:
out = '&[{0}]\n'.format(molecule_type, seq_record.gene_code)
taxon_id = '{0}_{1}_{2}'.format(seq_record.voucher_code,
seq_record.taxonomy['genus'],
seq_record.taxonomy['species'],
)
sequence = get_seq(seq_record, self.codon_positions, self.aminoacids,
self.degenerate)
seq = sequence.seq
if sequence.warning:
self.warnings.append(sequence.warning)
out += '{0}{1}\n'.format(taxon_id.ljust(55), seq)
return out | def function[convert_to_string, parameter[self, block]]:
constant[
Takes a list of SeqRecordExpanded objects corresponding to a gene_code
and produces the gene_block as string.
:param block:
:return: str.
]
if name[self].aminoacids begin[:]
variable[molecule_type] assign[=] constant[protein]
variable[out] assign[=] constant[None]
for taget[name[seq_record]] in starred[name[block]] begin[:]
if <ast.UnaryOp object at 0x7da18fe91e70> begin[:]
variable[out] assign[=] call[constant[&[{0}]
].format, parameter[name[molecule_type], name[seq_record].gene_code]]
variable[taxon_id] assign[=] call[constant[{0}_{1}_{2}].format, parameter[name[seq_record].voucher_code, call[name[seq_record].taxonomy][constant[genus]], call[name[seq_record].taxonomy][constant[species]]]]
variable[sequence] assign[=] call[name[get_seq], parameter[name[seq_record], name[self].codon_positions, name[self].aminoacids, name[self].degenerate]]
variable[seq] assign[=] name[sequence].seq
if name[sequence].warning begin[:]
call[name[self].warnings.append, parameter[name[sequence].warning]]
<ast.AugAssign object at 0x7da18fe939d0>
return[name[out]] | keyword[def] identifier[convert_to_string] ( identifier[self] , identifier[block] ):
literal[string]
keyword[if] identifier[self] . identifier[aminoacids] :
identifier[molecule_type] = literal[string]
keyword[else] :
identifier[molecule_type] = literal[string]
identifier[out] = keyword[None]
keyword[for] identifier[seq_record] keyword[in] identifier[block] :
keyword[if] keyword[not] identifier[out] :
identifier[out] = literal[string] . identifier[format] ( identifier[molecule_type] , identifier[seq_record] . identifier[gene_code] )
identifier[taxon_id] = literal[string] . identifier[format] ( identifier[seq_record] . identifier[voucher_code] ,
identifier[seq_record] . identifier[taxonomy] [ literal[string] ],
identifier[seq_record] . identifier[taxonomy] [ literal[string] ],
)
identifier[sequence] = identifier[get_seq] ( identifier[seq_record] , identifier[self] . identifier[codon_positions] , identifier[self] . identifier[aminoacids] ,
identifier[self] . identifier[degenerate] )
identifier[seq] = identifier[sequence] . identifier[seq]
keyword[if] identifier[sequence] . identifier[warning] :
identifier[self] . identifier[warnings] . identifier[append] ( identifier[sequence] . identifier[warning] )
identifier[out] += literal[string] . identifier[format] ( identifier[taxon_id] . identifier[ljust] ( literal[int] ), identifier[seq] )
keyword[return] identifier[out] | def convert_to_string(self, block):
"""
Takes a list of SeqRecordExpanded objects corresponding to a gene_code
and produces the gene_block as string.
:param block:
:return: str.
"""
if self.aminoacids:
molecule_type = 'protein' # depends on [control=['if'], data=[]]
else:
molecule_type = 'dna'
out = None
for seq_record in block:
if not out:
out = '&[{0}]\n'.format(molecule_type, seq_record.gene_code) # depends on [control=['if'], data=[]]
taxon_id = '{0}_{1}_{2}'.format(seq_record.voucher_code, seq_record.taxonomy['genus'], seq_record.taxonomy['species'])
sequence = get_seq(seq_record, self.codon_positions, self.aminoacids, self.degenerate)
seq = sequence.seq
if sequence.warning:
self.warnings.append(sequence.warning) # depends on [control=['if'], data=[]]
out += '{0}{1}\n'.format(taxon_id.ljust(55), seq) # depends on [control=['for'], data=['seq_record']]
return out |
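A driver sketch; the converter instance and the list of SeqRecordExpanded objects are assumed built elsewhere in the surrounding package:

# `writer` is an instance of the surrounding converter class, `records` a list of
# SeqRecordExpanded objects for one gene_code (both assumed)
print(writer.convert_to_string(records))
# &[dna]
# <voucher>_<genus>_<species>, padded to 55 chars, followed by the sequence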