code | code_sememe | token_type | code_dependency
---|---|---|---
stringlengths 75–104k | stringlengths 47–309k | stringlengths 215–214k | stringlengths 75–155k
def iteritems(self, **options):
    '''Return a query iterator with (id, object) pairs.'''
    iter = self.query(**options)
    while True:
        obj = iter.next()
        yield (obj.id, obj) | def function[iteritems, parameter[self]]:
constant[Return a query iterator with (id, object) pairs.]
variable[iter] assign[=] call[name[self].query, parameter[]]
while constant[True] begin[:]
variable[obj] assign[=] call[name[iter].next, parameter[]]
<ast.Yield object at 0x7da1b0e612d0> | keyword[def] identifier[iteritems] ( identifier[self] ,** identifier[options] ):
literal[string]
identifier[iter] = identifier[self] . identifier[query] (** identifier[options] )
keyword[while] keyword[True] :
identifier[obj] = identifier[iter] . identifier[next] ()
keyword[yield] ( identifier[obj] . identifier[id] , identifier[obj] ) | def iteritems(self, **options):
"""Return a query interator with (id, object) pairs."""
iter = self.query(**options)
while True:
obj = iter.next()
yield (obj.id, obj) # depends on [control=['while'], data=[]] |
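A minimal usage sketch for the `iteritems` row above. The `Store` and `Obj` names are stand-ins invented here; the original snippet's `iter.next()` is Python 2 style, so this sketch ports the same pattern to Python 3 with `next()` and an explicit StopIteration catch (a bare StopIteration escaping a generator raises RuntimeError since Python 3.7):

import collections

Obj = collections.namedtuple('Obj', ['id', 'name'])

class Store(object):
    def __init__(self, objects):
        self._objects = objects

    def query(self, **options):
        # stand-in for a real backend query; options are ignored here
        return iter(self._objects)

    def iteritems(self, **options):
        it = self.query(**options)
        while True:
            try:
                obj = next(it)
            except StopIteration:
                return
            yield (obj.id, obj)

store = Store([Obj(1, 'a'), Obj(2, 'b')])
assert list(store.iteritems()) == [(1, Obj(1, 'a')), (2, Obj(2, 'b'))]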
def from_dict(cls, label=None, label2=None, icon=None, thumbnail=None,
              path=None, selected=None, info=None, properties=None,
              context_menu=None, replace_context_menu=False,
              is_playable=None, info_type='video', stream_info=None):
    '''A ListItem constructor for setting a lot of properties not
    available in the regular __init__ method. Useful to collect all
    the properties in a dict and then use the **dct to call this
    method.
    '''
    listitem = cls(label, label2, icon, thumbnail, path)
    if selected is not None:
        listitem.select(selected)
    if info:
        listitem.set_info(info_type, info)
    if is_playable:
        listitem.set_is_playable(True)
    if properties:
        # Need to support existing tuples, but prefer to have a dict for
        # properties.
        if hasattr(properties, 'items'):
            properties = properties.items()
        for key, val in properties:
            listitem.set_property(key, val)
    if stream_info:
        for stream_type, stream_values in stream_info.items():
            listitem.add_stream_info(stream_type, stream_values)
    if context_menu:
        listitem.add_context_menu_items(context_menu, replace_context_menu)
    return listitem | def function[from_dict, parameter[cls, label, label2, icon, thumbnail, path, selected, info, properties, context_menu, replace_context_menu, is_playable, info_type, stream_info]]:
constant[A ListItem constructor for setting a lot of properties not
available in the regular __init__ method. Useful to collect all
the properties in a dict and then use the **dct to call this
method.
]
variable[listitem] assign[=] call[name[cls], parameter[name[label], name[label2], name[icon], name[thumbnail], name[path]]]
if compare[name[selected] is_not constant[None]] begin[:]
call[name[listitem].select, parameter[name[selected]]]
if name[info] begin[:]
call[name[listitem].set_info, parameter[name[info_type], name[info]]]
if name[is_playable] begin[:]
call[name[listitem].set_is_playable, parameter[constant[True]]]
if name[properties] begin[:]
if call[name[hasattr], parameter[name[properties], constant[items]]] begin[:]
variable[properties] assign[=] call[name[properties].items, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b18384c0>, <ast.Name object at 0x7da1b1838430>]]] in starred[name[properties]] begin[:]
call[name[listitem].set_property, parameter[name[key], name[val]]]
if name[stream_info] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1839240>, <ast.Name object at 0x7da1b183a1a0>]]] in starred[call[name[stream_info].items, parameter[]]] begin[:]
call[name[listitem].add_stream_info, parameter[name[stream_type], name[stream_values]]]
if name[context_menu] begin[:]
call[name[listitem].add_context_menu_items, parameter[name[context_menu], name[replace_context_menu]]]
return[name[listitem]] | keyword[def] identifier[from_dict] ( identifier[cls] , identifier[label] = keyword[None] , identifier[label2] = keyword[None] , identifier[icon] = keyword[None] , identifier[thumbnail] = keyword[None] ,
identifier[path] = keyword[None] , identifier[selected] = keyword[None] , identifier[info] = keyword[None] , identifier[properties] = keyword[None] ,
identifier[context_menu] = keyword[None] , identifier[replace_context_menu] = keyword[False] ,
identifier[is_playable] = keyword[None] , identifier[info_type] = literal[string] , identifier[stream_info] = keyword[None] ):
literal[string]
identifier[listitem] = identifier[cls] ( identifier[label] , identifier[label2] , identifier[icon] , identifier[thumbnail] , identifier[path] )
keyword[if] identifier[selected] keyword[is] keyword[not] keyword[None] :
identifier[listitem] . identifier[select] ( identifier[selected] )
keyword[if] identifier[info] :
identifier[listitem] . identifier[set_info] ( identifier[info_type] , identifier[info] )
keyword[if] identifier[is_playable] :
identifier[listitem] . identifier[set_is_playable] ( keyword[True] )
keyword[if] identifier[properties] :
keyword[if] identifier[hasattr] ( identifier[properties] , literal[string] ):
identifier[properties] = identifier[properties] . identifier[items] ()
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[properties] :
identifier[listitem] . identifier[set_property] ( identifier[key] , identifier[val] )
keyword[if] identifier[stream_info] :
keyword[for] identifier[stream_type] , identifier[stream_values] keyword[in] identifier[stream_info] . identifier[items] ():
identifier[listitem] . identifier[add_stream_info] ( identifier[stream_type] , identifier[stream_values] )
keyword[if] identifier[context_menu] :
identifier[listitem] . identifier[add_context_menu_items] ( identifier[context_menu] , identifier[replace_context_menu] )
keyword[return] identifier[listitem] | def from_dict(cls, label=None, label2=None, icon=None, thumbnail=None, path=None, selected=None, info=None, properties=None, context_menu=None, replace_context_menu=False, is_playable=None, info_type='video', stream_info=None):
"""A ListItem constructor for setting a lot of properties not
available in the regular __init__ method. Useful to collect all
the properties in a dict and then use the **dct to call this
method.
"""
listitem = cls(label, label2, icon, thumbnail, path)
if selected is not None:
listitem.select(selected) # depends on [control=['if'], data=['selected']]
if info:
listitem.set_info(info_type, info) # depends on [control=['if'], data=[]]
if is_playable:
listitem.set_is_playable(True) # depends on [control=['if'], data=[]]
if properties:
# Need to support existing tuples, but prefer to have a dict for
# properties.
if hasattr(properties, 'items'):
properties = properties.items() # depends on [control=['if'], data=[]]
for (key, val) in properties:
listitem.set_property(key, val) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if stream_info:
for (stream_type, stream_values) in stream_info.items():
listitem.add_stream_info(stream_type, stream_values) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if context_menu:
listitem.add_context_menu_items(context_menu, replace_context_menu) # depends on [control=['if'], data=[]]
return listitem |
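The docstring above suggests collecting the properties in a dict and unpacking it; a hedged sketch of that call pattern (the keys and values below are hypothetical, and `ListItem` is assumed to be the class defining `from_dict`, as in Kodi plugin frameworks):

item_dct = {
    'label': 'Episode 1',
    'path': 'plugin://my.plugin/play/1',
    'is_playable': True,
    'info': {'title': 'Episode 1', 'duration': 1320},
    'properties': {'fanart_image': 'http://example.com/fanart.jpg'},
    'stream_info': {'video': {'codec': 'h264', 'width': 1280, 'height': 720}},
}
listitem = ListItem.from_dict(**item_dct)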
def buses_of_vlvl(network, voltage_level):
    """ Get bus-ids of given voltage level(s).
    Parameters
    ----------
    network : :class:`pypsa.Network`
        Overall container of PyPSA
    voltage_level: list
    Returns
    -------
    list
        List containing bus-ids.
    """
    mask = network.buses.v_nom.isin(voltage_level)
    df = network.buses[mask]
    return df.index | def function[buses_of_vlvl, parameter[network, voltage_level]]:
constant[ Get bus-ids of given voltage level(s).
Parameters
----------
network : :class:`pypsa.Network`
Overall container of PyPSA
voltage_level: list
Returns
-------
list
List containing bus-ids.
]
variable[mask] assign[=] call[name[network].buses.v_nom.isin, parameter[name[voltage_level]]]
variable[df] assign[=] call[name[network].buses][name[mask]]
return[name[df].index] | keyword[def] identifier[buses_of_vlvl] ( identifier[network] , identifier[voltage_level] ):
literal[string]
identifier[mask] = identifier[network] . identifier[buses] . identifier[v_nom] . identifier[isin] ( identifier[voltage_level] )
identifier[df] = identifier[network] . identifier[buses] [ identifier[mask] ]
keyword[return] identifier[df] . identifier[index] | def buses_of_vlvl(network, voltage_level):
""" Get bus-ids of given voltage level(s).
Parameters
----------
network : :class:`pypsa.Network`
Overall container of PyPSA
voltage_level: list
Returns
-------
list
List containing bus-ids.
"""
mask = network.buses.v_nom.isin(voltage_level)
df = network.buses[mask]
return df.index |
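A self-contained sketch of how `buses_of_vlvl` filters buses; a real `pypsa.Network` is replaced here by a minimal stand-in that only provides the `buses` DataFrame the function actually touches:

import pandas as pd

class FakeNetwork:
    # only the .buses attribute is used by buses_of_vlvl
    buses = pd.DataFrame({'v_nom': [380.0, 220.0, 110.0, 20.0]},
                         index=['bus0', 'bus1', 'bus2', 'bus3'])

print(list(buses_of_vlvl(FakeNetwork(), [110.0, 220.0])))
# -> ['bus1', 'bus2']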
def validate_file(file_type, file_path):
    """
    Validates a file against a schema
    Parameters
    ----------
    file_type : str
        Type of file to read. May be 'component', 'element', 'table', or 'references'
    file_path:
        Full path to the file to be validated
    Raises
    ------
    RuntimeError
        If the file_type is not valid (and/or a schema doesn't exist)
    ValidationError
        If the given file does not pass validation
    FileNotFoundError
        If the file given by file_path doesn't exist
    """
    file_data = fileio._read_plain_json(file_path, False)
    validate_data(file_type, file_data) | def function[validate_file, parameter[file_type, file_path]]:
constant[
Validates a file against a schema
Parameters
----------
file_type : str
Type of file to read. May be 'component', 'element', 'table', or 'references'
file_path:
Full path to the file to be validated
Raises
------
RuntimeError
If the file_type is not valid (and/or a schema doesn't exist)
ValidationError
If the given file does not pass validation
FileNotFoundError
If the file given by file_path doesn't exist
]
variable[file_data] assign[=] call[name[fileio]._read_plain_json, parameter[name[file_path], constant[False]]]
call[name[validate_data], parameter[name[file_type], name[file_data]]] | keyword[def] identifier[validate_file] ( identifier[file_type] , identifier[file_path] ):
literal[string]
identifier[file_data] = identifier[fileio] . identifier[_read_plain_json] ( identifier[file_path] , keyword[False] )
identifier[validate_data] ( identifier[file_type] , identifier[file_data] ) | def validate_file(file_type, file_path):
"""
Validates a file against a schema
Parameters
----------
file_type : str
Type of file to read. May be 'component', 'element', 'table', or 'references'
file_path:
Full path to the file to be validated
Raises
------
RuntimeError
If the file_type is not valid (and/or a schema doesn't exist)
ValidationError
If the given file does not pass validation
FileNotFoundError
If the file given by file_path doesn't exist
"""
file_data = fileio._read_plain_json(file_path, False)
validate_data(file_type, file_data) |
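A hedged call sketch covering the documented failure modes (the path is hypothetical; the ValidationError type comes from whichever schema library `validate_data` uses, so it is caught generically here):

try:
    validate_file('element', '/path/to/element.json')
except FileNotFoundError:
    print('file does not exist')
except RuntimeError as exc:
    print('unknown file_type or missing schema:', exc)
except Exception as exc:  # e.g. the schema library's ValidationError
    print('file failed validation:', exc)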
def _create_tag_lowlevel(self, tag_name, message=None, force=True,
                         patch=False):
    """Create a tag on the toplevel or patch repo
    If the tag exists, and force is False, no tag is made. If force is True,
    and a tag exists, but it is a direct ancestor of the current commit,
    and there is no difference in filestate between the current commit
    and the tagged commit, no tag is made. Otherwise, the old tag is
    overwritten to point at the current commit.
    Returns True or False indicating whether the tag was actually committed
    """
    # check if tag already exists, and if it does, if it is a direct
    # ancestor, and there is NO difference in the files between the tagged
    # state and current state
    #
    # This check is mainly to avoid re-creating the same tag over and over
    # on what is essentially the same commit, since tagging will
    # technically create a new commit, and update the working copy to it.
    #
    # Without this check, say you were releasing to three different
    # locations, one right after another; the first would create the tag,
    # and a new tag commit. The second would then recreate the exact same
    # tag, but now pointing at the commit that made the first tag.
    # The third would create the tag a THIRD time, but now pointing at the
    # commit that created the 2nd tag.
    tags = self.get_tags(patch=patch)
    old_commit = tags.get(tag_name)
    if old_commit is not None:
        if not force:
            return False
        old_rev = old_commit['rev']
        # ok, now check to see if direct ancestor...
        if self.is_ancestor(old_rev, '.', patch=patch):
            # ...and if filestates are same
            altered = self.hg('status', '--rev', old_rev, '--rev', '.',
                              '--no-status')
            if not altered or altered == ['.hgtags']:
                force = False
        if not force:
            return False
    tag_args = ['tag', tag_name]
    if message:
        tag_args += ['--message', message]
    # we should be ok with ALWAYS having force flag on now, since we should
    # have already checked if the commit exists.. but be paranoid, in case
    # we've missed some edge case...
    if force:
        tag_args += ['--force']
    self.hg(patch=patch, *tag_args)
    return True | def function[_create_tag_lowlevel, parameter[self, tag_name, message, force, patch]]:
constant[Create a tag on the toplevel or patch repo
If the tag exists, and force is False, no tag is made. If force is True,
and a tag exists, but it is a direct ancestor of the current commit,
and there is no difference in filestate between the current commit
and the tagged commit, no tag is made. Otherwise, the old tag is
overwritten to point at the current commit.
Returns True or False indicating whether the tag was actually committed
]
variable[tags] assign[=] call[name[self].get_tags, parameter[]]
variable[old_commit] assign[=] call[name[tags].get, parameter[name[tag_name]]]
if compare[name[old_commit] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b17e3160> begin[:]
return[constant[False]]
variable[old_rev] assign[=] call[name[old_commit]][constant[rev]]
if call[name[self].is_ancestor, parameter[name[old_rev], constant[.]]] begin[:]
variable[altered] assign[=] call[name[self].hg, parameter[constant[status], constant[--rev], name[old_rev], constant[--rev], constant[.], constant[--no-status]]]
if <ast.BoolOp object at 0x7da1b17cefb0> begin[:]
variable[force] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b17ce800> begin[:]
return[constant[False]]
variable[tag_args] assign[=] list[[<ast.Constant object at 0x7da1b170cb80>, <ast.Name object at 0x7da1b170f250>]]
if name[message] begin[:]
<ast.AugAssign object at 0x7da1b170d180>
if name[force] begin[:]
<ast.AugAssign object at 0x7da1b17cc0d0>
call[name[self].hg, parameter[<ast.Starred object at 0x7da1b17cd4b0>]]
return[constant[True]] | keyword[def] identifier[_create_tag_lowlevel] ( identifier[self] , identifier[tag_name] , identifier[message] = keyword[None] , identifier[force] = keyword[True] ,
identifier[patch] = keyword[False] ):
literal[string]
identifier[tags] = identifier[self] . identifier[get_tags] ( identifier[patch] = identifier[patch] )
identifier[old_commit] = identifier[tags] . identifier[get] ( identifier[tag_name] )
keyword[if] identifier[old_commit] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[force] :
keyword[return] keyword[False]
identifier[old_rev] = identifier[old_commit] [ literal[string] ]
keyword[if] identifier[self] . identifier[is_ancestor] ( identifier[old_rev] , literal[string] , identifier[patch] = identifier[patch] ):
identifier[altered] = identifier[self] . identifier[hg] ( literal[string] , literal[string] , identifier[old_rev] , literal[string] , literal[string] ,
literal[string] )
keyword[if] keyword[not] identifier[altered] keyword[or] identifier[altered] ==[ literal[string] ]:
identifier[force] = keyword[False]
keyword[if] keyword[not] identifier[force] :
keyword[return] keyword[False]
identifier[tag_args] =[ literal[string] , identifier[tag_name] ]
keyword[if] identifier[message] :
identifier[tag_args] +=[ literal[string] , identifier[message] ]
keyword[if] identifier[force] :
identifier[tag_args] +=[ literal[string] ]
identifier[self] . identifier[hg] ( identifier[patch] = identifier[patch] ,* identifier[tag_args] )
keyword[return] keyword[True] | def _create_tag_lowlevel(self, tag_name, message=None, force=True, patch=False):
"""Create a tag on the toplevel or patch repo
If the tag exists, and force is False, no tag is made. If force is True,
and a tag exists, but it is a direct ancestor of the current commit,
and there is no difference in filestate between the current commit
and the tagged commit, no tag is made. Otherwise, the old tag is
overwritten to point at the current commit.
Returns True or False indicating whether the tag was actually committed
"""
# check if tag already exists, and if it does, if it is a direct
# ancestor, and there is NO difference in the files between the tagged
# state and current state
#
# This check is mainly to avoid re-creating the same tag over and over
# on what is essentially the same commit, since tagging will
# technically create a new commit, and update the working copy to it.
#
# Without this check, say you were releasing to three different
# locations, one right after another; the first would create the tag,
# and a new tag commit. The second would then recreate the exact same
# tag, but now pointing at the commit that made the first tag.
# The third would create the tag a THIRD time, but now pointing at the
# commit that created the 2nd tag.
tags = self.get_tags(patch=patch)
old_commit = tags.get(tag_name)
if old_commit is not None:
if not force:
return False # depends on [control=['if'], data=[]]
old_rev = old_commit['rev']
# ok, now check to see if direct ancestor...
if self.is_ancestor(old_rev, '.', patch=patch):
# ...and if filestates are same
altered = self.hg('status', '--rev', old_rev, '--rev', '.', '--no-status')
if not altered or altered == ['.hgtags']:
force = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not force:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['old_commit']]
tag_args = ['tag', tag_name]
if message:
tag_args += ['--message', message] # depends on [control=['if'], data=[]]
# we should be ok with ALWAYS having force flag on now, since we should
# have already checked if the commit exists.. but be paranoid, in case
# we've missed some edge case...
if force:
tag_args += ['--force'] # depends on [control=['if'], data=[]]
self.hg(*tag_args, patch=patch)
return True |
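A usage sketch, assuming `repo` is an instance of the wrapper class that defines the `hg()`, `get_tags()` and `is_ancestor()` helpers referenced in the body above (the tag name and message are hypothetical):

made = repo._create_tag_lowlevel('release-1.2.0',
                                 message='tagging 1.2.0 for deploy',
                                 force=True)
if not made:
    print('an equivalent tag already points at this commit; nothing done')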
def md5(self):
    """
    "Hash" of transforms
    Returns
    -----------
    md5 : str
        Approximate hash of transforms
    """
    result = str(self._updated) + str(self.base_frame)
    return result | def function[md5, parameter[self]]:
constant[
"Hash" of transforms
Returns
-----------
md5 : str
Approximate hash of transforms
]
variable[result] assign[=] binary_operation[call[name[str], parameter[name[self]._updated]] + call[name[str], parameter[name[self].base_frame]]]
return[name[result]] | keyword[def] identifier[md5] ( identifier[self] ):
literal[string]
identifier[result] = identifier[str] ( identifier[self] . identifier[_updated] )+ identifier[str] ( identifier[self] . identifier[base_frame] )
keyword[return] identifier[result] | def md5(self):
"""
"Hash" of transforms
Returns
-----------
md5 : str
Approximate hash of transforms
"""
result = str(self._updated) + str(self.base_frame)
return result |
def _from_objects(cls, objects):
    """
    Private constructor: create graph from the given Python objects.
    The constructor examines the referents of each given object to build up
    a graph showing the objects and their links.
    """
    vertices = ElementTransformSet(transform=id)
    out_edges = KeyTransformDict(transform=id)
    in_edges = KeyTransformDict(transform=id)
    for obj in objects:
        vertices.add(obj)
        out_edges[obj] = []
        in_edges[obj] = []
    # Edges are identified by simple integers, so
    # we can use plain dictionaries for mapping
    # edges to their heads and tails.
    edge_label = itertools.count()
    edges = set()
    head = {}
    tail = {}
    for referrer in vertices:
        for referent in gc.get_referents(referrer):
            if referent not in vertices:
                continue
            edge = next(edge_label)
            edges.add(edge)
            tail[edge] = referrer
            head[edge] = referent
            out_edges[referrer].append(edge)
            in_edges[referent].append(edge)
    return cls._raw(
        vertices=vertices,
        edges=edges,
        out_edges=out_edges,
        in_edges=in_edges,
        head=head,
        tail=tail,
    ) | def function[_from_objects, parameter[cls, objects]]:
constant[
Private constructor: create graph from the given Python objects.
The constructor examines the referents of each given object to build up
a graph showing the objects and their links.
]
variable[vertices] assign[=] call[name[ElementTransformSet], parameter[]]
variable[out_edges] assign[=] call[name[KeyTransformDict], parameter[]]
variable[in_edges] assign[=] call[name[KeyTransformDict], parameter[]]
for taget[name[obj]] in starred[name[objects]] begin[:]
call[name[vertices].add, parameter[name[obj]]]
call[name[out_edges]][name[obj]] assign[=] list[[]]
call[name[in_edges]][name[obj]] assign[=] list[[]]
variable[edge_label] assign[=] call[name[itertools].count, parameter[]]
variable[edges] assign[=] call[name[set], parameter[]]
variable[head] assign[=] dictionary[[], []]
variable[tail] assign[=] dictionary[[], []]
for taget[name[referrer]] in starred[name[vertices]] begin[:]
for taget[name[referent]] in starred[call[name[gc].get_referents, parameter[name[referrer]]]] begin[:]
if compare[name[referent] <ast.NotIn object at 0x7da2590d7190> name[vertices]] begin[:]
continue
variable[edge] assign[=] call[name[next], parameter[name[edge_label]]]
call[name[edges].add, parameter[name[edge]]]
call[name[tail]][name[edge]] assign[=] name[referrer]
call[name[head]][name[edge]] assign[=] name[referent]
call[call[name[out_edges]][name[referrer]].append, parameter[name[edge]]]
call[call[name[in_edges]][name[referent]].append, parameter[name[edge]]]
return[call[name[cls]._raw, parameter[]]] | keyword[def] identifier[_from_objects] ( identifier[cls] , identifier[objects] ):
literal[string]
identifier[vertices] = identifier[ElementTransformSet] ( identifier[transform] = identifier[id] )
identifier[out_edges] = identifier[KeyTransformDict] ( identifier[transform] = identifier[id] )
identifier[in_edges] = identifier[KeyTransformDict] ( identifier[transform] = identifier[id] )
keyword[for] identifier[obj] keyword[in] identifier[objects] :
identifier[vertices] . identifier[add] ( identifier[obj] )
identifier[out_edges] [ identifier[obj] ]=[]
identifier[in_edges] [ identifier[obj] ]=[]
identifier[edge_label] = identifier[itertools] . identifier[count] ()
identifier[edges] = identifier[set] ()
identifier[head] ={}
identifier[tail] ={}
keyword[for] identifier[referrer] keyword[in] identifier[vertices] :
keyword[for] identifier[referent] keyword[in] identifier[gc] . identifier[get_referents] ( identifier[referrer] ):
keyword[if] identifier[referent] keyword[not] keyword[in] identifier[vertices] :
keyword[continue]
identifier[edge] = identifier[next] ( identifier[edge_label] )
identifier[edges] . identifier[add] ( identifier[edge] )
identifier[tail] [ identifier[edge] ]= identifier[referrer]
identifier[head] [ identifier[edge] ]= identifier[referent]
identifier[out_edges] [ identifier[referrer] ]. identifier[append] ( identifier[edge] )
identifier[in_edges] [ identifier[referent] ]. identifier[append] ( identifier[edge] )
keyword[return] identifier[cls] . identifier[_raw] (
identifier[vertices] = identifier[vertices] ,
identifier[edges] = identifier[edges] ,
identifier[out_edges] = identifier[out_edges] ,
identifier[in_edges] = identifier[in_edges] ,
identifier[head] = identifier[head] ,
identifier[tail] = identifier[tail] ,
) | def _from_objects(cls, objects):
"""
Private constructor: create graph from the given Python objects.
The constructor examines the referents of each given object to build up
a graph showing the objects and their links.
"""
vertices = ElementTransformSet(transform=id)
out_edges = KeyTransformDict(transform=id)
in_edges = KeyTransformDict(transform=id)
for obj in objects:
vertices.add(obj)
out_edges[obj] = []
in_edges[obj] = [] # depends on [control=['for'], data=['obj']]
# Edges are identified by simple integers, so
# we can use plain dictionaries for mapping
# edges to their heads and tails.
edge_label = itertools.count()
edges = set()
head = {}
tail = {}
for referrer in vertices:
for referent in gc.get_referents(referrer):
if referent not in vertices:
continue # depends on [control=['if'], data=[]]
edge = next(edge_label)
edges.add(edge)
tail[edge] = referrer
head[edge] = referent
out_edges[referrer].append(edge)
in_edges[referent].append(edge) # depends on [control=['for'], data=['referent']] # depends on [control=['for'], data=['referrer']]
return cls._raw(vertices=vertices, edges=edges, out_edges=out_edges, in_edges=in_edges, head=head, tail=tail) |
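A sketch of building the reference graph over a two-element cycle; `ObjectGraph` is a placeholder name for the class that defines `_from_objects` (the real class also needs `ElementTransformSet`, `KeyTransformDict` and `_raw` from its module):

import gc  # used by _from_objects via gc.get_referents

a, b = [], []
a.append(b)   # a -> b
b.append(a)   # b -> a, closing a reference cycle
graph = ObjectGraph._from_objects([a, b])
# expected: 2 vertices and one edge in each direction between them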
def show_blob_service_properties(kwargs=None, storage_conn=None, call=None):
    '''
    .. versionadded:: 2015.8.0
    Show a blob's service properties
    CLI Example:
    .. code-block:: bash
        salt-cloud -f show_blob_service_properties my-azure
    '''
    if call != 'function':
        raise SaltCloudSystemExit(
            'The show_blob_service_properties function must be called with -f or --function.'
        )
    if not storage_conn:
        storage_conn = get_storage_conn(conn_kwargs=kwargs)
    data = storage_conn.get_blob_service_properties(
        timeout=kwargs.get('timeout', None),
    )
    return data | def function[show_blob_service_properties, parameter[kwargs, storage_conn, call]]:
constant[
.. versionadded:: 2015.8.0
Show a blob's service properties
CLI Example:
.. code-block:: bash
salt-cloud -f show_blob_service_properties my-azure
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da2046234f0>
if <ast.UnaryOp object at 0x7da204622ce0> begin[:]
variable[storage_conn] assign[=] call[name[get_storage_conn], parameter[]]
variable[data] assign[=] call[name[storage_conn].get_blob_service_properties, parameter[]]
return[name[data]] | keyword[def] identifier[show_blob_service_properties] ( identifier[kwargs] = keyword[None] , identifier[storage_conn] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] keyword[not] identifier[storage_conn] :
identifier[storage_conn] = identifier[get_storage_conn] ( identifier[conn_kwargs] = identifier[kwargs] )
identifier[data] = identifier[storage_conn] . identifier[get_blob_service_properties] (
identifier[timeout] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ),
)
keyword[return] identifier[data] | def show_blob_service_properties(kwargs=None, storage_conn=None, call=None):
"""
.. versionadded:: 2015.8.0
Show a blob's service properties
CLI Example:
.. code-block:: bash
salt-cloud -f show_blob_service_properties my-azure
"""
if call != 'function':
raise SaltCloudSystemExit('The show_blob_service_properties function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if not storage_conn:
storage_conn = get_storage_conn(conn_kwargs=kwargs) # depends on [control=['if'], data=[]]
data = storage_conn.get_blob_service_properties(timeout=kwargs.get('timeout', None))
return data |
def add_cmd_to_checkplot(
        cpx,
        cmdpkl,
        require_cmd_magcolor=True,
        save_cmd_pngs=False
):
    '''This adds CMD figures to a checkplot dict or pickle.
    Looks up the CMDs in `cmdpkl`, adds the object from `cpx` as a gold(-ish)
    star in the plot, and then saves the figure to a base64 encoded PNG, which
    can then be read and used by the `checkplotserver`.
    Parameters
    ----------
    cpx : str or dict
        This is the input checkplot pickle or dict to add the CMD to.
    cmdpkl : str or dict
        The CMD pickle generated by the `colormagdiagram_cplist` or
        `colormagdiagram_cpdir` functions above, or the dict produced by reading
        this pickle in.
    require_cmd_magcolor : bool
        If this is True, a CMD plot will not be made if the color and mag keys
        required by the CMD are not present or are nan in this checkplot's
        objectinfo dict.
    save_cmd_pngs : bool
        If this is True, then will save the CMD plots that were generated and
        added back to the checkplotdict as PNGs to the same directory as
        `cpx`. If `cpx` is a dict, will save them to the current working
        directory.
    Returns
    -------
    str or dict
        If `cpx` was a str filename of checkplot pickle, this will return that
        filename to indicate that the CMD was added to the file. If `cpx` was a
        checkplotdict, this will return the checkplotdict with a new key called
        'colormagdiagram' containing the base64 encoded PNG binary streams of
        all CMDs generated.
    '''
    # get the checkplot
    if isinstance(cpx, str) and os.path.exists(cpx):
        cpdict = _read_checkplot_picklefile(cpx)
    elif isinstance(cpx, dict):
        cpdict = cpx
    else:
        LOGERROR('unknown type of checkplot provided as the cpx arg')
        return None
    # get the CMD
    if isinstance(cmdpkl, str) and os.path.exists(cmdpkl):
        with open(cmdpkl, 'rb') as infd:
            cmd = pickle.load(infd)
    elif isinstance(cmdpkl, dict):
        cmd = cmdpkl
    cpdict['colormagdiagram'] = {}
    # get the mags and colors from the CMD dict
    cplist_mags = cmd['mags']
    cplist_colors = cmd['colors']
    # now make the CMD plots for each color-mag combination in the CMD
    for c1, c2, ym, ind in zip(cmd['color_mag1'],
                               cmd['color_mag2'],
                               cmd['yaxis_mag'],
                               range(len(cmd['color_mag1']))):
        # get these from the checkplot for this object
        if (c1 in cpdict['objectinfo'] and
            cpdict['objectinfo'][c1] is not None):
            c1mag = cpdict['objectinfo'][c1]
        else:
            c1mag = np.nan
        if (c2 in cpdict['objectinfo'] and
            cpdict['objectinfo'][c2] is not None):
            c2mag = cpdict['objectinfo'][c2]
        else:
            c2mag = np.nan
        if (ym in cpdict['objectinfo'] and
            cpdict['objectinfo'][ym] is not None):
            ymmag = cpdict['objectinfo'][ym]
        else:
            ymmag = np.nan
        if (require_cmd_magcolor and
            not (np.isfinite(c1mag) and
                 np.isfinite(c2mag) and
                 np.isfinite(ymmag))):
            LOGWARNING("required color: %s-%s or mag: %s are not "
                       "in this checkplot's objectinfo dict "
                       "(objectid: %s), skipping CMD..." %
                       (c1, c2, ym, cpdict['objectid']))
            continue
        # make the CMD for this color-mag combination
        try:
            thiscmd_title = r'%s-%s/%s' % (CMD_LABELS[c1],
                                           CMD_LABELS[c2],
                                           CMD_LABELS[ym])
            # make the scatter plot
            fig = plt.figure(figsize=(10,8))
            plt.plot(cplist_colors[:,ind],
                     cplist_mags[:,ind],
                     rasterized=True,
                     marker='o',
                     linestyle='none',
                     mew=0,
                     ms=3)
            # put this object on the plot
            plt.plot([c1mag - c2mag], [ymmag],
                     ms=20,
                     color='#b0ff05',
                     marker='*',
                     mew=0)
            plt.xlabel(r'$%s - %s$' % (CMD_LABELS[c1], CMD_LABELS[c2]))
            plt.ylabel(r'$%s$' % CMD_LABELS[ym])
            plt.title('%s - $%s$ CMD' % (cpdict['objectid'], thiscmd_title))
            plt.gca().invert_yaxis()
            # now save the figure to StrIO and put it back in the checkplot
            cmdpng = StrIO()
            plt.savefig(cmdpng, bbox_inches='tight',
                        pad_inches=0.0, format='png')
            cmdpng.seek(0)
            cmdb64 = base64.b64encode(cmdpng.read())
            cmdpng.close()
            plt.close('all')
            plt.gcf().clear()
            cpdict['colormagdiagram']['%s-%s/%s' % (c1,c2,ym)] = cmdb64
            # if we're supposed to export to PNG, do so
            if save_cmd_pngs:
                if isinstance(cpx, str):
                    outpng = os.path.join(os.path.dirname(cpx),
                                          'cmd-%s-%s-%s.%s.png' %
                                          (cpdict['objectid'],
                                           c1,c2,ym))
                else:
                    outpng = 'cmd-%s-%s-%s.%s.png' % (cpdict['objectid'],
                                                      c1,c2,ym)
                _base64_to_file(cmdb64, outpng)
        except Exception as e:
            LOGEXCEPTION('CMD for %s-%s/%s does not exist in %s, skipping...' %
                         (c1, c2, ym, cmdpkl))
            continue
    #
    # end of making CMDs
    #
    if isinstance(cpx, str):
        cpf = _write_checkplot_picklefile(cpdict, outfile=cpx, protocol=4)
        return cpf
    elif isinstance(cpx, dict):
        return cpdict | def function[add_cmd_to_checkplot, parameter[cpx, cmdpkl, require_cmd_magcolor, save_cmd_pngs]]:
constant[This adds CMD figures to a checkplot dict or pickle.
Looks up the CMDs in `cmdpkl`, adds the object from `cpx` as a gold(-ish)
star in the plot, and then saves the figure to a base64 encoded PNG, which
can then be read and used by the `checkplotserver`.
Parameters
----------
cpx : str or dict
This is the input checkplot pickle or dict to add the CMD to.
cmdpkl : str or dict
The CMD pickle generated by the `colormagdiagram_cplist` or
`colormagdiagram_cpdir` functions above, or the dict produced by reading
this pickle in.
require_cmd_magcolor : bool
If this is True, a CMD plot will not be made if the color and mag keys
required by the CMD are not present or are nan in this checkplot's
objectinfo dict.
save_cmd_pngs : bool
If this is True, then will save the CMD plots that were generated and
added back to the checkplotdict as PNGs to the same directory as
`cpx`. If `cpx` is a dict, will save them to the current working
directory.
Returns
-------
str or dict
If `cpx` was a str filename of checkplot pickle, this will return that
filename to indicate that the CMD was added to the file. If `cpx` was a
checkplotdict, this will return the checkplotdict with a new key called
'colormagdiagram' containing the base64 encoded PNG binary streams of
all CMDs generated.
]
if <ast.BoolOp object at 0x7da18ede7070> begin[:]
variable[cpdict] assign[=] call[name[_read_checkplot_picklefile], parameter[name[cpx]]]
if <ast.BoolOp object at 0x7da18ede6d40> begin[:]
with call[name[open], parameter[name[cmdpkl], constant[rb]]] begin[:]
variable[cmd] assign[=] call[name[pickle].load, parameter[name[infd]]]
call[name[cpdict]][constant[colormagdiagram]] assign[=] dictionary[[], []]
variable[cplist_mags] assign[=] call[name[cmd]][constant[mags]]
variable[cplist_colors] assign[=] call[name[cmd]][constant[colors]]
for taget[tuple[[<ast.Name object at 0x7da18dc06e60>, <ast.Name object at 0x7da18dc07190>, <ast.Name object at 0x7da18dc04550>, <ast.Name object at 0x7da18dc05540>]]] in starred[call[name[zip], parameter[call[name[cmd]][constant[color_mag1]], call[name[cmd]][constant[color_mag2]], call[name[cmd]][constant[yaxis_mag]], call[name[range], parameter[call[name[len], parameter[call[name[cmd]][constant[color_mag1]]]]]]]]] begin[:]
if <ast.BoolOp object at 0x7da18dc06770> begin[:]
variable[c1mag] assign[=] call[call[name[cpdict]][constant[objectinfo]]][name[c1]]
if <ast.BoolOp object at 0x7da18dc07850> begin[:]
variable[c2mag] assign[=] call[call[name[cpdict]][constant[objectinfo]]][name[c2]]
if <ast.BoolOp object at 0x7da237eefa90> begin[:]
variable[ymmag] assign[=] call[call[name[cpdict]][constant[objectinfo]]][name[ym]]
if <ast.BoolOp object at 0x7da18c4cf5e0> begin[:]
call[name[LOGWARNING], parameter[binary_operation[constant[required color: %s-%s or mag: %s are not in this checkplot's objectinfo dict (objectid: %s), skipping CMD...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4ce770>, <ast.Name object at 0x7da18c4cf010>, <ast.Name object at 0x7da18c4cdc30>, <ast.Subscript object at 0x7da18c4cc940>]]]]]
continue
<ast.Try object at 0x7da18c4ccfa0>
if call[name[isinstance], parameter[name[cpx], name[str]]] begin[:]
variable[cpf] assign[=] call[name[_write_checkplot_picklefile], parameter[name[cpdict]]]
return[name[cpf]] | keyword[def] identifier[add_cmd_to_checkplot] (
identifier[cpx] ,
identifier[cmdpkl] ,
identifier[require_cmd_magcolor] = keyword[True] ,
identifier[save_cmd_pngs] = keyword[False]
):
literal[string]
keyword[if] identifier[isinstance] ( identifier[cpx] , identifier[str] ) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[cpx] ):
identifier[cpdict] = identifier[_read_checkplot_picklefile] ( identifier[cpx] )
keyword[elif] identifier[isinstance] ( identifier[cpx] , identifier[dict] ):
identifier[cpdict] = identifier[cpx]
keyword[else] :
identifier[LOGERROR] ( literal[string] )
keyword[return] keyword[None]
keyword[if] identifier[isinstance] ( identifier[cmdpkl] , identifier[str] ) keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[cmdpkl] ):
keyword[with] identifier[open] ( identifier[cmdpkl] , literal[string] ) keyword[as] identifier[infd] :
identifier[cmd] = identifier[pickle] . identifier[load] ( identifier[infd] )
keyword[elif] identifier[isinstance] ( identifier[cmdpkl] , identifier[dict] ):
identifier[cmd] = identifier[cmdpkl]
identifier[cpdict] [ literal[string] ]={}
identifier[cplist_mags] = identifier[cmd] [ literal[string] ]
identifier[cplist_colors] = identifier[cmd] [ literal[string] ]
keyword[for] identifier[c1] , identifier[c2] , identifier[ym] , identifier[ind] keyword[in] identifier[zip] ( identifier[cmd] [ literal[string] ],
identifier[cmd] [ literal[string] ],
identifier[cmd] [ literal[string] ],
identifier[range] ( identifier[len] ( identifier[cmd] [ literal[string] ]))):
keyword[if] ( identifier[c1] keyword[in] identifier[cpdict] [ literal[string] ] keyword[and]
identifier[cpdict] [ literal[string] ][ identifier[c1] ] keyword[is] keyword[not] keyword[None] ):
identifier[c1mag] = identifier[cpdict] [ literal[string] ][ identifier[c1] ]
keyword[else] :
identifier[c1mag] = identifier[np] . identifier[nan]
keyword[if] ( identifier[c2] keyword[in] identifier[cpdict] [ literal[string] ] keyword[and]
identifier[cpdict] [ literal[string] ][ identifier[c2] ] keyword[is] keyword[not] keyword[None] ):
identifier[c2mag] = identifier[cpdict] [ literal[string] ][ identifier[c2] ]
keyword[else] :
identifier[c2mag] = identifier[np] . identifier[nan]
keyword[if] ( identifier[ym] keyword[in] identifier[cpdict] [ literal[string] ] keyword[and]
identifier[cpdict] [ literal[string] ][ identifier[ym] ] keyword[is] keyword[not] keyword[None] ):
identifier[ymmag] = identifier[cpdict] [ literal[string] ][ identifier[ym] ]
keyword[else] :
identifier[ymmag] = identifier[np] . identifier[nan]
keyword[if] ( identifier[require_cmd_magcolor] keyword[and]
keyword[not] ( identifier[np] . identifier[isfinite] ( identifier[c1mag] ) keyword[and]
identifier[np] . identifier[isfinite] ( identifier[c2mag] ) keyword[and]
identifier[np] . identifier[isfinite] ( identifier[ymmag] ))):
identifier[LOGWARNING] ( literal[string]
literal[string]
literal[string] %
( identifier[c1] , identifier[c2] , identifier[ym] , identifier[cpdict] [ literal[string] ]))
keyword[continue]
keyword[try] :
identifier[thiscmd_title] = literal[string] %( identifier[CMD_LABELS] [ identifier[c1] ],
identifier[CMD_LABELS] [ identifier[c2] ],
identifier[CMD_LABELS] [ identifier[ym] ])
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ))
identifier[plt] . identifier[plot] ( identifier[cplist_colors] [:, identifier[ind] ],
identifier[cplist_mags] [:, identifier[ind] ],
identifier[rasterized] = keyword[True] ,
identifier[marker] = literal[string] ,
identifier[linestyle] = literal[string] ,
identifier[mew] = literal[int] ,
identifier[ms] = literal[int] )
identifier[plt] . identifier[plot] ([ identifier[c1mag] - identifier[c2mag] ],[ identifier[ymmag] ],
identifier[ms] = literal[int] ,
identifier[color] = literal[string] ,
identifier[marker] = literal[string] ,
identifier[mew] = literal[int] )
identifier[plt] . identifier[xlabel] ( literal[string] %( identifier[CMD_LABELS] [ identifier[c1] ], identifier[CMD_LABELS] [ identifier[c2] ]))
identifier[plt] . identifier[ylabel] ( literal[string] % identifier[CMD_LABELS] [ identifier[ym] ])
identifier[plt] . identifier[title] ( literal[string] %( identifier[cpdict] [ literal[string] ], identifier[thiscmd_title] ))
identifier[plt] . identifier[gca] (). identifier[invert_yaxis] ()
identifier[cmdpng] = identifier[StrIO] ()
identifier[plt] . identifier[savefig] ( identifier[cmdpng] , identifier[bbox_inches] = literal[string] ,
identifier[pad_inches] = literal[int] , identifier[format] = literal[string] )
identifier[cmdpng] . identifier[seek] ( literal[int] )
identifier[cmdb64] = identifier[base64] . identifier[b64encode] ( identifier[cmdpng] . identifier[read] ())
identifier[cmdpng] . identifier[close] ()
identifier[plt] . identifier[close] ( literal[string] )
identifier[plt] . identifier[gcf] (). identifier[clear] ()
identifier[cpdict] [ literal[string] ][ literal[string] %( identifier[c1] , identifier[c2] , identifier[ym] )]= identifier[cmdb64]
keyword[if] identifier[save_cmd_pngs] :
keyword[if] identifier[isinstance] ( identifier[cpx] , identifier[str] ):
identifier[outpng] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[cpx] ),
literal[string] %
( identifier[cpdict] [ literal[string] ],
identifier[c1] , identifier[c2] , identifier[ym] ))
keyword[else] :
identifier[outpng] = literal[string] %( identifier[cpdict] [ literal[string] ],
identifier[c1] , identifier[c2] , identifier[ym] )
identifier[_base64_to_file] ( identifier[cmdb64] , identifier[outpng] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string] %
( identifier[c1] , identifier[c2] , identifier[ym] , identifier[cmdpkl] ))
keyword[continue]
keyword[if] identifier[isinstance] ( identifier[cpx] , identifier[str] ):
identifier[cpf] = identifier[_write_checkplot_picklefile] ( identifier[cpdict] , identifier[outfile] = identifier[cpx] , identifier[protocol] = literal[int] )
keyword[return] identifier[cpf]
keyword[elif] identifier[isinstance] ( identifier[cpx] , identifier[dict] ):
keyword[return] identifier[cpdict] | def add_cmd_to_checkplot(cpx, cmdpkl, require_cmd_magcolor=True, save_cmd_pngs=False):
"""This adds CMD figures to a checkplot dict or pickle.
Looks up the CMDs in `cmdpkl`, adds the object from `cpx` as a gold(-ish)
star in the plot, and then saves the figure to a base64 encoded PNG, which
can then be read and used by the `checkplotserver`.
Parameters
----------
cpx : str or dict
This is the input checkplot pickle or dict to add the CMD to.
cmdpkl : str or dict
The CMD pickle generated by the `colormagdiagram_cplist` or
`colormagdiagram_cpdir` functions above, or the dict produced by reading
this pickle in.
require_cmd_magcolor : bool
If this is True, a CMD plot will not be made if the color and mag keys
required by the CMD are not present or are nan in this checkplot's
objectinfo dict.
save_cmd_pngs : bool
If this is True, then will save the CMD plots that were generated and
added back to the checkplotdict as PNGs to the same directory as
`cpx`. If `cpx` is a dict, will save them to the current working
directory.
Returns
-------
str or dict
If `cpx` was a str filename of checkplot pickle, this will return that
filename to indicate that the CMD was added to the file. If `cpx` was a
checkplotdict, this will return the checkplotdict with a new key called
'colormagdiagram' containing the base64 encoded PNG binary streams of
all CMDs generated.
"""
# get the checkplot
if isinstance(cpx, str) and os.path.exists(cpx):
cpdict = _read_checkplot_picklefile(cpx) # depends on [control=['if'], data=[]]
elif isinstance(cpx, dict):
cpdict = cpx # depends on [control=['if'], data=[]]
else:
LOGERROR('unknown type of checkplot provided as the cpx arg')
return None
# get the CMD
if isinstance(cmdpkl, str) and os.path.exists(cmdpkl):
with open(cmdpkl, 'rb') as infd:
cmd = pickle.load(infd) # depends on [control=['with'], data=['infd']] # depends on [control=['if'], data=[]]
elif isinstance(cmdpkl, dict):
cmd = cmdpkl # depends on [control=['if'], data=[]]
cpdict['colormagdiagram'] = {}
# get the mags and colors from the CMD dict
cplist_mags = cmd['mags']
cplist_colors = cmd['colors']
# now make the CMD plots for each color-mag combination in the CMD
for (c1, c2, ym, ind) in zip(cmd['color_mag1'], cmd['color_mag2'], cmd['yaxis_mag'], range(len(cmd['color_mag1']))):
# get these from the checkplot for this object
if c1 in cpdict['objectinfo'] and cpdict['objectinfo'][c1] is not None:
c1mag = cpdict['objectinfo'][c1] # depends on [control=['if'], data=[]]
else:
c1mag = np.nan
if c2 in cpdict['objectinfo'] and cpdict['objectinfo'][c2] is not None:
c2mag = cpdict['objectinfo'][c2] # depends on [control=['if'], data=[]]
else:
c2mag = np.nan
if ym in cpdict['objectinfo'] and cpdict['objectinfo'][ym] is not None:
ymmag = cpdict['objectinfo'][ym] # depends on [control=['if'], data=[]]
else:
ymmag = np.nan
if require_cmd_magcolor and (not (np.isfinite(c1mag) and np.isfinite(c2mag) and np.isfinite(ymmag))):
LOGWARNING("required color: %s-%s or mag: %s are not in this checkplot's objectinfo dict (objectid: %s), skipping CMD..." % (c1, c2, ym, cpdict['objectid']))
continue # depends on [control=['if'], data=[]]
# make the CMD for this color-mag combination
try:
thiscmd_title = '%s-%s/%s' % (CMD_LABELS[c1], CMD_LABELS[c2], CMD_LABELS[ym])
# make the scatter plot
fig = plt.figure(figsize=(10, 8))
plt.plot(cplist_colors[:, ind], cplist_mags[:, ind], rasterized=True, marker='o', linestyle='none', mew=0, ms=3)
# put this object on the plot
plt.plot([c1mag - c2mag], [ymmag], ms=20, color='#b0ff05', marker='*', mew=0)
plt.xlabel('$%s - %s$' % (CMD_LABELS[c1], CMD_LABELS[c2]))
plt.ylabel('$%s$' % CMD_LABELS[ym])
plt.title('%s - $%s$ CMD' % (cpdict['objectid'], thiscmd_title))
plt.gca().invert_yaxis()
# now save the figure to StrIO and put it back in the checkplot
cmdpng = StrIO()
plt.savefig(cmdpng, bbox_inches='tight', pad_inches=0.0, format='png')
cmdpng.seek(0)
cmdb64 = base64.b64encode(cmdpng.read())
cmdpng.close()
plt.close('all')
plt.gcf().clear()
cpdict['colormagdiagram']['%s-%s/%s' % (c1, c2, ym)] = cmdb64
# if we're supposed to export to PNG, do so
if save_cmd_pngs:
if isinstance(cpx, str):
outpng = os.path.join(os.path.dirname(cpx), 'cmd-%s-%s-%s.%s.png' % (cpdict['objectid'], c1, c2, ym)) # depends on [control=['if'], data=[]]
else:
outpng = 'cmd-%s-%s-%s.%s.png' % (cpdict['objectid'], c1, c2, ym)
_base64_to_file(cmdb64, outpng) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
LOGEXCEPTION('CMD for %s-%s/%s does not exist in %s, skipping...' % (c1, c2, ym, cmdpkl))
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
#
# end of making CMDs
#
if isinstance(cpx, str):
cpf = _write_checkplot_picklefile(cpdict, outfile=cpx, protocol=4)
return cpf # depends on [control=['if'], data=[]]
elif isinstance(cpx, dict):
return cpdict # depends on [control=['if'], data=[]] |
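A hedged call sketch for the function above (both file paths are hypothetical; either argument may also be an already-loaded dict, per the docstring):

cpf = add_cmd_to_checkplot(
    '/data/checkplots/checkplot-HAT-123-0001234.pkl',
    '/data/cmds/cmd-sdssr.pkl',
    require_cmd_magcolor=True,
    save_cmd_pngs=True,
)
# cpf is the checkplot pickle path, now containing a 'colormagdiagram' key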
def destroy_window(window):
    '''
    Destroys the specified window and its context.
    Wrapper for:
    void glfwDestroyWindow(GLFWwindow* window);
    '''
    _glfw.glfwDestroyWindow(window)
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    for callback_repository in _callback_repositories:
        if window_addr in callback_repository:
            del callback_repository[window_addr] | def function[destroy_window, parameter[window]]:
constant[
Destroys the specified window and its context.
Wrapper for:
void glfwDestroyWindow(GLFWwindow* window);
]
call[name[_glfw].glfwDestroyWindow, parameter[name[window]]]
variable[window_addr] assign[=] call[name[ctypes].cast, parameter[call[name[ctypes].pointer, parameter[name[window]]], call[name[ctypes].POINTER, parameter[name[ctypes].c_ulong]]]].contents.value
for taget[name[callback_repository]] in starred[name[_callback_repositories]] begin[:]
if compare[name[window_addr] in name[callback_repository]] begin[:]
<ast.Delete object at 0x7da20c795e70> | keyword[def] identifier[destroy_window] ( identifier[window] ):
literal[string]
identifier[_glfw] . identifier[glfwDestroyWindow] ( identifier[window] )
identifier[window_addr] = identifier[ctypes] . identifier[cast] ( identifier[ctypes] . identifier[pointer] ( identifier[window] ),
identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_ulong] )). identifier[contents] . identifier[value]
keyword[for] identifier[callback_repository] keyword[in] identifier[_callback_repositories] :
keyword[if] identifier[window_addr] keyword[in] identifier[callback_repository] :
keyword[del] identifier[callback_repository] [ identifier[window_addr] ] | def destroy_window(window):
"""
Destroys the specified window and its context.
Wrapper for:
void glfwDestroyWindow(GLFWwindow* window);
"""
_glfw.glfwDestroyWindow(window)
window_addr = ctypes.cast(ctypes.pointer(window), ctypes.POINTER(ctypes.c_ulong)).contents.value
for callback_repository in _callback_repositories:
if window_addr in callback_repository:
del callback_repository[window_addr] # depends on [control=['if'], data=['window_addr', 'callback_repository']] # depends on [control=['for'], data=['callback_repository']] |
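A lifecycle sketch showing where `destroy_window` fits; the sibling wrapper names (`init`, `create_window`, `window_should_close`, `poll_events`, `terminate`) are assumed to exist in the same ctypes GLFW wrapper module:

init()
window = create_window(640, 480, 'demo', None, None)
while not window_should_close(window):
    poll_events()
destroy_window(window)   # also drops this window's callback registrations
terminate()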
def storeByteArray(self, context, page, len, data, returnError):
    """please override"""
    returnError.contents.value = self.IllegalStateError
    raise NotImplementedError("You must override this method.") | def function[storeByteArray, parameter[self, context, page, len, data, returnError]]:
constant[please override]
name[returnError].contents.value assign[=] name[self].IllegalStateError
<ast.Raise object at 0x7da1b05c88e0> | keyword[def] identifier[storeByteArray] ( identifier[self] , identifier[context] , identifier[page] , identifier[len] , identifier[data] , identifier[returnError] ):
literal[string]
identifier[returnError] . identifier[contents] . identifier[value] = identifier[self] . identifier[IllegalStateError]
keyword[raise] identifier[NotImplementedError] ( literal[string] ) | def storeByteArray(self, context, page, len, data, returnError):
"""please override"""
returnError.contents.value = self.IllegalStateError
raise NotImplementedError('You must override this method.') |
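A minimal override sketch; `BaseStorage` stands for whatever class defines the stub above, and the in-memory page map is an assumption about how a concrete backend might keep the buffer:

class InMemoryStorage(BaseStorage):
    def __init__(self):
        self._pages = {}

    def storeByteArray(self, context, page, len, data, returnError):
        # keep a copy of the raw bytes under the page id instead of
        # signalling IllegalStateError like the abstract default does
        self._pages[page] = bytes(data[:len])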
def reset_highlights(self):
    """
    Remove red outlines from all buttons
    """
    for dtype in ["specimens", "samples", "sites", "locations", "ages"]:
        wind = self.FindWindowByName(dtype + '_btn')
        wind.Unbind(wx.EVT_PAINT, handler=self.highlight_button)
    self.Refresh()
    #self.message.SetLabel('Highlighted grids have incorrect or incomplete data')
    self.bSizer_msg.ShowItems(False)
    self.hbox.Fit(self) | def function[reset_highlights, parameter[self]]:
constant[
Remove red outlines from all buttons
]
for taget[name[dtype]] in starred[list[[<ast.Constant object at 0x7da20e956950>, <ast.Constant object at 0x7da20e9578b0>, <ast.Constant object at 0x7da20e955ae0>, <ast.Constant object at 0x7da20e954b80>, <ast.Constant object at 0x7da20e9567d0>]]] begin[:]
variable[wind] assign[=] call[name[self].FindWindowByName, parameter[binary_operation[name[dtype] + constant[_btn]]]]
call[name[wind].Unbind, parameter[name[wx].EVT_PAINT]]
call[name[self].Refresh, parameter[]]
call[name[self].bSizer_msg.ShowItems, parameter[constant[False]]]
call[name[self].hbox.Fit, parameter[name[self]]] | keyword[def] identifier[reset_highlights] ( identifier[self] ):
literal[string]
keyword[for] identifier[dtype] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[wind] = identifier[self] . identifier[FindWindowByName] ( identifier[dtype] + literal[string] )
identifier[wind] . identifier[Unbind] ( identifier[wx] . identifier[EVT_PAINT] , identifier[handler] = identifier[self] . identifier[highlight_button] )
identifier[self] . identifier[Refresh] ()
identifier[self] . identifier[bSizer_msg] . identifier[ShowItems] ( keyword[False] )
identifier[self] . identifier[hbox] . identifier[Fit] ( identifier[self] ) | def reset_highlights(self):
"""
Remove red outlines from all buttons
"""
for dtype in ['specimens', 'samples', 'sites', 'locations', 'ages']:
wind = self.FindWindowByName(dtype + '_btn')
wind.Unbind(wx.EVT_PAINT, handler=self.highlight_button) # depends on [control=['for'], data=['dtype']]
self.Refresh()
#self.message.SetLabel('Highlighted grids have incorrect or incomplete data')
self.bSizer_msg.ShowItems(False)
self.hbox.Fit(self) |
def DbPutClassAttributeProperty2(self, argin):
    """ This command adds support for array properties compared to the previous one
    called DbPutClassAttributeProperty. The old command is still there for compatibility reasons
    :param argin: Str[0] = Tango class name
    Str[1] = Attribute number
    Str[2] = Attribute name
    Str[3] = Property number
    Str[4] = Property name
    Str[5] = Property value number (array case)
    Str[6] = Property value 1
    Str[n] = Property value n (array case)
    .....
    :type: tango.DevVarStringArray
    :return:
    :rtype: tango.DevVoid """
    self._log.debug("In DbPutClassAttributeProperty2()")
    class_name = argin[0]
    nb_attributes = int(argin[1])
    self.db.put_class_attribute_property2(class_name, nb_attributes, argin[2:]) | def function[DbPutClassAttributeProperty2, parameter[self, argin]]:
constant[ This command adds support for array properties compared to the previous one
called DbPutClassAttributeProperty. The old command is still there for compatibility reasons
:param argin: Str[0] = Tango class name
Str[1] = Attribute number
Str[2] = Attribute name
Str[3] = Property number
Str[4] = Property name
Str[5] = Property value number (array case)
Str[5] = Property value 1
Str[n] = Property value n (array case)
.....
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid ]
call[name[self]._log.debug, parameter[constant[In DbPutClassAttributeProperty2()]]]
variable[class_name] assign[=] call[name[argin]][constant[0]]
variable[nb_attributes] assign[=] call[name[int], parameter[call[name[argin]][constant[1]]]]
call[name[self].db.put_class_attribute_property2, parameter[name[class_name], name[nb_attributes], call[name[argin]][<ast.Slice object at 0x7da20c6a84c0>]]] | keyword[def] identifier[DbPutClassAttributeProperty2] ( identifier[self] , identifier[argin] ):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[class_name] = identifier[argin] [ literal[int] ]
identifier[nb_attributes] = identifier[int] ( identifier[argin] [ literal[int] ])
identifier[self] . identifier[db] . identifier[put_class_attribute_property2] ( identifier[class_name] , identifier[nb_attributes] , identifier[argin] [ literal[int] :]) | def DbPutClassAttributeProperty2(self, argin):
""" This command adds support for array properties compared to the previous one
called DbPutClassAttributeProperty. The old command is still there for compatibility reasons
:param argin: Str[0] = Tango class name
Str[1] = Attribute number
Str[2] = Attribute name
Str[3] = Property number
Str[4] = Property name
Str[5] = Property value number (array case)
Str[6] = Property value 1
Str[n] = Property value n (array case)
.....
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug('In DbPutClassAttributeProperty2()')
class_name = argin[0]
nb_attributes = int(argin[1])
self.db.put_class_attribute_property2(class_name, nb_attributes, argin[2:]) |
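An example `argin` laid out per the docstring (class, attribute and property names are hypothetical; `db_device` would be a proxy to the Tango database device):

argin = [
    'MyMotorClass',  # Str[0]: Tango class name
    '1',             # Str[1]: number of attributes that follow
    'Position',      # Str[2]: attribute name
    '1',             # Str[3]: number of properties for this attribute
    'unit',          # Str[4]: property name
    '1',             # Str[5]: number of values for this property
    'mm',            # Str[6]: property value 1
]
db_device.DbPutClassAttributeProperty2(argin)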
def walknset_vars(self, task_class=None, *args, **kwargs):
    """
    Set the values of the ABINIT variables in the input files of the nodes
    Args:
        task_class: If not None, only the input files of the tasks belonging
            to class `task_class` are modified.
    Example:
        flow.walknset_vars(ecut=10, kptopt=4)
    """
    def change_task(task):
        if task_class is not None and task.__class__ is not task_class: return False
        return True
    if self.is_work:
        for task in self:
            if not change_task(task): continue
            task.set_vars(*args, **kwargs)
    elif self.is_flow:
        for task in self.iflat_tasks():
            if not change_task(task): continue
            task.set_vars(*args, **kwargs)
    else:
        raise TypeError("Don't know how to set variables for object class %s" % self.__class__.__name__) | def function[walknset_vars, parameter[self, task_class]]:
constant[
Set the values of the ABINIT variables in the input files of the nodes
Args:
task_class: If not None, only the input files of the tasks belonging
to class `task_class` are modified.
Example:
flow.walknset_vars(ecut=10, kptopt=4)
]
def function[change_task, parameter[task]]:
if <ast.BoolOp object at 0x7da1b2187a30> begin[:]
return[constant[False]]
return[constant[True]]
if name[self].is_work begin[:]
for taget[name[task]] in starred[name[self]] begin[:]
if <ast.UnaryOp object at 0x7da1b2186470> begin[:]
continue
call[name[task].set_vars, parameter[<ast.Starred object at 0x7da1b2187cd0>]] | keyword[def] identifier[walknset_vars] ( identifier[self] , identifier[task_class] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[change_task] ( identifier[task] ):
keyword[if] identifier[task_class] keyword[is] keyword[not] keyword[None] keyword[and] identifier[task] . identifier[__class__] keyword[is] keyword[not] identifier[task_class] : keyword[return] keyword[False]
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[is_work] :
keyword[for] identifier[task] keyword[in] identifier[self] :
keyword[if] keyword[not] identifier[change_task] ( identifier[task] ): keyword[continue]
identifier[task] . identifier[set_vars] (* identifier[args] ,** identifier[kwargs] )
keyword[elif] identifier[self] . identifier[is_flow] :
keyword[for] identifier[task] keyword[in] identifier[self] . identifier[iflat_tasks] ():
keyword[if] keyword[not] identifier[change_task] ( identifier[task] ): keyword[continue]
identifier[task] . identifier[set_vars] (* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] ) | def walknset_vars(self, task_class=None, *args, **kwargs):
"""
Set the values of the ABINIT variables in the input files of the nodes
Args:
task_class: If not None, only the input files of the tasks belonging
to class `task_class` are modified.
Example:
flow.walknset_vars(ecut=10, kptopt=4)
"""
def change_task(task):
if task_class is not None and task.__class__ is not task_class:
return False # depends on [control=['if'], data=[]]
return True
if self.is_work:
for task in self:
if not change_task(task):
continue # depends on [control=['if'], data=[]]
task.set_vars(*args, **kwargs) # depends on [control=['for'], data=['task']] # depends on [control=['if'], data=[]]
elif self.is_flow:
for task in self.iflat_tasks():
if not change_task(task):
continue # depends on [control=['if'], data=[]]
task.set_vars(*args, **kwargs) # depends on [control=['for'], data=['task']] # depends on [control=['if'], data=[]]
else:
raise TypeError("Don't know how to set variables for object class %s" % self.__class__.__name__) |
def make_filter(self, fieldname, query_func, expct_value):
''' makes a filter that will be applied to an object's property based
on query_func '''
def actual_filter(item):
value = getattr(item, fieldname)
if query_func in NULL_AFFECTED_FILTERS and value is None:
return False
if query_func == 'eq':
return value == expct_value
elif query_func == 'ne':
return value != expct_value
elif query_func == 'lt':
return value < expct_value
elif query_func == 'lte':
return value <= expct_value
elif query_func == 'gt':
return value > expct_value
elif query_func == 'gte':
return value >= expct_value
elif query_func == 'startswith':
return value.startswith(expct_value)
elif query_func == 'endswith':
return value.endswith(expct_value)
actual_filter.__doc__ = '{} {} {}'.format('val', query_func, expct_value)
return actual_filter | def function[make_filter, parameter[self, fieldname, query_func, expct_value]]:
constant[ makes a filter that will be applied to an object's property based
on query_func ]
def function[actual_filter, parameter[item]]:
variable[value] assign[=] call[name[getattr], parameter[name[item], name[fieldname]]]
if <ast.BoolOp object at 0x7da2047eabf0> begin[:]
return[constant[False]]
if compare[name[query_func] equal[==] constant[eq]] begin[:]
return[compare[name[value] equal[==] name[expct_value]]]
name[actual_filter].__doc__ assign[=] call[constant[{} {} {}].format, parameter[constant[val], name[query_func], name[expct_value]]]
return[name[actual_filter]] | keyword[def] identifier[make_filter] ( identifier[self] , identifier[fieldname] , identifier[query_func] , identifier[expct_value] ):
literal[string]
keyword[def] identifier[actual_filter] ( identifier[item] ):
identifier[value] = identifier[getattr] ( identifier[item] , identifier[fieldname] )
keyword[if] identifier[query_func] keyword[in] identifier[NULL_AFFECTED_FILTERS] keyword[and] identifier[value] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[query_func] == literal[string] :
keyword[return] identifier[value] == identifier[expct_value]
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] != identifier[expct_value]
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] < identifier[expct_value]
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] <= identifier[expct_value]
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] > identifier[expct_value]
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] >= identifier[expct_value]
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] . identifier[startswith] ( identifier[expct_value] )
keyword[elif] identifier[query_func] == literal[string] :
keyword[return] identifier[value] . identifier[endswith] ( identifier[expct_value] )
identifier[actual_filter] . identifier[__doc__] = literal[string] . identifier[format] ( literal[string] , identifier[query_func] , identifier[expct_value] )
keyword[return] identifier[actual_filter] | def make_filter(self, fieldname, query_func, expct_value):
""" makes a filter that will be appliead to an object's property based
on query_func """
def actual_filter(item):
value = getattr(item, fieldname)
if query_func in NULL_AFFECTED_FILTERS and value is None:
return False # depends on [control=['if'], data=[]]
if query_func == 'eq':
return value == expct_value # depends on [control=['if'], data=[]]
elif query_func == 'ne':
return value != expct_value # depends on [control=['if'], data=[]]
elif query_func == 'lt':
return value < expct_value # depends on [control=['if'], data=[]]
elif query_func == 'lte':
return value <= expct_value # depends on [control=['if'], data=[]]
elif query_func == 'gt':
return value > expct_value # depends on [control=['if'], data=[]]
elif query_func == 'gte':
return value >= expct_value # depends on [control=['if'], data=[]]
elif query_func == 'startswith':
return value.startswith(expct_value) # depends on [control=['if'], data=[]]
elif query_func == 'endswith':
return value.endswith(expct_value) # depends on [control=['if'], data=[]]
actual_filter.__doc__ = '{} {} {}'.format('val', query_func, expct_value)
return actual_filter |
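A minimal usage sketch; NULL_AFFECTED_FILTERS is referenced but never defined in the snippet above, so a plausible definition is assumed here, and since `self` is unused inside, None can stand in for it.

from collections import namedtuple

NULL_AFFECTED_FILTERS = {'lt', 'lte', 'gt', 'gte'}  # assumed definition
Item = namedtuple('Item', ['age'])

flt = make_filter(None, 'age', 'gte', 18)  # self is never used inside
items = [Item(15), Item(21), Item(None)]
print([it for it in items if flt(it)])  # -> [Item(age=21)]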
def has_parent_vaults(self, vault_id):
"""Tests if the ``Vault`` has any parents.
arg: vault_id (osid.id.Id): a vault ``Id``
return: (boolean) - ``true`` if the vault has parents, ``false``
otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.has_parent_bins
if self._catalog_session is not None:
return self._catalog_session.has_parent_catalogs(catalog_id=vault_id)
return self._hierarchy_session.has_parents(id_=vault_id) | def function[has_parent_vaults, parameter[self, vault_id]]:
constant[Tests if the ``Vault`` has any parents.
arg: vault_id (osid.id.Id): a vault ``Id``
return: (boolean) - ``true`` if the vault has parents, ``false``
otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.has_parent_catalogs, parameter[]]]
return[call[name[self]._hierarchy_session.has_parents, parameter[]]] | keyword[def] identifier[has_parent_vaults] ( identifier[self] , identifier[vault_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[has_parent_catalogs] ( identifier[catalog_id] = identifier[vault_id] )
keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[has_parents] ( identifier[id_] = identifier[vault_id] ) | def has_parent_vaults(self, vault_id):
"""Tests if the ``Vault`` has any parents.
arg: vault_id (osid.id.Id): a vault ``Id``
return: (boolean) - ``true`` if the vault has parents, ``false``
otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.has_parent_bins
if self._catalog_session is not None:
return self._catalog_session.has_parent_catalogs(catalog_id=vault_id) # depends on [control=['if'], data=[]]
return self._hierarchy_session.has_parents(id_=vault_id) |
def date_in_range(date1, date2, range):
"""Check if two date objects are within a specific range"""
date_obj1 = convert_date(date1)
date_obj2 = convert_date(date2)
return (date_obj2 - date_obj1).days <= range | def function[date_in_range, parameter[date1, date2, range]]:
constant[Check if two date objects are within a specific range]
variable[date_obj1] assign[=] call[name[convert_date], parameter[name[date1]]]
variable[date_obj2] assign[=] call[name[convert_date], parameter[name[date2]]]
return[compare[binary_operation[name[date_obj2] - name[date_obj1]].days less_or_equal[<=] name[range]]] | keyword[def] identifier[date_in_range] ( identifier[date1] , identifier[date2] , identifier[range] ):
literal[string]
identifier[date_obj1] = identifier[convert_date] ( identifier[date1] )
identifier[date_obj2] = identifier[convert_date] ( identifier[date2] )
keyword[return] ( identifier[date_obj2] - identifier[date_obj1] ). identifier[days] <= identifier[range] | def date_in_range(date1, date2, range):
"""Check if two date objects are within a specific range"""
date_obj1 = convert_date(date1)
date_obj2 = convert_date(date2)
return (date_obj2 - date_obj1).days <= range |
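A quick sketch assuming convert_date maps 'YYYY-MM-DD' strings to datetime.date objects (the helper itself is not shown above):

import datetime

def convert_date(d):  # assumed behavior of the missing helper
    return datetime.date.fromisoformat(d)

print(date_in_range('2024-01-01', '2024-01-05', 7))   # True: 4 days apart
print(date_in_range('2024-01-01', '2024-03-01', 30))  # False: 60 days apart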
def rollback(self, project_id, transaction):
"""Perform a ``rollback`` request.
:type project_id: str
:param project_id: The project to connect to. This is
usually your project name in the cloud console.
:type transaction: bytes
:param transaction: The transaction ID to rollback.
:rtype: :class:`.datastore_pb2.RollbackResponse`
:returns: The returned protobuf response object.
"""
request_pb = _datastore_pb2.RollbackRequest(
project_id=project_id, transaction=transaction
)
# Response is empty (i.e. no fields) but we return it anyway.
return _rpc(
self.client._http,
project_id,
"rollback",
self.client._base_url,
request_pb,
_datastore_pb2.RollbackResponse,
) | def function[rollback, parameter[self, project_id, transaction]]:
constant[Perform a ``rollback`` request.
:type project_id: str
:param project_id: The project to connect to. This is
usually your project name in the cloud console.
:type transaction: bytes
:param transaction: The transaction ID to rollback.
:rtype: :class:`.datastore_pb2.RollbackResponse`
:returns: The returned protobuf response object.
]
variable[request_pb] assign[=] call[name[_datastore_pb2].RollbackRequest, parameter[]]
return[call[name[_rpc], parameter[name[self].client._http, name[project_id], constant[rollback], name[self].client._base_url, name[request_pb], name[_datastore_pb2].RollbackResponse]]] | keyword[def] identifier[rollback] ( identifier[self] , identifier[project_id] , identifier[transaction] ):
literal[string]
identifier[request_pb] = identifier[_datastore_pb2] . identifier[RollbackRequest] (
identifier[project_id] = identifier[project_id] , identifier[transaction] = identifier[transaction]
)
keyword[return] identifier[_rpc] (
identifier[self] . identifier[client] . identifier[_http] ,
identifier[project_id] ,
literal[string] ,
identifier[self] . identifier[client] . identifier[_base_url] ,
identifier[request_pb] ,
identifier[_datastore_pb2] . identifier[RollbackResponse] ,
) | def rollback(self, project_id, transaction):
"""Perform a ``rollback`` request.
:type project_id: str
:param project_id: The project to connect to. This is
usually your project name in the cloud console.
:type transaction: bytes
:param transaction: The transaction ID to rollback.
:rtype: :class:`.datastore_pb2.RollbackResponse`
:returns: The returned protobuf response object.
"""
request_pb = _datastore_pb2.RollbackRequest(project_id=project_id, transaction=transaction)
# Response is empty (i.e. no fields) but we return it anyway.
return _rpc(self.client._http, project_id, 'rollback', self.client._base_url, request_pb, _datastore_pb2.RollbackResponse) |
def add_done_callback(self, fn):
"""Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
"""
with self._condition:
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return
fn(self) | def function[add_done_callback, parameter[self, fn]]:
constant[Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
]
with name[self]._condition begin[:]
if compare[name[self]._state <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da20e9b12d0>, <ast.Name object at 0x7da20e9b3a30>, <ast.Name object at 0x7da20e9b2260>]]] begin[:]
call[name[self]._done_callbacks.append, parameter[name[fn]]]
return[None]
call[name[fn], parameter[name[self]]] | keyword[def] identifier[add_done_callback] ( identifier[self] , identifier[fn] ):
literal[string]
keyword[with] identifier[self] . identifier[_condition] :
keyword[if] identifier[self] . identifier[_state] keyword[not] keyword[in] [ identifier[CANCELLED] , identifier[CANCELLED_AND_NOTIFIED] , identifier[FINISHED] ]:
identifier[self] . identifier[_done_callbacks] . identifier[append] ( identifier[fn] )
keyword[return]
identifier[fn] ( identifier[self] ) | def add_done_callback(self, fn):
"""Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
"""
with self._condition:
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]]
fn(self) |
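The docstring matches the standard library's Future contract, so a behavior sketch with concurrent.futures illustrates it: the callback fires on completion, or immediately if the future is already done.

from concurrent.futures import ThreadPoolExecutor

def report(fut):
    print('done:', fut.result())

with ThreadPoolExecutor(max_workers=1) as pool:
    fut = pool.submit(lambda: 41 + 1)
    fut.add_done_callback(report)  # may run at once if already FINISHED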
def _serve_file(self, path):
"""Call Paste's FileApp (a WSGI application) to serve the file
at the specified path
"""
fapp = paste.fileapp.FileApp(path)
return fapp(request.environ, self.start_response) | def function[_serve_file, parameter[self, path]]:
constant[Call Paste's FileApp (a WSGI application) to serve the file
at the specified path
]
variable[fapp] assign[=] call[name[paste].fileapp.FileApp, parameter[name[path]]]
return[call[name[fapp], parameter[name[request].environ, name[self].start_response]]] | keyword[def] identifier[_serve_file] ( identifier[self] , identifier[path] ):
literal[string]
identifier[fapp] = identifier[paste] . identifier[fileapp] . identifier[FileApp] ( identifier[path] )
keyword[return] identifier[fapp] ( identifier[request] . identifier[environ] , identifier[self] . identifier[start_response] ) | def _serve_file(self, path):
"""Call Paste's FileApp (a WSGI application) to serve the file
at the specified path
"""
fapp = paste.fileapp.FileApp(path)
return fapp(request.environ, self.start_response) |
def fetch_binary(self, fetch_request):
"""Fulfill a binary fetch request."""
bootstrap_dir = os.path.realpath(os.path.expanduser(self._bootstrap_dir))
bootstrapped_binary_path = os.path.join(bootstrap_dir, fetch_request.download_path)
logger.debug("bootstrapped_binary_path: {}".format(bootstrapped_binary_path))
file_name = fetch_request.file_name
urls = fetch_request.urls
if self._ignore_cached_download or not os.path.exists(bootstrapped_binary_path):
self._do_fetch(bootstrapped_binary_path, file_name, urls)
logger.debug('Selected {binary} binary bootstrapped to: {path}'
.format(binary=file_name, path=bootstrapped_binary_path))
return bootstrapped_binary_path | def function[fetch_binary, parameter[self, fetch_request]]:
constant[Fulfill a binary fetch request.]
variable[bootstrap_dir] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.expanduser, parameter[name[self]._bootstrap_dir]]]]
variable[bootstrapped_binary_path] assign[=] call[name[os].path.join, parameter[name[bootstrap_dir], name[fetch_request].download_path]]
call[name[logger].debug, parameter[call[constant[bootstrapped_binary_path: {}].format, parameter[name[bootstrapped_binary_path]]]]]
variable[file_name] assign[=] name[fetch_request].file_name
variable[urls] assign[=] name[fetch_request].urls
if <ast.BoolOp object at 0x7da1b1e6a0e0> begin[:]
call[name[self]._do_fetch, parameter[name[bootstrapped_binary_path], name[file_name], name[urls]]]
call[name[logger].debug, parameter[call[constant[Selected {binary} binary bootstrapped to: {path}].format, parameter[]]]]
return[name[bootstrapped_binary_path]] | keyword[def] identifier[fetch_binary] ( identifier[self] , identifier[fetch_request] ):
literal[string]
identifier[bootstrap_dir] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[self] . identifier[_bootstrap_dir] ))
identifier[bootstrapped_binary_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[bootstrap_dir] , identifier[fetch_request] . identifier[download_path] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[bootstrapped_binary_path] ))
identifier[file_name] = identifier[fetch_request] . identifier[file_name]
identifier[urls] = identifier[fetch_request] . identifier[urls]
keyword[if] identifier[self] . identifier[_ignore_cached_download] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[bootstrapped_binary_path] ):
identifier[self] . identifier[_do_fetch] ( identifier[bootstrapped_binary_path] , identifier[file_name] , identifier[urls] )
identifier[logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[binary] = identifier[file_name] , identifier[path] = identifier[bootstrapped_binary_path] ))
keyword[return] identifier[bootstrapped_binary_path] | def fetch_binary(self, fetch_request):
"""Fulfill a binary fetch request."""
bootstrap_dir = os.path.realpath(os.path.expanduser(self._bootstrap_dir))
bootstrapped_binary_path = os.path.join(bootstrap_dir, fetch_request.download_path)
logger.debug('bootstrapped_binary_path: {}'.format(bootstrapped_binary_path))
file_name = fetch_request.file_name
urls = fetch_request.urls
if self._ignore_cached_download or not os.path.exists(bootstrapped_binary_path):
self._do_fetch(bootstrapped_binary_path, file_name, urls) # depends on [control=['if'], data=[]]
logger.debug('Selected {binary} binary bootstrapped to: {path}'.format(binary=file_name, path=bootstrapped_binary_path))
return bootstrapped_binary_path |
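A calling sketch; the request type below is a stand-in with the three fields the method reads (download_path, file_name, urls), and the commented call assumes an instance of the surrounding fetcher class.

from collections import namedtuple

BinaryFetchRequest = namedtuple('BinaryFetchRequest',
                                ['download_path', 'file_name', 'urls'])
req = BinaryFetchRequest(download_path='bin/protoc/2.4.1/protoc',
                         file_name='protoc',
                         urls=['https://example.com/protoc'])  # hypothetical URL
# path = fetcher.fetch_binary(req)  # fetcher: assumed instance of this class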
def hist(self):
"""Draw normed histogram of the data using :attr:`bins`
.. plot::
>>> from scipy import stats
>>> data = stats.gamma.rvs(2, loc=1.5, scale=2, size=20000)
>>> # We then create the Fitter object
>>> import fitter
>>> fitter.Fitter(data).hist()
"""
_ = pylab.hist(self._data, bins=self.bins, density=True)
pylab.grid(True) | def function[hist, parameter[self]]:
constant[Draw normed histogram of the data using :attr:`bins`
.. plot::
>>> from scipy import stats
>>> data = stats.gamma.rvs(2, loc=1.5, scale=2, size=20000)
>>> # We then create the Fitter object
>>> import fitter
>>> fitter.Fitter(data).hist()
]
variable[_] assign[=] call[name[pylab].hist, parameter[name[self]._data]]
call[name[pylab].grid, parameter[constant[True]]] | keyword[def] identifier[hist] ( identifier[self] ):
literal[string]
identifier[_] = identifier[pylab] . identifier[hist] ( identifier[self] . identifier[_data] , identifier[bins] = identifier[self] . identifier[bins] , identifier[density] = keyword[True] )
identifier[pylab] . identifier[grid] ( keyword[True] ) | def hist(self):
"""Draw normed histogram of the data using :attr:`bins`
.. plot::
>>> from scipy import stats
>>> data = stats.gamma.rvs(2, loc=1.5, scale=2, size=20000)
>>> # We then create the Fitter object
>>> import fitter
>>> fitter.Fitter(data).hist()
"""
_ = pylab.hist(self._data, bins=self.bins, density=True)
pylab.grid(True) |
def from_path(path: str, encoding: str = 'utf-8', **kwargs) -> BELGraph:
"""Load a BEL graph from a file resource. This function is a thin wrapper around :func:`from_lines`.
:param path: A file path
:param encoding: the encoding to use when reading this file. Is passed to :code:`codecs.open`. See the python
`docs <https://docs.python.org/3/library/codecs.html#standard-encodings>`_ for a list of standard encodings. For
example, files starting with a UTF-8 BOM should use :code:`utf_8_sig`.
The remaining keyword arguments are passed to :func:`pybel.io.line_utils.parse_lines`.
"""
log.info('Loading from path: %s', path)
graph = BELGraph(path=path)
with codecs.open(os.path.expanduser(path), encoding=encoding) as lines:
parse_lines(graph=graph, lines=lines, **kwargs)
return graph | def function[from_path, parameter[path, encoding]]:
constant[Load a BEL graph from a file resource. This function is a thin wrapper around :func:`from_lines`.
:param path: A file path
:param encoding: the encoding to use when reading this file. Is passed to :code:`codecs.open`. See the python
`docs <https://docs.python.org/3/library/codecs.html#standard-encodings>`_ for a list of standard encodings. For
example, files starting with a UTF-8 BOM should use :code:`utf_8_sig`.
The remaining keyword arguments are passed to :func:`pybel.io.line_utils.parse_lines`.
]
call[name[log].info, parameter[constant[Loading from path: %s], name[path]]]
variable[graph] assign[=] call[name[BELGraph], parameter[]]
with call[name[codecs].open, parameter[call[name[os].path.expanduser, parameter[name[path]]]]] begin[:]
call[name[parse_lines], parameter[]]
return[name[graph]] | keyword[def] identifier[from_path] ( identifier[path] : identifier[str] , identifier[encoding] : identifier[str] = literal[string] ,** identifier[kwargs] )-> identifier[BELGraph] :
literal[string]
identifier[log] . identifier[info] ( literal[string] , identifier[path] )
identifier[graph] = identifier[BELGraph] ( identifier[path] = identifier[path] )
keyword[with] identifier[codecs] . identifier[open] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path] ), identifier[encoding] = identifier[encoding] ) keyword[as] identifier[lines] :
identifier[parse_lines] ( identifier[graph] = identifier[graph] , identifier[lines] = identifier[lines] ,** identifier[kwargs] )
keyword[return] identifier[graph] | def from_path(path: str, encoding: str='utf-8', **kwargs) -> BELGraph:
"""Load a BEL graph from a file resource. This function is a thin wrapper around :func:`from_lines`.
:param path: A file path
:param encoding: the encoding to use when reading this file. Is passed to :code:`codecs.open`. See the python
`docs <https://docs.python.org/3/library/codecs.html#standard-encodings>`_ for a list of standard encodings. For
example, files starting with a UTF-8 BOM should use :code:`utf_8_sig`.
The remaining keyword arguments are passed to :func:`pybel.io.line_utils.parse_lines`.
"""
log.info('Loading from path: %s', path)
graph = BELGraph(path=path)
with codecs.open(os.path.expanduser(path), encoding=encoding) as lines:
parse_lines(graph=graph, lines=lines, **kwargs) # depends on [control=['with'], data=['lines']]
return graph |
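A usage sketch with a hypothetical file path; any extra keyword arguments would be forwarded to parse_lines, so none are assumed here.

graph = from_path('~/bel/small_corpus.bel', encoding='utf_8_sig')
print(graph.number_of_nodes(), graph.number_of_edges())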
def fetch_push_logs():
"""
Run several fetch_hg_push_log subtasks, one per repository
"""
for repo in Repository.objects.filter(dvcs_type='hg',
active_status="active"):
fetch_hg_push_log.apply_async(
args=(repo.name, repo.url),
queue='pushlog'
) | def function[fetch_push_logs, parameter[]]:
constant[
Run several fetch_hg_push_log subtasks, one per repository
]
for taget[name[repo]] in starred[call[name[Repository].objects.filter, parameter[]]] begin[:]
call[name[fetch_hg_push_log].apply_async, parameter[]] | keyword[def] identifier[fetch_push_logs] ():
literal[string]
keyword[for] identifier[repo] keyword[in] identifier[Repository] . identifier[objects] . identifier[filter] ( identifier[dvcs_type] = literal[string] ,
identifier[active_status] = literal[string] ):
identifier[fetch_hg_push_log] . identifier[apply_async] (
identifier[args] =( identifier[repo] . identifier[name] , identifier[repo] . identifier[url] ),
identifier[queue] = literal[string]
) | def fetch_push_logs():
"""
Run several fetch_hg_push_log subtasks, one per repository
"""
for repo in Repository.objects.filter(dvcs_type='hg', active_status='active'):
fetch_hg_push_log.apply_async(args=(repo.name, repo.url), queue='pushlog') # depends on [control=['for'], data=['repo']] |
def updateratio(ctx, symbol, ratio, account):
""" Update the collateral ratio of a call positions
"""
from bitshares.dex import Dex
dex = Dex(bitshares_instance=ctx.bitshares)
print_tx(dex.adjust_collateral_ratio(symbol, ratio, account=account)) | def function[updateratio, parameter[ctx, symbol, ratio, account]]:
constant[ Update the collateral ratio of a call position
]
from relative_module[bitshares.dex] import module[Dex]
variable[dex] assign[=] call[name[Dex], parameter[]]
call[name[print_tx], parameter[call[name[dex].adjust_collateral_ratio, parameter[name[symbol], name[ratio]]]]] | keyword[def] identifier[updateratio] ( identifier[ctx] , identifier[symbol] , identifier[ratio] , identifier[account] ):
literal[string]
keyword[from] identifier[bitshares] . identifier[dex] keyword[import] identifier[Dex]
identifier[dex] = identifier[Dex] ( identifier[bitshares_instance] = identifier[ctx] . identifier[bitshares] )
identifier[print_tx] ( identifier[dex] . identifier[adjust_collateral_ratio] ( identifier[symbol] , identifier[ratio] , identifier[account] = identifier[account] )) | def updateratio(ctx, symbol, ratio, account):
""" Update the collateral ratio of a call positions
"""
from bitshares.dex import Dex
dex = Dex(bitshares_instance=ctx.bitshares)
print_tx(dex.adjust_collateral_ratio(symbol, ratio, account=account)) |
def save_segment(self, f, segment, checksum=None):
""" Save the next segment to the image file, return next checksum value if provided """
segment_data = self.maybe_patch_segment_data(f, segment.data)
f.write(struct.pack('<II', segment.addr, len(segment_data)))
f.write(segment_data)
if checksum is not None:
return ESPLoader.checksum(segment_data, checksum) | def function[save_segment, parameter[self, f, segment, checksum]]:
constant[ Save the next segment to the image file, return next checksum value if provided ]
variable[segment_data] assign[=] call[name[self].maybe_patch_segment_data, parameter[name[f], name[segment].data]]
call[name[f].write, parameter[call[name[struct].pack, parameter[constant[<II], name[segment].addr, call[name[len], parameter[name[segment_data]]]]]]]
call[name[f].write, parameter[name[segment_data]]]
if compare[name[checksum] is_not constant[None]] begin[:]
return[call[name[ESPLoader].checksum, parameter[name[segment_data], name[checksum]]]] | keyword[def] identifier[save_segment] ( identifier[self] , identifier[f] , identifier[segment] , identifier[checksum] = keyword[None] ):
literal[string]
identifier[segment_data] = identifier[self] . identifier[maybe_patch_segment_data] ( identifier[f] , identifier[segment] . identifier[data] )
identifier[f] . identifier[write] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[segment] . identifier[addr] , identifier[len] ( identifier[segment_data] )))
identifier[f] . identifier[write] ( identifier[segment_data] )
keyword[if] identifier[checksum] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[ESPLoader] . identifier[checksum] ( identifier[segment_data] , identifier[checksum] ) | def save_segment(self, f, segment, checksum=None):
""" Save the next segment to the image file, return next checksum value if provided """
segment_data = self.maybe_patch_segment_data(f, segment.data)
f.write(struct.pack('<II', segment.addr, len(segment_data)))
f.write(segment_data)
if checksum is not None:
return ESPLoader.checksum(segment_data, checksum) # depends on [control=['if'], data=['checksum']] |
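The intended calling pattern threads the running checksum through consecutive segment writes; ESP_CHECKSUM_MAGIC and the `image` object with its `segments` list are assumed from the surrounding esptool code.

checksum = ESPLoader.ESP_CHECKSUM_MAGIC  # esptool's checksum seed
with open('image.bin', 'wb') as f:
    for seg in image.segments:  # assumed list of segment objects
        checksum = image.save_segment(f, seg, checksum)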
def _parse_kexgss_hostkey(self, m):
"""
Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode).
:param `.Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message
"""
# client mode
host_key = m.get_string()
self.transport.host_key = host_key
sig = m.get_string()
self.transport._verify_key(host_key, sig)
self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) | def function[_parse_kexgss_hostkey, parameter[self, m]]:
constant[
Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode).
:param `.Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message
]
variable[host_key] assign[=] call[name[m].get_string, parameter[]]
name[self].transport.host_key assign[=] name[host_key]
variable[sig] assign[=] call[name[m].get_string, parameter[]]
call[name[self].transport._verify_key, parameter[name[host_key], name[sig]]]
call[name[self].transport._expect_packet, parameter[name[MSG_KEXGSS_CONTINUE], name[MSG_KEXGSS_COMPLETE]]] | keyword[def] identifier[_parse_kexgss_hostkey] ( identifier[self] , identifier[m] ):
literal[string]
identifier[host_key] = identifier[m] . identifier[get_string] ()
identifier[self] . identifier[transport] . identifier[host_key] = identifier[host_key]
identifier[sig] = identifier[m] . identifier[get_string] ()
identifier[self] . identifier[transport] . identifier[_verify_key] ( identifier[host_key] , identifier[sig] )
identifier[self] . identifier[transport] . identifier[_expect_packet] ( identifier[MSG_KEXGSS_CONTINUE] , identifier[MSG_KEXGSS_COMPLETE] ) | def _parse_kexgss_hostkey(self, m):
"""
Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode).
:param `.Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message
"""
# client mode
host_key = m.get_string()
self.transport.host_key = host_key
sig = m.get_string()
self.transport._verify_key(host_key, sig)
self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) |
def rsdl(self):
"""Compute fixed point residual in Fourier domain."""
diff = self.Xf - self.Yfprv
return sl.rfl2norm2(diff, self.X.shape, axis=self.cri.axisN) | def function[rsdl, parameter[self]]:
constant[Compute fixed point residual in Fourier domain.]
variable[diff] assign[=] binary_operation[name[self].Xf - name[self].Yfprv]
return[call[name[sl].rfl2norm2, parameter[name[diff], name[self].X.shape]]] | keyword[def] identifier[rsdl] ( identifier[self] ):
literal[string]
identifier[diff] = identifier[self] . identifier[Xf] - identifier[self] . identifier[Yfprv]
keyword[return] identifier[sl] . identifier[rfl2norm2] ( identifier[diff] , identifier[self] . identifier[X] . identifier[shape] , identifier[axis] = identifier[self] . identifier[cri] . identifier[axisN] ) | def rsdl(self):
"""Compute fixed point residual in Fourier domain."""
diff = self.Xf - self.Yfprv
return sl.rfl2norm2(diff, self.X.shape, axis=self.cri.axisN) |
def attach_par_subdivision(par_name, par_times):
"""
Manual assignment of a collection of (stopped) Times objects as a parallel
subdivision of a running timer.
Notes:
An example sequence of proper usage:
1. Stamp in master process.
2. Run timed sub-processes.
3. Get timing data from sub-processes into master.
4. Attach timing data (i.e. list of Times objects) in master using this method.
5. Stamp in master process.
To stamp in the master between steps 1 and 5, it is recommended to
subdivide() between steps 1 and 2, and end that subdivision before
attaching, or else the master stamp will not reflect the sub-process
time.
Args:
par_name (any): Identifier for the collection, passed through str()
par_times (list or tuple): Collection of Times data objects.
Raises:
TypeError: If par_times is not a list or tuple of Times data objects.
"""
t = timer()
if not isinstance(par_times, (list, tuple)):
raise TypeError("Expected list or tuple for param 'par_times'.")
for times in par_times:
if not isinstance(times, Times):
raise TypeError("Expected each element of param 'par_times' to be Times object.")
assert times.total > 0., "An attached par subdivision has total time 0, appears empty."
par_name = str(par_name)
sub_with_max_tot = max(par_times, key=lambda x: x.total)
f.r.self_agg += sub_with_max_tot.self_agg
if par_name not in f.t.par_subdvsn_awaiting:
f.t.par_subdvsn_awaiting[par_name] = []
for times in par_times:
times_copy = copy.deepcopy(times)
times_copy.parent = f.r
times_copy.par_in_parent = par_name
f.t.par_subdvsn_awaiting[par_name].append(times_copy)
else:
for new_sub in par_times:
is_prev_sub = False
for old_sub in f.t.par_subdvsn_awaiting[par_name]:
if old_sub.name == new_sub.name:
is_prev_sub = True
break
if is_prev_sub:
merge.merge_times(old_sub, new_sub)
else:
new_sub_copy = copy.deepcopy(new_sub)
new_sub_copy.parent = f.r
new_sub_copy.par_in_parent = par_name
f.t.par_subdvsn_awaiting[par_name].append(new_sub_copy)
f.t.self_cut += timer() - t | def function[attach_par_subdivision, parameter[par_name, par_times]]:
constant[
Manual assignment of a collection of (stopped) Times objects as a parallel
subdivision of a running timer.
Notes:
An example sequence of proper usage:
1. Stamp in master process.
2. Run timed sub-processes.
3. Get timing data from sub-processes into master.
4. Attach timing data (i.e. list of Times objects) in master using this method.
5. Stamp in master process.
To stamp in the master between steps 1 and 5, it is recommended to
subdivide() between steps 1 and 2, and end that subdivision before
attaching, or else the master stamp will not reflect the sub-process
time.
Args:
par_name (any): Identifier for the collection, passed through str()
par_times (list or tuple): Collection of Times data objects.
Raises:
TypeError: If par_times is not a list or tuple of Times data objects.
]
variable[t] assign[=] call[name[timer], parameter[]]
if <ast.UnaryOp object at 0x7da20e961510> begin[:]
<ast.Raise object at 0x7da20e963fa0>
for taget[name[times]] in starred[name[par_times]] begin[:]
if <ast.UnaryOp object at 0x7da20e963550> begin[:]
<ast.Raise object at 0x7da20e9605b0>
assert[compare[name[times].total greater[>] constant[0.0]]]
variable[par_name] assign[=] call[name[str], parameter[name[par_name]]]
variable[sub_with_max_tot] assign[=] call[name[max], parameter[name[par_times]]]
<ast.AugAssign object at 0x7da20e962da0>
if compare[name[par_name] <ast.NotIn object at 0x7da2590d7190> name[f].t.par_subdvsn_awaiting] begin[:]
call[name[f].t.par_subdvsn_awaiting][name[par_name]] assign[=] list[[]]
for taget[name[times]] in starred[name[par_times]] begin[:]
variable[times_copy] assign[=] call[name[copy].deepcopy, parameter[name[times]]]
name[times_copy].parent assign[=] name[f].r
name[times_copy].par_in_parent assign[=] name[par_name]
call[call[name[f].t.par_subdvsn_awaiting][name[par_name]].append, parameter[name[times_copy]]]
<ast.AugAssign object at 0x7da18ede63b0> | keyword[def] identifier[attach_par_subdivision] ( identifier[par_name] , identifier[par_times] ):
literal[string]
identifier[t] = identifier[timer] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[par_times] ,( identifier[list] , identifier[tuple] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[for] identifier[times] keyword[in] identifier[par_times] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[times] , identifier[Times] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[assert] identifier[times] . identifier[total] > literal[int] , literal[string]
identifier[par_name] = identifier[str] ( identifier[par_name] )
identifier[sub_with_max_tot] = identifier[max] ( identifier[par_times] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[total] )
identifier[f] . identifier[r] . identifier[self_agg] += identifier[sub_with_max_tot] . identifier[self_agg]
keyword[if] identifier[par_name] keyword[not] keyword[in] identifier[f] . identifier[t] . identifier[par_subdvsn_awaiting] :
identifier[f] . identifier[t] . identifier[par_subdvsn_awaiting] [ identifier[par_name] ]=[]
keyword[for] identifier[times] keyword[in] identifier[par_times] :
identifier[times_copy] = identifier[copy] . identifier[deepcopy] ( identifier[times] )
identifier[times_copy] . identifier[parent] = identifier[f] . identifier[r]
identifier[times_copy] . identifier[par_in_parent] = identifier[par_name]
identifier[f] . identifier[t] . identifier[par_subdvsn_awaiting] [ identifier[par_name] ]. identifier[append] ( identifier[times_copy] )
keyword[else] :
keyword[for] identifier[new_sub] keyword[in] identifier[par_times] :
identifier[is_prev_sub] = keyword[False]
keyword[for] identifier[old_sub] keyword[in] identifier[f] . identifier[t] . identifier[par_subdvsn_awaiting] [ identifier[par_name] ]:
keyword[if] identifier[old_sub] . identifier[name] == identifier[new_sub] . identifier[name] :
identifier[is_prev_sub] = keyword[True]
keyword[break]
keyword[if] identifier[is_prev_sub] :
identifier[merge] . identifier[merge_times] ( identifier[old_sub] , identifier[new_sub] )
keyword[else] :
identifier[new_sub_copy] = identifier[copy] . identifier[deepcopy] ( identifier[new_sub] )
identifier[new_sub_copy] . identifier[parent] = identifier[f] . identifier[r]
identifier[new_sub_copy] . identifier[par_in_parent] = identifier[par_name]
identifier[f] . identifier[t] . identifier[par_subdvsn_awaiting] [ identifier[par_name] ]. identifier[append] ( identifier[new_sub_copy] )
identifier[f] . identifier[t] . identifier[self_cut] += identifier[timer] ()- identifier[t] | def attach_par_subdivision(par_name, par_times):
"""
Manual assignment of a collection of (stopped) Times objects as a parallel
subdivision of a running timer.
Notes:
An example sequence of proper usage:
1. Stamp in master process.
2. Run timed sub-processes.
3. Get timing data from sub-processes into master.
4. Attach timing data (i.e. list of Times objects) in master using this method.
5. Stamp in master process.
To stamp in the master between steps 1 and 5, it is recommended to
subdivide() between steps 1 and 2, and end that subdivision before
attaching, or else the master stamp will not reflect the sub-process
time.
Args:
par_name (any): Identifier for the collection, passed through str()
par_times (list or tuple): Collection of Times data objects.
Raises:
TypeError: If par_times not a list or tuple of Times data objects.
"""
t = timer()
if not isinstance(par_times, (list, tuple)):
raise TypeError("Expected list or tuple for param 'par_times'.") # depends on [control=['if'], data=[]]
for times in par_times:
if not isinstance(times, Times):
raise TypeError("Expected each element of param 'par_times' to be Times object.") # depends on [control=['if'], data=[]]
assert times.total > 0.0, 'An attached par subdivision has total time 0, appears empty.' # depends on [control=['for'], data=['times']]
par_name = str(par_name)
sub_with_max_tot = max(par_times, key=lambda x: x.total)
f.r.self_agg += sub_with_max_tot.self_agg
if par_name not in f.t.par_subdvsn_awaiting:
f.t.par_subdvsn_awaiting[par_name] = []
for times in par_times:
times_copy = copy.deepcopy(times)
times_copy.parent = f.r
times_copy.par_in_parent = par_name
f.t.par_subdvsn_awaiting[par_name].append(times_copy) # depends on [control=['for'], data=['times']] # depends on [control=['if'], data=['par_name']]
else:
for new_sub in par_times:
is_prev_sub = False
for old_sub in f.t.par_subdvsn_awaiting[par_name]:
if old_sub.name == new_sub.name:
is_prev_sub = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['old_sub']]
if is_prev_sub:
merge.merge_times(old_sub, new_sub) # depends on [control=['if'], data=[]]
else:
new_sub_copy = copy.deepcopy(new_sub)
new_sub_copy.parent = f.r
new_sub_copy.par_in_parent = par_name
f.t.par_subdvsn_awaiting[par_name].append(new_sub_copy) # depends on [control=['for'], data=['new_sub']]
f.t.self_cut += timer() - t |
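A sketch of the documented five-step sequence, assuming this function is exposed by a gtimer-style module; the worker helper is hypothetical.

import gtimer as gt  # assumed module exposing stamp/attach_par_subdivision

gt.stamp('setup')                                # 1. stamp in master
par_times = run_workers_and_collect()            # 2.-3. hypothetical helper
gt.attach_par_subdivision('workers', par_times)  # 4. attach the Times list
gt.stamp('after_workers')                        # 5. stamp in master again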
def should_include_node(ctx, directives):
# type: (ExecutionContext, Optional[List[Directive]]) -> bool
"""Determines if a field should be included based on the @include and
@skip directives, where @skip has higher precedence than @include."""
# TODO: Refactor based on latest code
if directives:
skip_ast = None
for directive in directives:
if directive.name.value == GraphQLSkipDirective.name:
skip_ast = directive
break
if skip_ast:
args = get_argument_values(
GraphQLSkipDirective.args, skip_ast.arguments, ctx.variable_values
)
if args.get("if") is True:
return False
include_ast = None
for directive in directives:
if directive.name.value == GraphQLIncludeDirective.name:
include_ast = directive
break
if include_ast:
args = get_argument_values(
GraphQLIncludeDirective.args, include_ast.arguments, ctx.variable_values
)
if args.get("if") is False:
return False
return True | def function[should_include_node, parameter[ctx, directives]]:
constant[Determines if a field should be included based on the @include and
@skip directives, where @skip has higher precedence than @include.]
if name[directives] begin[:]
variable[skip_ast] assign[=] constant[None]
for taget[name[directive]] in starred[name[directives]] begin[:]
if compare[name[directive].name.value equal[==] name[GraphQLSkipDirective].name] begin[:]
variable[skip_ast] assign[=] name[directive]
break
if name[skip_ast] begin[:]
variable[args] assign[=] call[name[get_argument_values], parameter[name[GraphQLSkipDirective].args, name[skip_ast].arguments, name[ctx].variable_values]]
if compare[call[name[args].get, parameter[constant[if]]] is constant[True]] begin[:]
return[constant[False]]
variable[include_ast] assign[=] constant[None]
for taget[name[directive]] in starred[name[directives]] begin[:]
if compare[name[directive].name.value equal[==] name[GraphQLIncludeDirective].name] begin[:]
variable[include_ast] assign[=] name[directive]
break
if name[include_ast] begin[:]
variable[args] assign[=] call[name[get_argument_values], parameter[name[GraphQLIncludeDirective].args, name[include_ast].arguments, name[ctx].variable_values]]
if compare[call[name[args].get, parameter[constant[if]]] is constant[False]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[should_include_node] ( identifier[ctx] , identifier[directives] ):
literal[string]
keyword[if] identifier[directives] :
identifier[skip_ast] = keyword[None]
keyword[for] identifier[directive] keyword[in] identifier[directives] :
keyword[if] identifier[directive] . identifier[name] . identifier[value] == identifier[GraphQLSkipDirective] . identifier[name] :
identifier[skip_ast] = identifier[directive]
keyword[break]
keyword[if] identifier[skip_ast] :
identifier[args] = identifier[get_argument_values] (
identifier[GraphQLSkipDirective] . identifier[args] , identifier[skip_ast] . identifier[arguments] , identifier[ctx] . identifier[variable_values]
)
keyword[if] identifier[args] . identifier[get] ( literal[string] ) keyword[is] keyword[True] :
keyword[return] keyword[False]
identifier[include_ast] = keyword[None]
keyword[for] identifier[directive] keyword[in] identifier[directives] :
keyword[if] identifier[directive] . identifier[name] . identifier[value] == identifier[GraphQLIncludeDirective] . identifier[name] :
identifier[include_ast] = identifier[directive]
keyword[break]
keyword[if] identifier[include_ast] :
identifier[args] = identifier[get_argument_values] (
identifier[GraphQLIncludeDirective] . identifier[args] , identifier[include_ast] . identifier[arguments] , identifier[ctx] . identifier[variable_values]
)
keyword[if] identifier[args] . identifier[get] ( literal[string] ) keyword[is] keyword[False] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def should_include_node(ctx, directives):
# type: (ExecutionContext, Optional[List[Directive]]) -> bool
'Determines if a field should be included based on the @include and\n    @skip directives, where @skip has higher precedence than @include.'
# TODO: Refactor based on latest code
if directives:
skip_ast = None
for directive in directives:
if directive.name.value == GraphQLSkipDirective.name:
skip_ast = directive
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['directive']]
if skip_ast:
args = get_argument_values(GraphQLSkipDirective.args, skip_ast.arguments, ctx.variable_values)
if args.get('if') is True:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
include_ast = None
for directive in directives:
if directive.name.value == GraphQLIncludeDirective.name:
include_ast = directive
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['directive']]
if include_ast:
args = get_argument_values(GraphQLIncludeDirective.args, include_ast.arguments, ctx.variable_values)
if args.get('if') is False:
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return True |
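The precedence rule, isolated as a standalone truth table (not the graphql-core API): a true @skip always wins, after which a false @include excludes.

def included(skip_if=None, include_if=None):
    if skip_if is True:       # @skip(if: true) -> drop, regardless of @include
        return False
    if include_if is False:   # @include(if: false) -> drop
        return False
    return True

assert included(skip_if=True, include_if=True) is False
assert included(include_if=True) is True
assert included() is True     # no directives -> keep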
def graphiter(self, graph, target, ascendants=0, descendants=1):
""" Iter on a graph to finds object connected
:param graph: Graph to serialize
:type graph: Graph
:param target: Node to iterate over
:type target: Node
:param ascendants: Number of levels to iterate over upwards (-1 = No Limit)
:param descendants: Number of levels to iterate over downwards (-1 = No limit)
:return:
"""
asc = 0 + ascendants
if asc != 0:
asc -= 1
desc = 0 + descendants
if desc != 0:
desc -= 1
t = str(target)
if descendants != 0 and self.downwards[t] is True:
self.downwards[t] = False
for pred, obj in graph.predicate_objects(target):
if desc == 0 and isinstance(obj, BNode):
continue
self.add((target, pred, obj))
# Retrieve triples about the object
if desc != 0 and self.downwards[str(obj)] is True:
self.graphiter(graph, target=obj, ascendants=0, descendants=desc)
if ascendants != 0 and self.updwards[t] is True:
self.updwards[t] = False
for s, p in graph.subject_predicates(object=target):
if desc == 0 and isinstance(s, BNode):
continue
self.add((s, p, target))
# Retrieve triples about the parent as object
if asc != 0 and self.updwards[str(s)] is True:
self.graphiter(graph, target=s, ascendants=asc, descendants=0) | def function[graphiter, parameter[self, graph, target, ascendants, descendants]]:
constant[ Iterate over a graph to find connected objects
:param graph: Graph to serialize
:type graph: Graph
:param target: Node to iterate over
:type target: Node
:param ascendants: Number of levels to iterate over upwards (-1 = No Limit)
:param descendants: Number of levels to iterate over downwards (-1 = No limit)
:return:
]
variable[asc] assign[=] binary_operation[constant[0] + name[ascendants]]
if compare[name[asc] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18c4cf970>
variable[desc] assign[=] binary_operation[constant[0] + name[descendants]]
if compare[name[desc] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da20cabd7e0>
variable[t] assign[=] call[name[str], parameter[name[target]]]
if <ast.BoolOp object at 0x7da20cabfb50> begin[:]
call[name[self].downwards][name[t]] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da20cabf400>, <ast.Name object at 0x7da20cabcdf0>]]] in starred[call[name[graph].predicate_objects, parameter[name[target]]]] begin[:]
if <ast.BoolOp object at 0x7da20cabe860> begin[:]
continue
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da204621720>, <ast.Name object at 0x7da204622560>, <ast.Name object at 0x7da2046223b0>]]]]
if <ast.BoolOp object at 0x7da204622140> begin[:]
call[name[self].graphiter, parameter[name[graph]]]
if <ast.BoolOp object at 0x7da204622f20> begin[:]
call[name[self].updwards][name[t]] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da204620ee0>, <ast.Name object at 0x7da204621e40>]]] in starred[call[name[graph].subject_predicates, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da204622fb0> begin[:]
continue
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da2046236a0>, <ast.Name object at 0x7da204623fa0>, <ast.Name object at 0x7da2046236d0>]]]]
if <ast.BoolOp object at 0x7da204620370> begin[:]
call[name[self].graphiter, parameter[name[graph]]] | keyword[def] identifier[graphiter] ( identifier[self] , identifier[graph] , identifier[target] , identifier[ascendants] = literal[int] , identifier[descendants] = literal[int] ):
literal[string]
identifier[asc] = literal[int] + identifier[ascendants]
keyword[if] identifier[asc] != literal[int] :
identifier[asc] -= literal[int]
identifier[desc] = literal[int] + identifier[descendants]
keyword[if] identifier[desc] != literal[int] :
identifier[desc] -= literal[int]
identifier[t] = identifier[str] ( identifier[target] )
keyword[if] identifier[descendants] != literal[int] keyword[and] identifier[self] . identifier[downwards] [ identifier[t] ] keyword[is] keyword[True] :
identifier[self] . identifier[downwards] [ identifier[t] ]= keyword[False]
keyword[for] identifier[pred] , identifier[obj] keyword[in] identifier[graph] . identifier[predicate_objects] ( identifier[target] ):
keyword[if] identifier[desc] == literal[int] keyword[and] identifier[isinstance] ( identifier[obj] , identifier[BNode] ):
keyword[continue]
identifier[self] . identifier[add] (( identifier[target] , identifier[pred] , identifier[obj] ))
keyword[if] identifier[desc] != literal[int] keyword[and] identifier[self] . identifier[downwards] [ identifier[str] ( identifier[obj] )] keyword[is] keyword[True] :
identifier[self] . identifier[graphiter] ( identifier[graph] , identifier[target] = identifier[obj] , identifier[ascendants] = literal[int] , identifier[descendants] = identifier[desc] )
keyword[if] identifier[ascendants] != literal[int] keyword[and] identifier[self] . identifier[updwards] [ identifier[t] ] keyword[is] keyword[True] :
identifier[self] . identifier[updwards] [ identifier[t] ]= keyword[False]
keyword[for] identifier[s] , identifier[p] keyword[in] identifier[graph] . identifier[subject_predicates] ( identifier[object] = identifier[target] ):
keyword[if] identifier[desc] == literal[int] keyword[and] identifier[isinstance] ( identifier[s] , identifier[BNode] ):
keyword[continue]
identifier[self] . identifier[add] (( identifier[s] , identifier[p] , identifier[target] ))
keyword[if] identifier[asc] != literal[int] keyword[and] identifier[self] . identifier[updwards] [ identifier[str] ( identifier[s] )] keyword[is] keyword[True] :
identifier[self] . identifier[graphiter] ( identifier[graph] , identifier[target] = identifier[s] , identifier[ascendants] = identifier[asc] , identifier[descendants] = literal[int] ) | def graphiter(self, graph, target, ascendants=0, descendants=1):
""" Iter on a graph to finds object connected
:param graph: Graph to serialize
:type graph: Graph
:param target: Node to iterate over
:type target: Node
:param ascendants: Number of levels to iterate over upwards (-1 = No Limit)
:param descendants: Number of levels to iterate over downwards (-1 = No limit)
:return:
"""
asc = 0 + ascendants
if asc != 0:
asc -= 1 # depends on [control=['if'], data=['asc']]
desc = 0 + descendants
if desc != 0:
desc -= 1 # depends on [control=['if'], data=['desc']]
t = str(target)
if descendants != 0 and self.downwards[t] is True:
self.downwards[t] = False
for (pred, obj) in graph.predicate_objects(target):
if desc == 0 and isinstance(obj, BNode):
continue # depends on [control=['if'], data=[]]
self.add((target, pred, obj))
# Retrieve triples about the object
if desc != 0 and self.downwards[str(obj)] is True:
self.graphiter(graph, target=obj, ascendants=0, descendants=desc) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if ascendants != 0 and self.updwards[t] is True:
self.updwards[t] = False
for (s, p) in graph.subject_predicates(object=target):
if desc == 0 and isinstance(s, BNode):
continue # depends on [control=['if'], data=[]]
self.add((s, p, target))
# Retrieve triples about the parent as object
if asc != 0 and self.updwards[str(s)] is True:
self.graphiter(graph, target=s, ascendants=asc, descendants=0) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
def process_minter(value):
"""Load minter from PIDStore registry based on given value.
:param value: Name of the minter.
:returns: The minter.
"""
try:
return current_pidstore.minters[value]
except KeyError:
raise click.BadParameter(
'Unknown minter {0}. Please use one of {1}.'.format(
value, ', '.join(current_pidstore.minters.keys())
)
) | def function[process_minter, parameter[value]]:
constant[Load minter from PIDStore registry based on given value.
:param value: Name of the minter.
:returns: The minter.
]
<ast.Try object at 0x7da1afe1a4a0> | keyword[def] identifier[process_minter] ( identifier[value] ):
literal[string]
keyword[try] :
keyword[return] identifier[current_pidstore] . identifier[minters] [ identifier[value] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[click] . identifier[BadParameter] (
literal[string] . identifier[format] (
identifier[value] , literal[string] . identifier[join] ( identifier[current_pidstore] . identifier[minters] . identifier[keys] ())
)
) | def process_minter(value):
"""Load minter from PIDStore registry based on given value.
:param value: Name of the minter.
:returns: The minter.
"""
try:
return current_pidstore.minters[value] # depends on [control=['try'], data=[]]
except KeyError:
raise click.BadParameter('Unknown minter {0}. Please use one of {1}.'.format(value, ', '.join(current_pidstore.minters.keys()))) # depends on [control=['except'], data=[]] |
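A click wiring sketch: the resolver takes only the raw value, so a thin lambda adapts it to click's (ctx, param, value) callback signature; the option name is illustrative.

import click

@click.command()
@click.option('--pid-minter', default='recid',
              callback=lambda ctx, param, value: process_minter(value))
def load(pid_minter):
    """pid_minter arrives here as the resolved minter function."""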
def _init_w_transforms(data, features):
"""Initialize the mappings (Wi) for the SRM with random orthogonal matrices.
Parameters
----------
data : list of 2D arrays, element i has shape=[voxels_i, samples]
Each element in the list contains the fMRI data of one subject.
features : int
The number of features in the model.
Returns
-------
w : list of array, element i has shape=[voxels_i, features]
The initialized orthogonal transforms (mappings) :math:`W_i` for each
subject.
voxels : list of int
A list with the number of voxels per subject.
Note
----
This function assumes that the numpy random number generator was
initialized.
Not thread safe.
"""
w = []
subjects = len(data)
voxels = np.empty(subjects, dtype=int)
# Set Wi to a random orthogonal voxels by features matrix
for subject in range(subjects):
voxels[subject] = data[subject].shape[0]
rnd_matrix = np.random.random((voxels[subject], features))
q, r = np.linalg.qr(rnd_matrix)
w.append(q)
return w, voxels | def function[_init_w_transforms, parameter[data, features]]:
constant[Initialize the mappings (Wi) for the SRM with random orthogonal matrices.
Parameters
----------
data : list of 2D arrays, element i has shape=[voxels_i, samples]
Each element in the list contains the fMRI data of one subject.
features : int
The number of features in the model.
Returns
-------
w : list of array, element i has shape=[voxels_i, features]
The initialized orthogonal transforms (mappings) :math:`W_i` for each
subject.
voxels : list of int
A list with the number of voxels per subject.
Note
----
This function assumes that the numpy random number generator was
initialized.
Not thread safe.
]
variable[w] assign[=] list[[]]
variable[subjects] assign[=] call[name[len], parameter[name[data]]]
variable[voxels] assign[=] call[name[np].empty, parameter[name[subjects]]]
for taget[name[subject]] in starred[call[name[range], parameter[name[subjects]]]] begin[:]
call[name[voxels]][name[subject]] assign[=] call[call[name[data]][name[subject]].shape][constant[0]]
variable[rnd_matrix] assign[=] call[name[np].random.random, parameter[tuple[[<ast.Subscript object at 0x7da2047e96c0>, <ast.Name object at 0x7da2047eb0a0>]]]]
<ast.Tuple object at 0x7da2047e81f0> assign[=] call[name[np].linalg.qr, parameter[name[rnd_matrix]]]
call[name[w].append, parameter[name[q]]]
return[tuple[[<ast.Name object at 0x7da2047eb880>, <ast.Name object at 0x7da2047e9300>]]] | keyword[def] identifier[_init_w_transforms] ( identifier[data] , identifier[features] ):
literal[string]
identifier[w] =[]
identifier[subjects] = identifier[len] ( identifier[data] )
identifier[voxels] = identifier[np] . identifier[empty] ( identifier[subjects] , identifier[dtype] = identifier[int] )
keyword[for] identifier[subject] keyword[in] identifier[range] ( identifier[subjects] ):
identifier[voxels] [ identifier[subject] ]= identifier[data] [ identifier[subject] ]. identifier[shape] [ literal[int] ]
identifier[rnd_matrix] = identifier[np] . identifier[random] . identifier[random] (( identifier[voxels] [ identifier[subject] ], identifier[features] ))
identifier[q] , identifier[r] = identifier[np] . identifier[linalg] . identifier[qr] ( identifier[rnd_matrix] )
identifier[w] . identifier[append] ( identifier[q] )
keyword[return] identifier[w] , identifier[voxels] | def _init_w_transforms(data, features):
"""Initialize the mappings (Wi) for the SRM with random orthogonal matrices.
Parameters
----------
data : list of 2D arrays, element i has shape=[voxels_i, samples]
Each element in the list contains the fMRI data of one subject.
features : int
The number of features in the model.
Returns
-------
w : list of array, element i has shape=[voxels_i, features]
The initialized orthogonal transforms (mappings) :math:`W_i` for each
subject.
voxels : list of int
A list with the number of voxels per subject.
Note
----
This function assumes that the numpy random number generator was
initialized.
Not thread safe.
"""
w = []
subjects = len(data)
voxels = np.empty(subjects, dtype=int)
# Set Wi to a random orthogonal voxels by features matrix
for subject in range(subjects):
voxels[subject] = data[subject].shape[0]
rnd_matrix = np.random.random((voxels[subject], features))
(q, r) = np.linalg.qr(rnd_matrix)
w.append(q) # depends on [control=['for'], data=['subject']]
return (w, voxels) |
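A short usage sketch for the record above, assuming numpy is available and the function is in scope. Because each W_i is the Q factor of a QR factorization, its columns are orthonormal, which the assertion checks; the subject and feature counts below are arbitrary:

import numpy as np

np.random.seed(0)  # the docstring requires the RNG to be initialized first
data = [np.zeros((100, 50)), np.zeros((80, 50))]  # two subjects, 50 samples
w, voxels = _init_w_transforms(data, features=10)
assert list(voxels) == [100, 80]
for wi in w:
    # Orthonormal columns: W_i^T W_i is the 10x10 identity.
    assert np.allclose(wi.T @ wi, np.eye(10))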
def p_expression_uxor(self, p):
'expression : XOR expression %prec UXOR'
p[0] = Uxor(p[2], lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1)) | def function[p_expression_uxor, parameter[self, p]]:
constant[expression : XOR expression %prec UXOR]
call[name[p]][constant[0]] assign[=] call[name[Uxor], parameter[call[name[p]][constant[2]]]]
call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]] | keyword[def] identifier[p_expression_uxor] ( identifier[self] , identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[Uxor] ( identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] )) | def p_expression_uxor(self, p):
"""expression : XOR expression %prec UXOR"""
p[0] = Uxor(p[2], lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1)) |
def get_path(self, dir=None):
"""Return path relative to the current working directory of the
Node.FS.Base object that owns us."""
if not dir:
dir = self.fs.getcwd()
if self == dir:
return '.'
path_elems = self.get_path_elements()
pathname = ''
try: i = path_elems.index(dir)
except ValueError:
for p in path_elems[:-1]:
pathname += p.dirname
else:
for p in path_elems[i+1:-1]:
pathname += p.dirname
return pathname + path_elems[-1].name | def function[get_path, parameter[self, dir]]:
constant[Return path relative to the current working directory of the
Node.FS.Base object that owns us.]
if <ast.UnaryOp object at 0x7da20c6c79a0> begin[:]
variable[dir] assign[=] call[name[self].fs.getcwd, parameter[]]
if compare[name[self] equal[==] name[dir]] begin[:]
return[constant[.]]
variable[path_elems] assign[=] call[name[self].get_path_elements, parameter[]]
variable[pathname] assign[=] constant[]
<ast.Try object at 0x7da20c6c5870>
return[binary_operation[name[pathname] + call[name[path_elems]][<ast.UnaryOp object at 0x7da20c6c60b0>].name]] | keyword[def] identifier[get_path] ( identifier[self] , identifier[dir] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[dir] :
identifier[dir] = identifier[self] . identifier[fs] . identifier[getcwd] ()
keyword[if] identifier[self] == identifier[dir] :
keyword[return] literal[string]
identifier[path_elems] = identifier[self] . identifier[get_path_elements] ()
identifier[pathname] = literal[string]
keyword[try] : identifier[i] = identifier[path_elems] . identifier[index] ( identifier[dir] )
keyword[except] identifier[ValueError] :
keyword[for] identifier[p] keyword[in] identifier[path_elems] [:- literal[int] ]:
identifier[pathname] += identifier[p] . identifier[dirname]
keyword[else] :
keyword[for] identifier[p] keyword[in] identifier[path_elems] [ identifier[i] + literal[int] :- literal[int] ]:
identifier[pathname] += identifier[p] . identifier[dirname]
keyword[return] identifier[pathname] + identifier[path_elems] [- literal[int] ]. identifier[name] | def get_path(self, dir=None):
"""Return path relative to the current working directory of the
Node.FS.Base object that owns us."""
if not dir:
dir = self.fs.getcwd() # depends on [control=['if'], data=[]]
if self == dir:
return '.' # depends on [control=['if'], data=[]]
path_elems = self.get_path_elements()
pathname = ''
try:
i = path_elems.index(dir) # depends on [control=['try'], data=[]]
except ValueError:
for p in path_elems[:-1]:
pathname += p.dirname # depends on [control=['for'], data=['p']] # depends on [control=['except'], data=[]]
else:
for p in path_elems[i + 1:-1]:
pathname += p.dirname # depends on [control=['for'], data=['p']]
return pathname + path_elems[-1].name |
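A toy stand-in illustrating the path-building logic above: walk the element list from the node after the reference directory, accumulate each intermediate dirname, and finish with the leaf's name. The class below is a simplification for illustration, not SCons's real Node.FS.Base:

class ToyNode:
    def __init__(self, name, parent=None):
        self.name = name
        self.parent = parent
        self.dirname = name + '/'  # like SCons, dirname carries the separator

    def get_path_elements(self):
        elems, node = [], self
        while node is not None:
            elems.insert(0, node)
            node = node.parent
        return elems

root = ToyNode('src')
pkg = ToyNode('pkg', root)
leaf = ToyNode('mod.py', pkg)

elems = leaf.get_path_elements()
i = elems.index(root)  # the try/except branch above handles a missing dir
path = ''.join(p.dirname for p in elems[i + 1:-1]) + elems[-1].name
print(path)  # pkg/mod.py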
def get_edxml(self):
"""stub"""
if self.has_raw_edxml():
has_python = False
my_files = self.my_osid_object.object_map['fileIds']
raw_text = self.get_text('edxml').text
soup = BeautifulSoup(raw_text, 'xml')
# replace all file listings with an appropriate path...
attrs = {
'draggable': 'icon',
'drag_and_drop_input': 'img',
'files': 'included_files',
'img': 'src'
}
local_regex = re.compile('[^http]')
for key, attr in attrs.items():
search = {attr: local_regex}
tags = soup.find_all(**search)
for item in tags:
if key == 'files' or item.name == key:
file_label = self._clean(item[attr])
if file_label in my_files:
content_type = Id(my_files[file_label]['assetContentTypeId'])
item[attr] = '/static/' + file_label + '.' + \
content_type.get_identifier()
# replace any python script with the item's get_text('python_script')
# text...will fix weird whitespace issues
if len(soup.find_all('script')) >= 1:
scripts = soup.find_all('script')
for script in scripts:
if 'python' in script['type']:
has_python = True
# contents = script.contents[0]
# contents.replaceWith(str(NavigableString(self.python)))
break
try:
if has_python:
return str(soup.find('problem'))
else:
return soup.find('problem').prettify()
except Exception:
# if the edxml is not valid XML, it will not parse properly in soup
# return just the raw edxml
return self.get_text('edxml').text
else:
# have to construct the edxml from various components
obj_map = self.my_osid_object.object_map
question = obj_map['question']
answers = obj_map['answers']
if 'edx-multi-choice-problem-type' in obj_map['genusTypeId']:
# get answer Ids to compare them to the choices
answer_ids = []
for answer in answers:
answer_ids += answer['choiceIds']
# add the body text element (item.question.text)
soup = BeautifulSoup('<problem></problem>', 'xml')
p = soup.new_tag('p')
p.string = self.get_text('questionString').text
problem = soup.find('problem')
problem.append(p)
# add the metadata
problem['display_name'] = question['displayName']['text']
problem['showanswer'] = self.showanswer
if 'rerandomize' in obj_map:
problem['rerandomize'] = obj_map['rerandomize']
elif 'rerandomize' in question:
problem['rerandomize'] = question['rerandomize']
problem['max_attempts'] = self.attempts
# add the choices
multichoice = soup.new_tag('multiplechoiceresponse')
problem.append(multichoice)
choicegroup = soup.new_tag('choicegroup')
choicegroup['direction'] = 'vertical'
multichoice.append(choicegroup)
choices = question['choices']
for choice in choices:
new_choice = soup.new_tag('choice')
# mark the correct choice(s)
if choice['id'] in answer_ids:
new_choice['correct'] = 'true'
else:
new_choice['correct'] = 'false'
new_choice['name'] = choice['name']
choice_text = soup.new_tag('text')
choice_text.string = choice['text']
new_choice.append(choice_text)
choicegroup.append(new_choice)
return problem.prettify()
raise IllegalState('records.assessment.edx.item_records.get_edxml()') | def function[get_edxml, parameter[self]]:
constant[stub]
if call[name[self].has_raw_edxml, parameter[]] begin[:]
variable[has_python] assign[=] constant[False]
variable[my_files] assign[=] call[name[self].my_osid_object.object_map][constant[fileIds]]
variable[raw_text] assign[=] call[name[self].get_text, parameter[constant[edxml]]].text
variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[raw_text], constant[xml]]]
variable[attrs] assign[=] dictionary[[<ast.Constant object at 0x7da1b26aef50>, <ast.Constant object at 0x7da1b26ae980>, <ast.Constant object at 0x7da1b26acb50>, <ast.Constant object at 0x7da1b26af730>], [<ast.Constant object at 0x7da1b26af820>, <ast.Constant object at 0x7da1b26ac3a0>, <ast.Constant object at 0x7da1b26afe50>, <ast.Constant object at 0x7da1b26ace80>]]
variable[local_regex] assign[=] call[name[re].compile, parameter[constant[[^http]]]]
for taget[tuple[[<ast.Name object at 0x7da1b26ae5c0>, <ast.Name object at 0x7da1b26ace20>]]] in starred[call[name[attrs].items, parameter[]]] begin[:]
variable[search] assign[=] dictionary[[<ast.Name object at 0x7da1b26ac9a0>], [<ast.Name object at 0x7da1b26af130>]]
variable[tags] assign[=] call[name[soup].find_all, parameter[]]
for taget[name[item]] in starred[name[tags]] begin[:]
if <ast.BoolOp object at 0x7da1b26ae560> begin[:]
variable[file_label] assign[=] call[name[self]._clean, parameter[call[name[item]][name[attr]]]]
if compare[name[file_label] in name[my_files]] begin[:]
variable[content_type] assign[=] call[name[Id], parameter[call[call[name[my_files]][name[file_label]]][constant[assetContentTypeId]]]]
call[name[item]][name[attr]] assign[=] binary_operation[binary_operation[binary_operation[constant[/static/] + name[file_label]] + constant[.]] + call[name[content_type].get_identifier, parameter[]]]
if compare[call[name[len], parameter[call[name[soup].find_all, parameter[constant[script]]]]] greater_or_equal[>=] constant[1]] begin[:]
variable[scripts] assign[=] call[name[soup].find_all, parameter[constant[script]]]
for taget[name[script]] in starred[name[scripts]] begin[:]
if compare[constant[python] in call[name[script]][constant[type]]] begin[:]
variable[has_python] assign[=] constant[True]
break
<ast.Try object at 0x7da1b26aca30>
<ast.Raise object at 0x7da1b09165f0> | keyword[def] identifier[get_edxml] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[has_raw_edxml] ():
identifier[has_python] = keyword[False]
identifier[my_files] = identifier[self] . identifier[my_osid_object] . identifier[object_map] [ literal[string] ]
identifier[raw_text] = identifier[self] . identifier[get_text] ( literal[string] ). identifier[text]
identifier[soup] = identifier[BeautifulSoup] ( identifier[raw_text] , literal[string] )
identifier[attrs] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[local_regex] = identifier[re] . identifier[compile] ( literal[string] )
keyword[for] identifier[key] , identifier[attr] keyword[in] identifier[attrs] . identifier[items] ():
identifier[search] ={ identifier[attr] : identifier[local_regex] }
identifier[tags] = identifier[soup] . identifier[find_all] (** identifier[search] )
keyword[for] identifier[item] keyword[in] identifier[tags] :
keyword[if] identifier[key] == literal[string] keyword[or] identifier[item] . identifier[name] == identifier[key] :
identifier[file_label] = identifier[self] . identifier[_clean] ( identifier[item] [ identifier[attr] ])
keyword[if] identifier[file_label] keyword[in] identifier[my_files] :
identifier[content_type] = identifier[Id] ( identifier[my_files] [ identifier[file_label] ][ literal[string] ])
identifier[item] [ identifier[attr] ]= literal[string] + identifier[file_label] + literal[string] + identifier[content_type] . identifier[get_identifier] ()
keyword[if] identifier[len] ( identifier[soup] . identifier[find_all] ( literal[string] ))>= literal[int] :
identifier[scripts] = identifier[soup] . identifier[find_all] ( literal[string] )
keyword[for] identifier[script] keyword[in] identifier[scripts] :
keyword[if] literal[string] keyword[in] identifier[script] [ literal[string] ]:
identifier[has_python] = keyword[True]
keyword[break]
keyword[try] :
keyword[if] identifier[has_python] :
keyword[return] identifier[str] ( identifier[soup] . identifier[find] ( literal[string] ))
keyword[else] :
keyword[return] identifier[soup] . identifier[find] ( literal[string] ). identifier[prettify] ()
keyword[except] identifier[Exception] :
keyword[return] identifier[self] . identifier[get_text] ( literal[string] ). identifier[text]
keyword[else] :
identifier[obj_map] = identifier[self] . identifier[my_osid_object] . identifier[object_map]
identifier[question] = identifier[obj_map] [ literal[string] ]
identifier[answers] = identifier[obj_map] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[obj_map] [ literal[string] ]:
identifier[answer_ids] =[]
keyword[for] identifier[answer] keyword[in] identifier[answers] :
identifier[answer_ids] += identifier[answer] [ literal[string] ]
identifier[soup] = identifier[BeautifulSoup] ( literal[string] , literal[string] )
identifier[p] = identifier[soup] . identifier[new_tag] ( literal[string] )
identifier[p] . identifier[string] = identifier[self] . identifier[get_text] ( literal[string] ). identifier[text]
identifier[problem] = identifier[soup] . identifier[find] ( literal[string] )
identifier[problem] . identifier[append] ( identifier[p] )
identifier[problem] [ literal[string] ]= identifier[question] [ literal[string] ][ literal[string] ]
identifier[problem] [ literal[string] ]= identifier[self] . identifier[showanswer]
keyword[if] literal[string] keyword[in] identifier[obj_map] :
identifier[problem] [ literal[string] ]= identifier[obj_map] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[question] :
identifier[problem] [ literal[string] ]= identifier[question] [ literal[string] ]
identifier[problem] [ literal[string] ]= identifier[self] . identifier[attempts]
identifier[multichoice] = identifier[soup] . identifier[new_tag] ( literal[string] )
identifier[problem] . identifier[append] ( identifier[multichoice] )
identifier[choicegroup] = identifier[soup] . identifier[new_tag] ( literal[string] )
identifier[choicegroup] [ literal[string] ]= literal[string]
identifier[multichoice] . identifier[append] ( identifier[choicegroup] )
identifier[choices] = identifier[question] [ literal[string] ]
keyword[for] identifier[choice] keyword[in] identifier[choices] :
identifier[new_choice] = identifier[soup] . identifier[new_tag] ( literal[string] )
keyword[if] identifier[choice] [ literal[string] ] keyword[in] identifier[answer_ids] :
identifier[new_choice] [ literal[string] ]= literal[string]
keyword[else] :
identifier[new_choice] [ literal[string] ]= literal[string]
identifier[new_choice] [ literal[string] ]= identifier[choice] [ literal[string] ]
identifier[choice_text] = identifier[soup] . identifier[new_tag] ( literal[string] )
identifier[choice_text] . identifier[string] = identifier[choice] [ literal[string] ]
identifier[new_choice] . identifier[append] ( identifier[choice_text] )
identifier[choicegroup] . identifier[append] ( identifier[new_choice] )
keyword[return] identifier[problem] . identifier[prettify] ()
keyword[raise] identifier[IllegalState] ( literal[string] ) | def get_edxml(self):
"""stub"""
if self.has_raw_edxml():
has_python = False
my_files = self.my_osid_object.object_map['fileIds']
raw_text = self.get_text('edxml').text
soup = BeautifulSoup(raw_text, 'xml')
# replace all file listings with an appropriate path...
attrs = {'draggable': 'icon', 'drag_and_drop_input': 'img', 'files': 'included_files', 'img': 'src'}
local_regex = re.compile('[^http]')
for (key, attr) in attrs.items():
search = {attr: local_regex}
tags = soup.find_all(**search)
for item in tags:
if key == 'files' or item.name == key:
file_label = self._clean(item[attr])
if file_label in my_files:
content_type = Id(my_files[file_label]['assetContentTypeId'])
item[attr] = '/static/' + file_label + '.' + content_type.get_identifier() # depends on [control=['if'], data=['file_label', 'my_files']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=[]]
# replace any python script with the item's get_text('python_script')
# text...will fix weird whitespace issues
if len(soup.find_all('script')) >= 1:
scripts = soup.find_all('script')
for script in scripts:
if 'python' in script['type']:
has_python = True
# contents = script.contents[0]
# contents.replaceWith(str(NavigableString(self.python)))
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['script']] # depends on [control=['if'], data=[]]
try:
if has_python:
return str(soup.find('problem')) # depends on [control=['if'], data=[]]
else:
return soup.find('problem').prettify() # depends on [control=['try'], data=[]]
except Exception:
# if the edxml is not valid XML, it will not parse properly in soup
# return just the raw edxml
return self.get_text('edxml').text # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
# have to construct the edxml from various components
obj_map = self.my_osid_object.object_map
question = obj_map['question']
answers = obj_map['answers']
if 'edx-multi-choice-problem-type' in obj_map['genusTypeId']:
# get answer Ids to compare them to the choices
answer_ids = []
for answer in answers:
answer_ids += answer['choiceIds'] # depends on [control=['for'], data=['answer']]
# add the body text element (item.question.text)
soup = BeautifulSoup('<problem></problem>', 'xml')
p = soup.new_tag('p')
p.string = self.get_text('questionString').text
problem = soup.find('problem')
problem.append(p)
# add the metadata
problem['display_name'] = question['displayName']['text']
problem['showanswer'] = self.showanswer
if 'rerandomize' in obj_map:
problem['rerandomize'] = obj_map['rerandomize'] # depends on [control=['if'], data=['obj_map']]
elif 'rerandomize' in question:
problem['rerandomize'] = question['rerandomize'] # depends on [control=['if'], data=['question']]
problem['max_attempts'] = self.attempts
# add the choices
multichoice = soup.new_tag('multiplechoiceresponse')
problem.append(multichoice)
choicegroup = soup.new_tag('choicegroup')
choicegroup['direction'] = 'vertical'
multichoice.append(choicegroup)
choices = question['choices']
for choice in choices:
new_choice = soup.new_tag('choice')
# mark the correct choice(s)
if choice['id'] in answer_ids:
new_choice['correct'] = 'true' # depends on [control=['if'], data=[]]
else:
new_choice['correct'] = 'false'
new_choice['name'] = choice['name']
choice_text = soup.new_tag('text')
choice_text.string = choice['text']
new_choice.append(choice_text)
choicegroup.append(new_choice) # depends on [control=['for'], data=['choice']]
return problem.prettify() # depends on [control=['if'], data=[]]
raise IllegalState('records.assessment.edx.item_records.get_edxml()') |
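A small runnable sketch of the BeautifulSoup idiom used above for finding tags whose attribute looks local rather than a URL. Note that re.compile('[^http]') is a character class, so find_all matches any attribute value containing a character other than h, t, or p, which is looser than a strict "does not start with http" test; the sample markup here is invented:

import re
from bs4 import BeautifulSoup

xml = '<problem><img src="chart_1"/><draggable icon="dot_5"/></problem>'
soup = BeautifulSoup(xml, 'xml')  # the 'xml' parser requires lxml

# find_all accepts {attribute: compiled_regex} searches via keyword args.
for tag in soup.find_all(src=re.compile('[^http]')):
    tag['src'] = '/static/' + tag['src'] + '.png'
print(soup.find('problem'))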
def get_kubernetes_configuration(self, mount_point='kubernetes'):
"""GET /auth/<mount_point>/config
:param mount_point: The "path" the k8s auth backend was mounted on. Vault currently defaults to "kubernetes".
:type mount_point: str.
:return: Parsed JSON response from the config GET request
:rtype: dict.
"""
url = '/v1/auth/{0}/config'.format(mount_point)
return self._adapter.get(url).json() | def function[get_kubernetes_configuration, parameter[self, mount_point]]:
constant[GET /auth/<mount_point>/config
:param mount_point: The "path" the k8s auth backend was mounted on. Vault currently defaults to "kubernetes".
:type mount_point: str.
:return: Parsed JSON response from the config GET request
:rtype: dict.
]
variable[url] assign[=] call[constant[/v1/auth/{0}/config].format, parameter[name[mount_point]]]
return[call[call[name[self]._adapter.get, parameter[name[url]]].json, parameter[]]] | keyword[def] identifier[get_kubernetes_configuration] ( identifier[self] , identifier[mount_point] = literal[string] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[mount_point] )
keyword[return] identifier[self] . identifier[_adapter] . identifier[get] ( identifier[url] ). identifier[json] () | def get_kubernetes_configuration(self, mount_point='kubernetes'):
"""GET /auth/<mount_point>/config
:param mount_point: The "path" the k8s auth backend was mounted on. Vault currently defaults to "kubernetes".
:type mount_point: str.
:return: Parsed JSON response from the config GET request
:rtype: dict.
"""
url = '/v1/auth/{0}/config'.format(mount_point)
return self._adapter.get(url).json() |
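The method is a thin wrapper over an authenticated GET. A minimal equivalent with requests, where the server address and token are assumptions for a local Vault dev setup, not values from the source:

import requests

VAULT_ADDR = 'http://127.0.0.1:8200'  # assumed dev server
VAULT_TOKEN = 'root'                  # assumed dev token
mount_point = 'kubernetes'

# Same URL shape as above: /v1/auth/<mount_point>/config
resp = requests.get(
    '{0}/v1/auth/{1}/config'.format(VAULT_ADDR, mount_point),
    headers={'X-Vault-Token': VAULT_TOKEN},
)
print(resp.json())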
def load_acknowledge_config(self, file_id):
"""
Loads the CWR acknowledge config
:return: the values matrix
"""
if self._cwr_defaults is None:
self._cwr_defaults = self._reader.read_yaml_file(
'acknowledge_config_%s.yml' % file_id)
return self._cwr_defaults | def function[load_acknowledge_config, parameter[self, file_id]]:
constant[
Loads the CWR acknowledge config
:return: the values matrix
]
if compare[name[self]._cwr_defaults is constant[None]] begin[:]
name[self]._cwr_defaults assign[=] call[name[self]._reader.read_yaml_file, parameter[binary_operation[constant[acknowledge_config_%s.yml] <ast.Mod object at 0x7da2590d6920> name[file_id]]]]
return[name[self]._cwr_defaults] | keyword[def] identifier[load_acknowledge_config] ( identifier[self] , identifier[file_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_cwr_defaults] keyword[is] keyword[None] :
identifier[self] . identifier[_cwr_defaults] = identifier[self] . identifier[_reader] . identifier[read_yaml_file] (
literal[string] % identifier[file_id] )
keyword[return] identifier[self] . identifier[_cwr_defaults] | def load_acknowledge_config(self, file_id):
"""
Loads the CWR acknowledge config
:return: the values matrix
"""
if self._cwr_defaults is None:
self._cwr_defaults = self._reader.read_yaml_file('acknowledge_config_%s.yml' % file_id) # depends on [control=['if'], data=[]]
return self._cwr_defaults |
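The method above is a lazy-load cache: the YAML file is read on first access and memoized on the instance. A generic sketch of the pattern with a stubbed reader (the stub is an assumption); note that, like the original, the cache ignores the file_id of later calls once populated:

class ConfigSource:
    def __init__(self, reader):
        self._reader = reader
        self._cache = None

    def load(self, file_id):
        # Read on first access only; subsequent calls reuse the cache.
        if self._cache is None:
            self._cache = self._reader('acknowledge_config_%s.yml' % file_id)
        return self._cache

src = ConfigSource(lambda name: {'source': name})
print(src.load('21'))  # triggers the read
print(src.load('99'))  # served from cache, still the '21' config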
def load(self, profile_args):
"""Load provided CLI Args.
Args:
        profile_args (dict): Dictionary of args in key/value format.
"""
for key, value in profile_args.items():
self.add(key, value) | def function[load, parameter[self, profile_args]]:
constant[Load provided CLI Args.
Args:
        profile_args (dict): Dictionary of args in key/value format.
]
for taget[tuple[[<ast.Name object at 0x7da2041d9900>, <ast.Name object at 0x7da2041d89d0>]]] in starred[call[name[profile_args].items, parameter[]]] begin[:]
call[name[self].add, parameter[name[key], name[value]]] | keyword[def] identifier[load] ( identifier[self] , identifier[profile_args] ):
literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[profile_args] . identifier[items] ():
identifier[self] . identifier[add] ( identifier[key] , identifier[value] ) | def load(self, profile_args):
"""Load provided CLI Args.
Args:
        profile_args (dict): Dictionary of args in key/value format.
"""
for (key, value) in profile_args.items():
self.add(key, value) # depends on [control=['for'], data=[]] |
def em_rates_from_E_DA_mix(em_rates_tot, E_values):
"""D and A emission rates for two populations.
"""
em_rates_d, em_rates_a = [], []
for em_rate_tot, E_value in zip(em_rates_tot, E_values):
em_rate_di, em_rate_ai = em_rates_from_E_DA(em_rate_tot, E_value)
em_rates_d.append(em_rate_di)
em_rates_a.append(em_rate_ai)
return em_rates_d, em_rates_a | def function[em_rates_from_E_DA_mix, parameter[em_rates_tot, E_values]]:
constant[D and A emission rates for two populations.
]
<ast.Tuple object at 0x7da204347340> assign[=] tuple[[<ast.List object at 0x7da204347f40>, <ast.List object at 0x7da2043457b0>]]
for taget[tuple[[<ast.Name object at 0x7da2041db640>, <ast.Name object at 0x7da2041d9c90>]]] in starred[call[name[zip], parameter[name[em_rates_tot], name[E_values]]]] begin[:]
<ast.Tuple object at 0x7da2041d9000> assign[=] call[name[em_rates_from_E_DA], parameter[name[em_rate_tot], name[E_value]]]
call[name[em_rates_d].append, parameter[name[em_rate_di]]]
call[name[em_rates_a].append, parameter[name[em_rate_ai]]]
return[tuple[[<ast.Name object at 0x7da2041db730>, <ast.Name object at 0x7da2041d9270>]]] | keyword[def] identifier[em_rates_from_E_DA_mix] ( identifier[em_rates_tot] , identifier[E_values] ):
literal[string]
identifier[em_rates_d] , identifier[em_rates_a] =[],[]
keyword[for] identifier[em_rate_tot] , identifier[E_value] keyword[in] identifier[zip] ( identifier[em_rates_tot] , identifier[E_values] ):
identifier[em_rate_di] , identifier[em_rate_ai] = identifier[em_rates_from_E_DA] ( identifier[em_rate_tot] , identifier[E_value] )
identifier[em_rates_d] . identifier[append] ( identifier[em_rate_di] )
identifier[em_rates_a] . identifier[append] ( identifier[em_rate_ai] )
keyword[return] identifier[em_rates_d] , identifier[em_rates_a] | def em_rates_from_E_DA_mix(em_rates_tot, E_values):
"""D and A emission rates for two populations.
"""
(em_rates_d, em_rates_a) = ([], [])
for (em_rate_tot, E_value) in zip(em_rates_tot, E_values):
(em_rate_di, em_rate_ai) = em_rates_from_E_DA(em_rate_tot, E_value)
em_rates_d.append(em_rate_di)
em_rates_a.append(em_rate_ai) # depends on [control=['for'], data=[]]
return (em_rates_d, em_rates_a) |
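The helper em_rates_from_E_DA is not shown in this record; under the conventional FRET relation (donor rate = total * (1 - E), acceptor rate = total * E) a compatible sketch and a usage example would be:

def em_rates_from_E_DA(em_rate_tot, E_value):
    # Assumed split of a total emission rate by FRET efficiency E;
    # the real helper is defined elsewhere in the package.
    return em_rate_tot * (1 - E_value), em_rate_tot * E_value

em_rates_d, em_rates_a = em_rates_from_E_DA_mix([200e3, 180e3], [0.75, 0.4])
print(em_rates_d)  # [50000.0, 108000.0]
print(em_rates_a)  # [150000.0, 72000.0]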
def scan_mem(self, data_to_find):
"""
Scan for concrete bytes in all mapped memory. Successively yield addresses of all matches.
:param bytes data_to_find: String to locate
:return:
"""
# TODO: for the moment we just treat symbolic bytes as bytes that don't match.
# for our simple test cases right now, the bytes we're interested in scanning
# for will all just be there concretely
# TODO: Can probably do something smarter here like Boyer-Moore, but unnecessary
# if we're looking for short strings.
# Querying mem with an index returns [bytes]
if isinstance(data_to_find, bytes):
data_to_find = [bytes([c]) for c in data_to_find]
for mapping in sorted(self.maps):
for ptr in mapping:
if ptr + len(data_to_find) >= mapping.end:
break
candidate = mapping[ptr:ptr + len(data_to_find)]
# TODO: treat symbolic bytes as bytes that don't match. for our simple tests right now, the
# bytes will be there concretely
if issymbolic(candidate[0]):
break
if candidate == data_to_find:
yield ptr | def function[scan_mem, parameter[self, data_to_find]]:
constant[
Scan for concrete bytes in all mapped memory. Successively yield addresses of all matches.
:param bytes data_to_find: String to locate
:return:
]
if call[name[isinstance], parameter[name[data_to_find], name[bytes]]] begin[:]
variable[data_to_find] assign[=] <ast.ListComp object at 0x7da1b000eb00>
for taget[name[mapping]] in starred[call[name[sorted], parameter[name[self].maps]]] begin[:]
for taget[name[ptr]] in starred[name[mapping]] begin[:]
if compare[binary_operation[name[ptr] + call[name[len], parameter[name[data_to_find]]]] greater_or_equal[>=] name[mapping].end] begin[:]
break
variable[candidate] assign[=] call[name[mapping]][<ast.Slice object at 0x7da1b000f400>]
if call[name[issymbolic], parameter[call[name[candidate]][constant[0]]]] begin[:]
break
if compare[name[candidate] equal[==] name[data_to_find]] begin[:]
<ast.Yield object at 0x7da1b000f640> | keyword[def] identifier[scan_mem] ( identifier[self] , identifier[data_to_find] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data_to_find] , identifier[bytes] ):
identifier[data_to_find] =[ identifier[bytes] ([ identifier[c] ]) keyword[for] identifier[c] keyword[in] identifier[data_to_find] ]
keyword[for] identifier[mapping] keyword[in] identifier[sorted] ( identifier[self] . identifier[maps] ):
keyword[for] identifier[ptr] keyword[in] identifier[mapping] :
keyword[if] identifier[ptr] + identifier[len] ( identifier[data_to_find] )>= identifier[mapping] . identifier[end] :
keyword[break]
identifier[candidate] = identifier[mapping] [ identifier[ptr] : identifier[ptr] + identifier[len] ( identifier[data_to_find] )]
keyword[if] identifier[issymbolic] ( identifier[candidate] [ literal[int] ]):
keyword[break]
keyword[if] identifier[candidate] == identifier[data_to_find] :
keyword[yield] identifier[ptr] | def scan_mem(self, data_to_find):
"""
Scan for concrete bytes in all mapped memory. Successively yield addresses of all matches.
:param bytes data_to_find: String to locate
:return:
"""
# TODO: for the moment we just treat symbolic bytes as bytes that don't match.
# for our simple test cases right now, the bytes we're interested in scanning
# for will all just be there concretely
# TODO: Can probably do something smarter here like Boyer-Moore, but unnecessary
# if we're looking for short strings.
# Querying mem with an index returns [bytes]
if isinstance(data_to_find, bytes):
data_to_find = [bytes([c]) for c in data_to_find] # depends on [control=['if'], data=[]]
for mapping in sorted(self.maps):
for ptr in mapping:
if ptr + len(data_to_find) >= mapping.end:
break # depends on [control=['if'], data=[]]
candidate = mapping[ptr:ptr + len(data_to_find)]
# TODO: treat symbolic bytes as bytes that don't match. for our simple tests right now, the
# bytes will be there concretely
if issymbolic(candidate[0]):
break # depends on [control=['if'], data=[]]
if candidate == data_to_find:
yield ptr # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ptr']] # depends on [control=['for'], data=['mapping']] |
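The scan above is a naive sliding-window comparison. A self-contained version over a plain bytes buffer shows the same yielding behavior without the memory-map and symbolic-byte machinery:

def scan_bytes(buf, needle):
    # Yield every offset at which `needle` occurs in `buf`, mirroring
    # the window comparison in scan_mem above.
    for ptr in range(len(buf)):
        if ptr + len(needle) > len(buf):
            break
        if buf[ptr:ptr + len(needle)] == needle:
            yield ptr

print(list(scan_bytes(b'/bin/sh\x00/bin/sh', b'/bin/sh')))  # [0, 8]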
def _try_parse_datetime(time_str, fmts):
'''
A helper function that attempts to parse the input time_str as a date.
Args:
time_str (str): A string representing the time
fmts (list): A list of date format strings
Returns:
datetime: Returns a datetime object if parsed properly, otherwise None
'''
result = None
for fmt in fmts:
try:
result = datetime.strptime(time_str, fmt)
break
except ValueError:
pass
return result | def function[_try_parse_datetime, parameter[time_str, fmts]]:
constant[
A helper function that attempts to parse the input time_str as a date.
Args:
time_str (str): A string representing the time
fmts (list): A list of date format strings
Returns:
datetime: Returns a datetime object if parsed properly, otherwise None
]
variable[result] assign[=] constant[None]
for taget[name[fmt]] in starred[name[fmts]] begin[:]
<ast.Try object at 0x7da18bc73e50>
return[name[result]] | keyword[def] identifier[_try_parse_datetime] ( identifier[time_str] , identifier[fmts] ):
literal[string]
identifier[result] = keyword[None]
keyword[for] identifier[fmt] keyword[in] identifier[fmts] :
keyword[try] :
identifier[result] = identifier[datetime] . identifier[strptime] ( identifier[time_str] , identifier[fmt] )
keyword[break]
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] identifier[result] | def _try_parse_datetime(time_str, fmts):
"""
A helper function that attempts to parse the input time_str as a date.
Args:
time_str (str): A string representing the time
fmts (list): A list of date format strings
Returns:
datetime: Returns a datetime object if parsed properly, otherwise None
"""
result = None
for fmt in fmts:
try:
result = datetime.strptime(time_str, fmt)
break # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['fmt']]
return result |
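A quick usage sketch, assuming the function above is in scope: the first format that parses wins, and None comes back when none match:

fmts = ['%Y-%m-%d %H:%M:%S', '%m/%d/%Y']
print(_try_parse_datetime('2019-07-04 08:30:00', fmts))  # 2019-07-04 08:30:00
print(_try_parse_datetime('07/04/2019', fmts))           # 2019-07-04 00:00:00
print(_try_parse_datetime('not a date', fmts))           # None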
def save_image(xdata: DataAndMetadata.DataAndMetadata, file):
"""
Saves the nparray data to the file-like object (or string) file.
"""
# we need to create a basic DM tree suitable for an image
    # we'll try the minimum: just a data list
    # doesn't work. Do we need an ImageSourceList too?
# and a DocumentObjectList?
data = xdata.data
data_descriptor = xdata.data_descriptor
dimensional_calibrations = xdata.dimensional_calibrations
intensity_calibration = xdata.intensity_calibration
metadata = xdata.metadata
modified = xdata.timestamp
timezone = xdata.timezone
timezone_offset = xdata.timezone_offset
needs_slice = False
is_sequence = False
if len(data.shape) == 3 and data.dtype != numpy.uint8 and data_descriptor.datum_dimension_count == 1:
data = numpy.moveaxis(data, 2, 0)
dimensional_calibrations = (dimensional_calibrations[2],) + tuple(dimensional_calibrations[0:2])
if len(data.shape) == 2 and data.dtype != numpy.uint8 and data_descriptor.datum_dimension_count == 1:
is_sequence = data_descriptor.is_sequence
data = numpy.moveaxis(data, 1, 0)
data = numpy.expand_dims(data, axis=1)
dimensional_calibrations = (dimensional_calibrations[1], Calibration.Calibration(), dimensional_calibrations[0])
data_descriptor = DataAndMetadata.DataDescriptor(False, 2, 1)
needs_slice = True
data_dict = ndarray_to_imagedatadict(data)
ret = {}
ret["ImageList"] = [{"ImageData": data_dict}]
if dimensional_calibrations and len(dimensional_calibrations) == len(data.shape):
dimension_list = data_dict.setdefault("Calibrations", dict()).setdefault("Dimension", list())
for dimensional_calibration in reversed(dimensional_calibrations):
dimension = dict()
if dimensional_calibration.scale != 0.0:
origin = -dimensional_calibration.offset / dimensional_calibration.scale
else:
origin = 0.0
dimension['Origin'] = origin
dimension['Scale'] = dimensional_calibration.scale
dimension['Units'] = dimensional_calibration.units
dimension_list.append(dimension)
if intensity_calibration:
if intensity_calibration.scale != 0.0:
origin = -intensity_calibration.offset / intensity_calibration.scale
else:
origin = 0.0
brightness = data_dict.setdefault("Calibrations", dict()).setdefault("Brightness", dict())
brightness['Origin'] = origin
brightness['Scale'] = intensity_calibration.scale
brightness['Units'] = intensity_calibration.units
if modified:
timezone_str = None
if timezone_str is None and timezone:
try:
import pytz
tz = pytz.timezone(timezone)
timezone_str = tz.tzname(modified)
except:
pass
if timezone_str is None and timezone_offset:
timezone_str = timezone_offset
timezone_str = " " + timezone_str if timezone_str is not None else ""
date_str = modified.strftime("%x")
time_str = modified.strftime("%X") + timezone_str
ret["DataBar"] = {"Acquisition Date": date_str, "Acquisition Time": time_str}
# I think ImageSource list creates a mapping between ImageSourceIds and Images
ret["ImageSourceList"] = [{"ClassName": "ImageSource:Simple", "Id": [0], "ImageRef": 0}]
    # I think this lists the sources for the DocumentObjectList. The source number is not
    # the index in the imagelist but is either the index in the ImageSourceList or the Id
    # from that list. We also need to set the annotation type to identify it as data
ret["DocumentObjectList"] = [{"ImageSource": 0, "AnnotationType": 20}]
# finally some display options
ret["Image Behavior"] = {"ViewDisplayID": 8}
dm_metadata = copy.deepcopy(metadata)
if metadata.get("hardware_source", dict()).get("signal_type", "").lower() == "eels":
if len(data.shape) == 1 or (len(data.shape) == 2 and data.shape[0] == 1):
dm_metadata.setdefault("Meta Data", dict())["Format"] = "Spectrum"
dm_metadata.setdefault("Meta Data", dict())["Signal"] = "EELS"
elif data_descriptor.collection_dimension_count == 2 and data_descriptor.datum_dimension_count == 1:
dm_metadata.setdefault("Meta Data", dict())["Format"] = "Spectrum image"
dm_metadata.setdefault("Meta Data", dict())["Signal"] = "EELS"
elif data_descriptor.datum_dimension_count == 1:
dm_metadata.setdefault("Meta Data", dict())["Format"] = "Spectrum"
if (1 if data_descriptor.is_sequence else 0) + data_descriptor.collection_dimension_count == 1 or needs_slice:
if data_descriptor.is_sequence or is_sequence:
dm_metadata.setdefault("Meta Data", dict())["IsSequence"] = True
ret["ImageSourceList"] = [{"ClassName": "ImageSource:Summed", "Do Sum": True, "Id": [0], "ImageRef": 0, "LayerEnd": 0, "LayerStart": 0, "Summed Dimension": len(data.shape) - 1}]
if needs_slice:
ret["DocumentObjectList"][0]["AnnotationGroupList"] = [{"AnnotationType": 23, "Name": "SICursor", "Rectangle": (0, 0, 1, 1)}]
ret["DocumentObjectList"][0]["ImageDisplayType"] = 1 # display as an image
if modified:
dm_metadata["Timestamp"] = modified.isoformat()
if timezone:
dm_metadata["Timezone"] = timezone
if timezone_offset:
dm_metadata["TimezoneOffset"] = timezone_offset
ret["ImageList"][0]["ImageTags"] = dm_metadata
ret["InImageMode"] = True
parse_dm3.parse_dm_header(file, ret) | def function[save_image, parameter[xdata, file]]:
constant[
Saves the nparray data to the file-like object (or string) file.
]
variable[data] assign[=] name[xdata].data
variable[data_descriptor] assign[=] name[xdata].data_descriptor
variable[dimensional_calibrations] assign[=] name[xdata].dimensional_calibrations
variable[intensity_calibration] assign[=] name[xdata].intensity_calibration
variable[metadata] assign[=] name[xdata].metadata
variable[modified] assign[=] name[xdata].timestamp
variable[timezone] assign[=] name[xdata].timezone
variable[timezone_offset] assign[=] name[xdata].timezone_offset
variable[needs_slice] assign[=] constant[False]
variable[is_sequence] assign[=] constant[False]
if <ast.BoolOp object at 0x7da20e9b19f0> begin[:]
variable[data] assign[=] call[name[numpy].moveaxis, parameter[name[data], constant[2], constant[0]]]
variable[dimensional_calibrations] assign[=] binary_operation[tuple[[<ast.Subscript object at 0x7da20e9b2b90>]] + call[name[tuple], parameter[call[name[dimensional_calibrations]][<ast.Slice object at 0x7da20e9b0eb0>]]]]
if <ast.BoolOp object at 0x7da20e9b2ef0> begin[:]
variable[is_sequence] assign[=] name[data_descriptor].is_sequence
variable[data] assign[=] call[name[numpy].moveaxis, parameter[name[data], constant[1], constant[0]]]
variable[data] assign[=] call[name[numpy].expand_dims, parameter[name[data]]]
variable[dimensional_calibrations] assign[=] tuple[[<ast.Subscript object at 0x7da20e9b23b0>, <ast.Call object at 0x7da20e9b1420>, <ast.Subscript object at 0x7da20e9b3760>]]
variable[data_descriptor] assign[=] call[name[DataAndMetadata].DataDescriptor, parameter[constant[False], constant[2], constant[1]]]
variable[needs_slice] assign[=] constant[True]
variable[data_dict] assign[=] call[name[ndarray_to_imagedatadict], parameter[name[data]]]
variable[ret] assign[=] dictionary[[], []]
call[name[ret]][constant[ImageList]] assign[=] list[[<ast.Dict object at 0x7da1b1712ef0>]]
if <ast.BoolOp object at 0x7da1b1712590> begin[:]
variable[dimension_list] assign[=] call[call[name[data_dict].setdefault, parameter[constant[Calibrations], call[name[dict], parameter[]]]].setdefault, parameter[constant[Dimension], call[name[list], parameter[]]]]
for taget[name[dimensional_calibration]] in starred[call[name[reversed], parameter[name[dimensional_calibrations]]]] begin[:]
variable[dimension] assign[=] call[name[dict], parameter[]]
if compare[name[dimensional_calibration].scale not_equal[!=] constant[0.0]] begin[:]
variable[origin] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1710f40> / name[dimensional_calibration].scale]
call[name[dimension]][constant[Origin]] assign[=] name[origin]
call[name[dimension]][constant[Scale]] assign[=] name[dimensional_calibration].scale
call[name[dimension]][constant[Units]] assign[=] name[dimensional_calibration].units
call[name[dimension_list].append, parameter[name[dimension]]]
if name[intensity_calibration] begin[:]
if compare[name[intensity_calibration].scale not_equal[!=] constant[0.0]] begin[:]
variable[origin] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1713370> / name[intensity_calibration].scale]
variable[brightness] assign[=] call[call[name[data_dict].setdefault, parameter[constant[Calibrations], call[name[dict], parameter[]]]].setdefault, parameter[constant[Brightness], call[name[dict], parameter[]]]]
call[name[brightness]][constant[Origin]] assign[=] name[origin]
call[name[brightness]][constant[Scale]] assign[=] name[intensity_calibration].scale
call[name[brightness]][constant[Units]] assign[=] name[intensity_calibration].units
if name[modified] begin[:]
variable[timezone_str] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b17102e0> begin[:]
<ast.Try object at 0x7da1b17106d0>
if <ast.BoolOp object at 0x7da1b1713fd0> begin[:]
variable[timezone_str] assign[=] name[timezone_offset]
variable[timezone_str] assign[=] <ast.IfExp object at 0x7da1b1710d00>
variable[date_str] assign[=] call[name[modified].strftime, parameter[constant[%x]]]
variable[time_str] assign[=] binary_operation[call[name[modified].strftime, parameter[constant[%X]]] + name[timezone_str]]
call[name[ret]][constant[DataBar]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1712260>, <ast.Constant object at 0x7da1b1712890>], [<ast.Name object at 0x7da1b1713940>, <ast.Name object at 0x7da1b1713d30>]]
call[name[ret]][constant[ImageSourceList]] assign[=] list[[<ast.Dict object at 0x7da1b17100a0>]]
call[name[ret]][constant[DocumentObjectList]] assign[=] list[[<ast.Dict object at 0x7da1b1712e30>]]
call[name[ret]][constant[Image Behavior]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1712920>], [<ast.Constant object at 0x7da1b1712bf0>]]
variable[dm_metadata] assign[=] call[name[copy].deepcopy, parameter[name[metadata]]]
if compare[call[call[call[name[metadata].get, parameter[constant[hardware_source], call[name[dict], parameter[]]]].get, parameter[constant[signal_type], constant[]]].lower, parameter[]] equal[==] constant[eels]] begin[:]
if <ast.BoolOp object at 0x7da1b1713070> begin[:]
call[call[name[dm_metadata].setdefault, parameter[constant[Meta Data], call[name[dict], parameter[]]]]][constant[Format]] assign[=] constant[Spectrum]
call[call[name[dm_metadata].setdefault, parameter[constant[Meta Data], call[name[dict], parameter[]]]]][constant[Signal]] assign[=] constant[EELS]
if <ast.BoolOp object at 0x7da1b19eebf0> begin[:]
if <ast.BoolOp object at 0x7da1b19ef7f0> begin[:]
call[call[name[dm_metadata].setdefault, parameter[constant[Meta Data], call[name[dict], parameter[]]]]][constant[IsSequence]] assign[=] constant[True]
call[name[ret]][constant[ImageSourceList]] assign[=] list[[<ast.Dict object at 0x7da1b19eee90>]]
if name[needs_slice] begin[:]
call[call[call[name[ret]][constant[DocumentObjectList]]][constant[0]]][constant[AnnotationGroupList]] assign[=] list[[<ast.Dict object at 0x7da1b19eca60>]]
call[call[call[name[ret]][constant[DocumentObjectList]]][constant[0]]][constant[ImageDisplayType]] assign[=] constant[1]
if name[modified] begin[:]
call[name[dm_metadata]][constant[Timestamp]] assign[=] call[name[modified].isoformat, parameter[]]
if name[timezone] begin[:]
call[name[dm_metadata]][constant[Timezone]] assign[=] name[timezone]
if name[timezone_offset] begin[:]
call[name[dm_metadata]][constant[TimezoneOffset]] assign[=] name[timezone_offset]
call[call[call[name[ret]][constant[ImageList]]][constant[0]]][constant[ImageTags]] assign[=] name[dm_metadata]
call[name[ret]][constant[InImageMode]] assign[=] constant[True]
call[name[parse_dm3].parse_dm_header, parameter[name[file], name[ret]]] | keyword[def] identifier[save_image] ( identifier[xdata] : identifier[DataAndMetadata] . identifier[DataAndMetadata] , identifier[file] ):
literal[string]
identifier[data] = identifier[xdata] . identifier[data]
identifier[data_descriptor] = identifier[xdata] . identifier[data_descriptor]
identifier[dimensional_calibrations] = identifier[xdata] . identifier[dimensional_calibrations]
identifier[intensity_calibration] = identifier[xdata] . identifier[intensity_calibration]
identifier[metadata] = identifier[xdata] . identifier[metadata]
identifier[modified] = identifier[xdata] . identifier[timestamp]
identifier[timezone] = identifier[xdata] . identifier[timezone]
identifier[timezone_offset] = identifier[xdata] . identifier[timezone_offset]
identifier[needs_slice] = keyword[False]
identifier[is_sequence] = keyword[False]
keyword[if] identifier[len] ( identifier[data] . identifier[shape] )== literal[int] keyword[and] identifier[data] . identifier[dtype] != identifier[numpy] . identifier[uint8] keyword[and] identifier[data_descriptor] . identifier[datum_dimension_count] == literal[int] :
identifier[data] = identifier[numpy] . identifier[moveaxis] ( identifier[data] , literal[int] , literal[int] )
identifier[dimensional_calibrations] =( identifier[dimensional_calibrations] [ literal[int] ],)+ identifier[tuple] ( identifier[dimensional_calibrations] [ literal[int] : literal[int] ])
keyword[if] identifier[len] ( identifier[data] . identifier[shape] )== literal[int] keyword[and] identifier[data] . identifier[dtype] != identifier[numpy] . identifier[uint8] keyword[and] identifier[data_descriptor] . identifier[datum_dimension_count] == literal[int] :
identifier[is_sequence] = identifier[data_descriptor] . identifier[is_sequence]
identifier[data] = identifier[numpy] . identifier[moveaxis] ( identifier[data] , literal[int] , literal[int] )
identifier[data] = identifier[numpy] . identifier[expand_dims] ( identifier[data] , identifier[axis] = literal[int] )
identifier[dimensional_calibrations] =( identifier[dimensional_calibrations] [ literal[int] ], identifier[Calibration] . identifier[Calibration] (), identifier[dimensional_calibrations] [ literal[int] ])
identifier[data_descriptor] = identifier[DataAndMetadata] . identifier[DataDescriptor] ( keyword[False] , literal[int] , literal[int] )
identifier[needs_slice] = keyword[True]
identifier[data_dict] = identifier[ndarray_to_imagedatadict] ( identifier[data] )
identifier[ret] ={}
identifier[ret] [ literal[string] ]=[{ literal[string] : identifier[data_dict] }]
keyword[if] identifier[dimensional_calibrations] keyword[and] identifier[len] ( identifier[dimensional_calibrations] )== identifier[len] ( identifier[data] . identifier[shape] ):
identifier[dimension_list] = identifier[data_dict] . identifier[setdefault] ( literal[string] , identifier[dict] ()). identifier[setdefault] ( literal[string] , identifier[list] ())
keyword[for] identifier[dimensional_calibration] keyword[in] identifier[reversed] ( identifier[dimensional_calibrations] ):
identifier[dimension] = identifier[dict] ()
keyword[if] identifier[dimensional_calibration] . identifier[scale] != literal[int] :
identifier[origin] =- identifier[dimensional_calibration] . identifier[offset] / identifier[dimensional_calibration] . identifier[scale]
keyword[else] :
identifier[origin] = literal[int]
identifier[dimension] [ literal[string] ]= identifier[origin]
identifier[dimension] [ literal[string] ]= identifier[dimensional_calibration] . identifier[scale]
identifier[dimension] [ literal[string] ]= identifier[dimensional_calibration] . identifier[units]
identifier[dimension_list] . identifier[append] ( identifier[dimension] )
keyword[if] identifier[intensity_calibration] :
keyword[if] identifier[intensity_calibration] . identifier[scale] != literal[int] :
identifier[origin] =- identifier[intensity_calibration] . identifier[offset] / identifier[intensity_calibration] . identifier[scale]
keyword[else] :
identifier[origin] = literal[int]
identifier[brightness] = identifier[data_dict] . identifier[setdefault] ( literal[string] , identifier[dict] ()). identifier[setdefault] ( literal[string] , identifier[dict] ())
identifier[brightness] [ literal[string] ]= identifier[origin]
identifier[brightness] [ literal[string] ]= identifier[intensity_calibration] . identifier[scale]
identifier[brightness] [ literal[string] ]= identifier[intensity_calibration] . identifier[units]
keyword[if] identifier[modified] :
identifier[timezone_str] = keyword[None]
keyword[if] identifier[timezone_str] keyword[is] keyword[None] keyword[and] identifier[timezone] :
keyword[try] :
keyword[import] identifier[pytz]
identifier[tz] = identifier[pytz] . identifier[timezone] ( identifier[timezone] )
identifier[timezone_str] = identifier[tz] . identifier[tzname] ( identifier[modified] )
keyword[except] :
keyword[pass]
keyword[if] identifier[timezone_str] keyword[is] keyword[None] keyword[and] identifier[timezone_offset] :
identifier[timezone_str] = identifier[timezone_offset]
identifier[timezone_str] = literal[string] + identifier[timezone_str] keyword[if] identifier[timezone_str] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
identifier[date_str] = identifier[modified] . identifier[strftime] ( literal[string] )
identifier[time_str] = identifier[modified] . identifier[strftime] ( literal[string] )+ identifier[timezone_str]
identifier[ret] [ literal[string] ]={ literal[string] : identifier[date_str] , literal[string] : identifier[time_str] }
identifier[ret] [ literal[string] ]=[{ literal[string] : literal[string] , literal[string] :[ literal[int] ], literal[string] : literal[int] }]
identifier[ret] [ literal[string] ]=[{ literal[string] : literal[int] , literal[string] : literal[int] }]
identifier[ret] [ literal[string] ]={ literal[string] : literal[int] }
identifier[dm_metadata] = identifier[copy] . identifier[deepcopy] ( identifier[metadata] )
keyword[if] identifier[metadata] . identifier[get] ( literal[string] , identifier[dict] ()). identifier[get] ( literal[string] , literal[string] ). identifier[lower] ()== literal[string] :
keyword[if] identifier[len] ( identifier[data] . identifier[shape] )== literal[int] keyword[or] ( identifier[len] ( identifier[data] . identifier[shape] )== literal[int] keyword[and] identifier[data] . identifier[shape] [ literal[int] ]== literal[int] ):
identifier[dm_metadata] . identifier[setdefault] ( literal[string] , identifier[dict] ())[ literal[string] ]= literal[string]
identifier[dm_metadata] . identifier[setdefault] ( literal[string] , identifier[dict] ())[ literal[string] ]= literal[string]
keyword[elif] identifier[data_descriptor] . identifier[collection_dimension_count] == literal[int] keyword[and] identifier[data_descriptor] . identifier[datum_dimension_count] == literal[int] :
identifier[dm_metadata] . identifier[setdefault] ( literal[string] , identifier[dict] ())[ literal[string] ]= literal[string]
identifier[dm_metadata] . identifier[setdefault] ( literal[string] , identifier[dict] ())[ literal[string] ]= literal[string]
keyword[elif] identifier[data_descriptor] . identifier[datum_dimension_count] == literal[int] :
identifier[dm_metadata] . identifier[setdefault] ( literal[string] , identifier[dict] ())[ literal[string] ]= literal[string]
keyword[if] ( literal[int] keyword[if] identifier[data_descriptor] . identifier[is_sequence] keyword[else] literal[int] )+ identifier[data_descriptor] . identifier[collection_dimension_count] == literal[int] keyword[or] identifier[needs_slice] :
keyword[if] identifier[data_descriptor] . identifier[is_sequence] keyword[or] identifier[is_sequence] :
identifier[dm_metadata] . identifier[setdefault] ( literal[string] , identifier[dict] ())[ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]=[{ literal[string] : literal[string] , literal[string] : keyword[True] , literal[string] :[ literal[int] ], literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : identifier[len] ( identifier[data] . identifier[shape] )- literal[int] }]
keyword[if] identifier[needs_slice] :
identifier[ret] [ literal[string] ][ literal[int] ][ literal[string] ]=[{ literal[string] : literal[int] , literal[string] : literal[string] , literal[string] :( literal[int] , literal[int] , literal[int] , literal[int] )}]
identifier[ret] [ literal[string] ][ literal[int] ][ literal[string] ]= literal[int]
keyword[if] identifier[modified] :
identifier[dm_metadata] [ literal[string] ]= identifier[modified] . identifier[isoformat] ()
keyword[if] identifier[timezone] :
identifier[dm_metadata] [ literal[string] ]= identifier[timezone]
keyword[if] identifier[timezone_offset] :
identifier[dm_metadata] [ literal[string] ]= identifier[timezone_offset]
identifier[ret] [ literal[string] ][ literal[int] ][ literal[string] ]= identifier[dm_metadata]
identifier[ret] [ literal[string] ]= keyword[True]
identifier[parse_dm3] . identifier[parse_dm_header] ( identifier[file] , identifier[ret] ) | def save_image(xdata: DataAndMetadata.DataAndMetadata, file):
"""
Saves the nparray data to the file-like object (or string) file.
"""
# we need to create a basic DM tree suitable for an image
    # we'll try the minimum: just a data list
    # doesn't work. Do we need an ImageSourceList too?
# and a DocumentObjectList?
data = xdata.data
data_descriptor = xdata.data_descriptor
dimensional_calibrations = xdata.dimensional_calibrations
intensity_calibration = xdata.intensity_calibration
metadata = xdata.metadata
modified = xdata.timestamp
timezone = xdata.timezone
timezone_offset = xdata.timezone_offset
needs_slice = False
is_sequence = False
if len(data.shape) == 3 and data.dtype != numpy.uint8 and (data_descriptor.datum_dimension_count == 1):
data = numpy.moveaxis(data, 2, 0)
dimensional_calibrations = (dimensional_calibrations[2],) + tuple(dimensional_calibrations[0:2]) # depends on [control=['if'], data=[]]
if len(data.shape) == 2 and data.dtype != numpy.uint8 and (data_descriptor.datum_dimension_count == 1):
is_sequence = data_descriptor.is_sequence
data = numpy.moveaxis(data, 1, 0)
data = numpy.expand_dims(data, axis=1)
dimensional_calibrations = (dimensional_calibrations[1], Calibration.Calibration(), dimensional_calibrations[0])
data_descriptor = DataAndMetadata.DataDescriptor(False, 2, 1)
needs_slice = True # depends on [control=['if'], data=[]]
data_dict = ndarray_to_imagedatadict(data)
ret = {}
ret['ImageList'] = [{'ImageData': data_dict}]
if dimensional_calibrations and len(dimensional_calibrations) == len(data.shape):
dimension_list = data_dict.setdefault('Calibrations', dict()).setdefault('Dimension', list())
for dimensional_calibration in reversed(dimensional_calibrations):
dimension = dict()
if dimensional_calibration.scale != 0.0:
origin = -dimensional_calibration.offset / dimensional_calibration.scale # depends on [control=['if'], data=[]]
else:
origin = 0.0
dimension['Origin'] = origin
dimension['Scale'] = dimensional_calibration.scale
dimension['Units'] = dimensional_calibration.units
dimension_list.append(dimension) # depends on [control=['for'], data=['dimensional_calibration']] # depends on [control=['if'], data=[]]
if intensity_calibration:
if intensity_calibration.scale != 0.0:
origin = -intensity_calibration.offset / intensity_calibration.scale # depends on [control=['if'], data=[]]
else:
origin = 0.0
brightness = data_dict.setdefault('Calibrations', dict()).setdefault('Brightness', dict())
brightness['Origin'] = origin
brightness['Scale'] = intensity_calibration.scale
brightness['Units'] = intensity_calibration.units # depends on [control=['if'], data=[]]
if modified:
timezone_str = None
if timezone_str is None and timezone:
try:
import pytz
tz = pytz.timezone(timezone)
timezone_str = tz.tzname(modified) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if timezone_str is None and timezone_offset:
timezone_str = timezone_offset # depends on [control=['if'], data=[]]
timezone_str = ' ' + timezone_str if timezone_str is not None else ''
date_str = modified.strftime('%x')
time_str = modified.strftime('%X') + timezone_str
ret['DataBar'] = {'Acquisition Date': date_str, 'Acquisition Time': time_str} # depends on [control=['if'], data=[]]
# I think ImageSource list creates a mapping between ImageSourceIds and Images
ret['ImageSourceList'] = [{'ClassName': 'ImageSource:Simple', 'Id': [0], 'ImageRef': 0}]
    # I think this lists the sources for the DocumentObjectList. The source number is not
    # the index in the imagelist but is either the index in the ImageSourceList or the Id
    # from that list. We also need to set the annotation type to identify it as data
ret['DocumentObjectList'] = [{'ImageSource': 0, 'AnnotationType': 20}]
# finally some display options
ret['Image Behavior'] = {'ViewDisplayID': 8}
dm_metadata = copy.deepcopy(metadata)
if metadata.get('hardware_source', dict()).get('signal_type', '').lower() == 'eels':
if len(data.shape) == 1 or (len(data.shape) == 2 and data.shape[0] == 1):
dm_metadata.setdefault('Meta Data', dict())['Format'] = 'Spectrum'
dm_metadata.setdefault('Meta Data', dict())['Signal'] = 'EELS' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif data_descriptor.collection_dimension_count == 2 and data_descriptor.datum_dimension_count == 1:
dm_metadata.setdefault('Meta Data', dict())['Format'] = 'Spectrum image'
dm_metadata.setdefault('Meta Data', dict())['Signal'] = 'EELS' # depends on [control=['if'], data=[]]
elif data_descriptor.datum_dimension_count == 1:
dm_metadata.setdefault('Meta Data', dict())['Format'] = 'Spectrum' # depends on [control=['if'], data=[]]
if (1 if data_descriptor.is_sequence else 0) + data_descriptor.collection_dimension_count == 1 or needs_slice:
if data_descriptor.is_sequence or is_sequence:
dm_metadata.setdefault('Meta Data', dict())['IsSequence'] = True # depends on [control=['if'], data=[]]
ret['ImageSourceList'] = [{'ClassName': 'ImageSource:Summed', 'Do Sum': True, 'Id': [0], 'ImageRef': 0, 'LayerEnd': 0, 'LayerStart': 0, 'Summed Dimension': len(data.shape) - 1}]
if needs_slice:
ret['DocumentObjectList'][0]['AnnotationGroupList'] = [{'AnnotationType': 23, 'Name': 'SICursor', 'Rectangle': (0, 0, 1, 1)}]
ret['DocumentObjectList'][0]['ImageDisplayType'] = 1 # display as an image # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if modified:
dm_metadata['Timestamp'] = modified.isoformat() # depends on [control=['if'], data=[]]
if timezone:
dm_metadata['Timezone'] = timezone # depends on [control=['if'], data=[]]
if timezone_offset:
dm_metadata['TimezoneOffset'] = timezone_offset # depends on [control=['if'], data=[]]
ret['ImageList'][0]['ImageTags'] = dm_metadata
ret['InImageMode'] = True
parse_dm3.parse_dm_header(file, ret) |
def Interpolate(time, mask, y):
'''
Masks certain elements in the array `y` and linearly
interpolates over them, returning an array `y'` of the
same length.
:param array_like time: The time array
:param array_like mask: The indices to be interpolated over
:param array_like y: The dependent array
'''
# Ensure `y` doesn't get modified in place
yy = np.array(y)
t_ = np.delete(time, mask)
y_ = np.delete(y, mask, axis=0)
if len(yy.shape) == 1:
yy[mask] = np.interp(time[mask], t_, y_)
elif len(yy.shape) == 2:
for n in range(yy.shape[1]):
yy[mask, n] = np.interp(time[mask], t_, y_[:, n])
else:
raise Exception("Array ``y`` must be either 1- or 2-d.")
return yy | def function[Interpolate, parameter[time, mask, y]]:
constant[
Masks certain elements in the array `y` and linearly
interpolates over them, returning an array `y'` of the
same length.
:param array_like time: The time array
:param array_like mask: The indices to be interpolated over
:param array_like y: The dependent array
]
variable[yy] assign[=] call[name[np].array, parameter[name[y]]]
variable[t_] assign[=] call[name[np].delete, parameter[name[time], name[mask]]]
variable[y_] assign[=] call[name[np].delete, parameter[name[y], name[mask]]]
if compare[call[name[len], parameter[name[yy].shape]] equal[==] constant[1]] begin[:]
call[name[yy]][name[mask]] assign[=] call[name[np].interp, parameter[call[name[time]][name[mask]], name[t_], name[y_]]]
return[name[yy]] | keyword[def] identifier[Interpolate] ( identifier[time] , identifier[mask] , identifier[y] ):
literal[string]
identifier[yy] = identifier[np] . identifier[array] ( identifier[y] )
identifier[t_] = identifier[np] . identifier[delete] ( identifier[time] , identifier[mask] )
identifier[y_] = identifier[np] . identifier[delete] ( identifier[y] , identifier[mask] , identifier[axis] = literal[int] )
keyword[if] identifier[len] ( identifier[yy] . identifier[shape] )== literal[int] :
identifier[yy] [ identifier[mask] ]= identifier[np] . identifier[interp] ( identifier[time] [ identifier[mask] ], identifier[t_] , identifier[y_] )
keyword[elif] identifier[len] ( identifier[yy] . identifier[shape] )== literal[int] :
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[yy] . identifier[shape] [ literal[int] ]):
identifier[yy] [ identifier[mask] , identifier[n] ]= identifier[np] . identifier[interp] ( identifier[time] [ identifier[mask] ], identifier[t_] , identifier[y_] [:, identifier[n] ])
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[yy] | def Interpolate(time, mask, y):
"""
Masks certain elements in the array `y` and linearly
interpolates over them, returning an array `y'` of the
same length.
:param array_like time: The time array
:param array_like mask: The indices to be interpolated over
:param array_like y: The dependent array
"""
# Ensure `y` doesn't get modified in place
yy = np.array(y)
t_ = np.delete(time, mask)
y_ = np.delete(y, mask, axis=0)
if len(yy.shape) == 1:
yy[mask] = np.interp(time[mask], t_, y_) # depends on [control=['if'], data=[]]
elif len(yy.shape) == 2:
for n in range(yy.shape[1]):
yy[mask, n] = np.interp(time[mask], t_, y_[:, n]) # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=[]]
else:
raise Exception('Array ``y`` must be either 1- or 2-d.')
return yy |
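A minimal usage sketch for the `Interpolate` row above, assuming the function is in scope and `numpy` is imported as `np` (which its body requires):

import numpy as np

# Signal with two samples flagged for repair.
time = np.linspace(0.0, 1.0, 6)
y = np.sin(2.0 * np.pi * time)
mask = np.array([2, 4])            # indices to interpolate over

yy = Interpolate(time, mask, y)    # masked entries replaced by linear interpolation
assert yy.shape == y.shape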
def corruptDenseVector(vector, noiseLevel):
"""
  Corrupts a binary vector by inverting each of its bits with probability noiseLevel.
@param vector (array) binary vector to be corrupted
@param noiseLevel (float) amount of noise to be applied on the vector.
"""
size = len(vector)
for i in range(size):
rnd = random.random()
if rnd < noiseLevel:
if vector[i] == 1:
vector[i] = 0
else:
vector[i] = 1 | def function[corruptDenseVector, parameter[vector, noiseLevel]]:
constant[
  Corrupts a binary vector by inverting each of its bits with probability noiseLevel.
@param vector (array) binary vector to be corrupted
@param noiseLevel (float) amount of noise to be applied on the vector.
]
variable[size] assign[=] call[name[len], parameter[name[vector]]]
for taget[name[i]] in starred[call[name[range], parameter[name[size]]]] begin[:]
variable[rnd] assign[=] call[name[random].random, parameter[]]
if compare[name[rnd] less[<] name[noiseLevel]] begin[:]
if compare[call[name[vector]][name[i]] equal[==] constant[1]] begin[:]
call[name[vector]][name[i]] assign[=] constant[0] | keyword[def] identifier[corruptDenseVector] ( identifier[vector] , identifier[noiseLevel] ):
literal[string]
identifier[size] = identifier[len] ( identifier[vector] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[size] ):
identifier[rnd] = identifier[random] . identifier[random] ()
keyword[if] identifier[rnd] < identifier[noiseLevel] :
keyword[if] identifier[vector] [ identifier[i] ]== literal[int] :
identifier[vector] [ identifier[i] ]= literal[int]
keyword[else] :
identifier[vector] [ identifier[i] ]= literal[int] | def corruptDenseVector(vector, noiseLevel):
"""
  Corrupts a binary vector by inverting each of its bits with probability noiseLevel.
@param vector (array) binary vector to be corrupted
@param noiseLevel (float) amount of noise to be applied on the vector.
"""
size = len(vector)
for i in range(size):
rnd = random.random()
if rnd < noiseLevel:
if vector[i] == 1:
vector[i] = 0 # depends on [control=['if'], data=[]]
else:
vector[i] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] |
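A short usage sketch for `corruptDenseVector`; it mutates its argument in place and relies on the module having `import random`:

import random

random.seed(42)                    # deterministic run for the example
vector = [1, 0, 1, 1, 0, 0, 1, 0]
corruptDenseVector(vector, 0.25)   # each bit flips with probability 0.25
print(vector)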
def is_image_format(self):
""" Checks whether file format is an image format
Example: ``MimeType.PNG.is_image_format()`` or ``MimeType.is_image_format(MimeType.PNG)``
:param self: File format
:type self: MimeType
:return: ``True`` if file is in image format, ``False`` otherwise
:rtype: bool
"""
return self in frozenset([MimeType.TIFF, MimeType.TIFF_d8, MimeType.TIFF_d16, MimeType.TIFF_d32f, MimeType.PNG,
MimeType.JP2, MimeType.JPG]) | def function[is_image_format, parameter[self]]:
constant[ Checks whether file format is an image format
Example: ``MimeType.PNG.is_image_format()`` or ``MimeType.is_image_format(MimeType.PNG)``
:param self: File format
:type self: MimeType
:return: ``True`` if file is in image format, ``False`` otherwise
:rtype: bool
]
return[compare[name[self] in call[name[frozenset], parameter[list[[<ast.Attribute object at 0x7da20c6abfd0>, <ast.Attribute object at 0x7da20c6a9360>, <ast.Attribute object at 0x7da20c6a8a90>, <ast.Attribute object at 0x7da20c6ab370>, <ast.Attribute object at 0x7da20c6aaf80>, <ast.Attribute object at 0x7da20c6ab520>, <ast.Attribute object at 0x7da20c6a8af0>]]]]]] | keyword[def] identifier[is_image_format] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] keyword[in] identifier[frozenset] ([ identifier[MimeType] . identifier[TIFF] , identifier[MimeType] . identifier[TIFF_d8] , identifier[MimeType] . identifier[TIFF_d16] , identifier[MimeType] . identifier[TIFF_d32f] , identifier[MimeType] . identifier[PNG] ,
identifier[MimeType] . identifier[JP2] , identifier[MimeType] . identifier[JPG] ]) | def is_image_format(self):
""" Checks whether file format is an image format
Example: ``MimeType.PNG.is_image_format()`` or ``MimeType.is_image_format(MimeType.PNG)``
:param self: File format
:type self: MimeType
:return: ``True`` if file is in image format, ``False`` otherwise
:rtype: bool
"""
return self in frozenset([MimeType.TIFF, MimeType.TIFF_d8, MimeType.TIFF_d16, MimeType.TIFF_d32f, MimeType.PNG, MimeType.JP2, MimeType.JPG]) |
def set_alt(self, i, alt, break_alt=None, change_time=True):
'''set rally point altitude(s)'''
if i < 1 or i > self.rally_count():
print("Inavlid rally point number %u" % i)
return
self.rally_points[i-1].alt = int(alt)
if (break_alt != None):
self.rally_points[i-1].break_alt = break_alt
if change_time:
self.last_change = time.time() | def function[set_alt, parameter[self, i, alt, break_alt, change_time]]:
constant[set rally point altitude(s)]
if <ast.BoolOp object at 0x7da1b17de980> begin[:]
        call[name[print], parameter[binary_operation[constant[Invalid rally point number %u] <ast.Mod object at 0x7da2590d6920> name[i]]]]
return[None]
call[name[self].rally_points][binary_operation[name[i] - constant[1]]].alt assign[=] call[name[int], parameter[name[alt]]]
if compare[name[break_alt] not_equal[!=] constant[None]] begin[:]
call[name[self].rally_points][binary_operation[name[i] - constant[1]]].break_alt assign[=] name[break_alt]
if name[change_time] begin[:]
name[self].last_change assign[=] call[name[time].time, parameter[]] | keyword[def] identifier[set_alt] ( identifier[self] , identifier[i] , identifier[alt] , identifier[break_alt] = keyword[None] , identifier[change_time] = keyword[True] ):
literal[string]
keyword[if] identifier[i] < literal[int] keyword[or] identifier[i] > identifier[self] . identifier[rally_count] ():
identifier[print] ( literal[string] % identifier[i] )
keyword[return]
identifier[self] . identifier[rally_points] [ identifier[i] - literal[int] ]. identifier[alt] = identifier[int] ( identifier[alt] )
keyword[if] ( identifier[break_alt] != keyword[None] ):
identifier[self] . identifier[rally_points] [ identifier[i] - literal[int] ]. identifier[break_alt] = identifier[break_alt]
keyword[if] identifier[change_time] :
identifier[self] . identifier[last_change] = identifier[time] . identifier[time] () | def set_alt(self, i, alt, break_alt=None, change_time=True):
"""set rally point altitude(s)"""
if i < 1 or i > self.rally_count():
        print('Invalid rally point number %u' % i)
return # depends on [control=['if'], data=[]]
self.rally_points[i - 1].alt = int(alt)
if break_alt != None:
self.rally_points[i - 1].break_alt = break_alt # depends on [control=['if'], data=['break_alt']]
if change_time:
self.last_change = time.time() # depends on [control=['if'], data=[]] |
def _wrap_key(function, args, kws):
'''
    Build a unique key from the function and its call arguments.
'''
return hashlib.md5(pickle.dumps((_from_file(function) + function.__name__, args, kws))).hexdigest() | def function[_wrap_key, parameter[function, args, kws]]:
constant[
    Build a unique key from the function and its call arguments.
]
return[call[call[name[hashlib].md5, parameter[call[name[pickle].dumps, parameter[tuple[[<ast.BinOp object at 0x7da1b0ff01f0>, <ast.Name object at 0x7da1b0ff0e20>, <ast.Name object at 0x7da1b0ff1a80>]]]]]].hexdigest, parameter[]]] | keyword[def] identifier[_wrap_key] ( identifier[function] , identifier[args] , identifier[kws] ):
literal[string]
keyword[return] identifier[hashlib] . identifier[md5] ( identifier[pickle] . identifier[dumps] (( identifier[_from_file] ( identifier[function] )+ identifier[function] . identifier[__name__] , identifier[args] , identifier[kws] ))). identifier[hexdigest] () | def _wrap_key(function, args, kws):
"""
    Build a unique key from the function and its call arguments.
"""
return hashlib.md5(pickle.dumps((_from_file(function) + function.__name__, args, kws))).hexdigest() |
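A sketch of how `_wrap_key` might be exercised. `_from_file` is a project helper not shown here, so the stub below is an assumption about its behaviour:

import hashlib
import pickle

def _from_file(function):
    # hypothetical stand-in: the real helper identifies the file the
    # function was defined in
    return getattr(function, '__module__', '') or ''

def add(a, b):
    return a + b

key = _wrap_key(add, (1, 2), {'scale': 3})
print(key)  # stable md5 hex digest keyed on (file + name, args, kwargs)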
def uncompress_file(inputfile, filename):
"""
Uncompress this file using gzip and change its name.
    :param inputfile: File to uncompress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
"""
zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
try:
outputfile = create_spooled_temporary_file(fileobj=zipfile)
finally:
zipfile.close()
new_basename = os.path.basename(filename).replace('.gz', '')
return outputfile, new_basename | def function[uncompress_file, parameter[inputfile, filename]]:
constant[
Uncompress this file using gzip and change its name.
    :param inputfile: File to uncompress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
]
variable[zipfile] assign[=] call[name[gzip].GzipFile, parameter[]]
<ast.Try object at 0x7da1b1237a60>
variable[new_basename] assign[=] call[call[name[os].path.basename, parameter[name[filename]]].replace, parameter[constant[.gz], constant[]]]
return[tuple[[<ast.Name object at 0x7da1b1236e90>, <ast.Name object at 0x7da1b1235ae0>]]] | keyword[def] identifier[uncompress_file] ( identifier[inputfile] , identifier[filename] ):
literal[string]
identifier[zipfile] = identifier[gzip] . identifier[GzipFile] ( identifier[fileobj] = identifier[inputfile] , identifier[mode] = literal[string] )
keyword[try] :
identifier[outputfile] = identifier[create_spooled_temporary_file] ( identifier[fileobj] = identifier[zipfile] )
keyword[finally] :
identifier[zipfile] . identifier[close] ()
identifier[new_basename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ). identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[outputfile] , identifier[new_basename] | def uncompress_file(inputfile, filename):
"""
Uncompress this file using gzip and change its name.
    :param inputfile: File to uncompress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
"""
zipfile = gzip.GzipFile(fileobj=inputfile, mode='rb')
try:
outputfile = create_spooled_temporary_file(fileobj=zipfile) # depends on [control=['try'], data=[]]
finally:
zipfile.close()
new_basename = os.path.basename(filename).replace('.gz', '')
return (outputfile, new_basename) |
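A runnable sketch for `uncompress_file`. `create_spooled_temporary_file` comes from the surrounding project, so the stand-in below is an assumption about what it does:

import gzip
import io
import tempfile

def create_spooled_temporary_file(fileobj):
    # assumed behaviour: copy fileobj into a rewound SpooledTemporaryFile
    out = tempfile.SpooledTemporaryFile()
    out.write(fileobj.read())
    out.seek(0)
    return out

payload = io.BytesIO(gzip.compress(b'hello world'))
outputfile, name = uncompress_file(payload, 'backup.sql.gz')
print(name)               # 'backup.sql'
print(outputfile.read())  # b'hello world'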
def get_component_exceptions(cluster, environ, topology, component, role=None):
'''
Get exceptions for 'component' for 'topology'
:param cluster:
:param environ:
:param topology:
:param component:
:param role:
:return:
'''
params = dict(
cluster=cluster,
environ=environ,
topology=topology,
component=component)
if role is not None:
params['role'] = role
request_url = tornado.httputil.url_concat(
create_url(EXCEPTIONS_URL_FMT), params)
raise tornado.gen.Return((yield fetch_url_as_json(request_url))) | def function[get_component_exceptions, parameter[cluster, environ, topology, component, role]]:
constant[
Get exceptions for 'component' for 'topology'
:param cluster:
:param environ:
:param topology:
:param component:
:param role:
:return:
]
variable[params] assign[=] call[name[dict], parameter[]]
if compare[name[role] is_not constant[None]] begin[:]
call[name[params]][constant[role]] assign[=] name[role]
variable[request_url] assign[=] call[name[tornado].httputil.url_concat, parameter[call[name[create_url], parameter[name[EXCEPTIONS_URL_FMT]]], name[params]]]
<ast.Raise object at 0x7da18dc984f0> | keyword[def] identifier[get_component_exceptions] ( identifier[cluster] , identifier[environ] , identifier[topology] , identifier[component] , identifier[role] = keyword[None] ):
literal[string]
identifier[params] = identifier[dict] (
identifier[cluster] = identifier[cluster] ,
identifier[environ] = identifier[environ] ,
identifier[topology] = identifier[topology] ,
identifier[component] = identifier[component] )
keyword[if] identifier[role] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[role]
identifier[request_url] = identifier[tornado] . identifier[httputil] . identifier[url_concat] (
identifier[create_url] ( identifier[EXCEPTIONS_URL_FMT] ), identifier[params] )
keyword[raise] identifier[tornado] . identifier[gen] . identifier[Return] (( keyword[yield] identifier[fetch_url_as_json] ( identifier[request_url] ))) | def get_component_exceptions(cluster, environ, topology, component, role=None):
"""
Get exceptions for 'component' for 'topology'
:param cluster:
:param environ:
:param topology:
:param component:
:param role:
:return:
"""
params = dict(cluster=cluster, environ=environ, topology=topology, component=component)
if role is not None:
params['role'] = role # depends on [control=['if'], data=['role']]
request_url = tornado.httputil.url_concat(create_url(EXCEPTIONS_URL_FMT), params)
raise tornado.gen.Return((yield fetch_url_as_json(request_url))) |
def parse_acl(acl_string):
""" Parse raw string :acl_string: of RAML-defined ACLs.
If :acl_string: is blank or None, all permissions are given.
Values of ACL action and principal are parsed using `actions` and
`special_principals` maps and are looked up after `strip()` and
`lower()`.
ACEs in :acl_string: may be separated by newlines or semicolons.
Action, principal and permission lists must be separated by spaces.
Permissions must be comma-separated.
E.g. 'allow everyone view,create,update' and 'deny authenticated delete'
:param acl_string: Raw RAML string containing defined ACEs.
"""
if not acl_string:
return [ALLOW_ALL]
aces_list = acl_string.replace('\n', ';').split(';')
aces_list = [ace.strip().split(' ', 2) for ace in aces_list if ace]
aces_list = [(a, b, c.split(',')) for a, b, c in aces_list]
result_acl = []
for action_str, princ_str, perms in aces_list:
# Process action
action_str = action_str.strip().lower()
action = actions.get(action_str)
if action is None:
raise ValueError(
'Unknown ACL action: {}. Valid actions: {}'.format(
action_str, list(actions.keys())))
# Process principal
princ_str = princ_str.strip().lower()
if princ_str in special_principals:
principal = special_principals[princ_str]
elif is_callable_tag(princ_str):
principal = resolve_to_callable(princ_str)
else:
principal = princ_str
# Process permissions
permissions = parse_permissions(perms)
result_acl.append((action, principal, permissions))
return result_acl | def function[parse_acl, parameter[acl_string]]:
constant[ Parse raw string :acl_string: of RAML-defined ACLs.
If :acl_string: is blank or None, all permissions are given.
Values of ACL action and principal are parsed using `actions` and
`special_principals` maps and are looked up after `strip()` and
`lower()`.
ACEs in :acl_string: may be separated by newlines or semicolons.
Action, principal and permission lists must be separated by spaces.
Permissions must be comma-separated.
E.g. 'allow everyone view,create,update' and 'deny authenticated delete'
:param acl_string: Raw RAML string containing defined ACEs.
]
if <ast.UnaryOp object at 0x7da18eb54700> begin[:]
return[list[[<ast.Name object at 0x7da18eb557e0>]]]
variable[aces_list] assign[=] call[call[name[acl_string].replace, parameter[constant[
], constant[;]]].split, parameter[constant[;]]]
variable[aces_list] assign[=] <ast.ListComp object at 0x7da18eb55d80>
variable[aces_list] assign[=] <ast.ListComp object at 0x7da18eb565f0>
variable[result_acl] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18eb54c40>, <ast.Name object at 0x7da18eb558a0>, <ast.Name object at 0x7da18eb54cd0>]]] in starred[name[aces_list]] begin[:]
variable[action_str] assign[=] call[call[name[action_str].strip, parameter[]].lower, parameter[]]
variable[action] assign[=] call[name[actions].get, parameter[name[action_str]]]
if compare[name[action] is constant[None]] begin[:]
<ast.Raise object at 0x7da18eb54850>
variable[princ_str] assign[=] call[call[name[princ_str].strip, parameter[]].lower, parameter[]]
if compare[name[princ_str] in name[special_principals]] begin[:]
variable[principal] assign[=] call[name[special_principals]][name[princ_str]]
variable[permissions] assign[=] call[name[parse_permissions], parameter[name[perms]]]
call[name[result_acl].append, parameter[tuple[[<ast.Name object at 0x7da18eb55b10>, <ast.Name object at 0x7da18eb56a40>, <ast.Name object at 0x7da18eb56f80>]]]]
return[name[result_acl]] | keyword[def] identifier[parse_acl] ( identifier[acl_string] ):
literal[string]
keyword[if] keyword[not] identifier[acl_string] :
keyword[return] [ identifier[ALLOW_ALL] ]
identifier[aces_list] = identifier[acl_string] . identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )
identifier[aces_list] =[ identifier[ace] . identifier[strip] (). identifier[split] ( literal[string] , literal[int] ) keyword[for] identifier[ace] keyword[in] identifier[aces_list] keyword[if] identifier[ace] ]
identifier[aces_list] =[( identifier[a] , identifier[b] , identifier[c] . identifier[split] ( literal[string] )) keyword[for] identifier[a] , identifier[b] , identifier[c] keyword[in] identifier[aces_list] ]
identifier[result_acl] =[]
keyword[for] identifier[action_str] , identifier[princ_str] , identifier[perms] keyword[in] identifier[aces_list] :
identifier[action_str] = identifier[action_str] . identifier[strip] (). identifier[lower] ()
identifier[action] = identifier[actions] . identifier[get] ( identifier[action_str] )
keyword[if] identifier[action] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] (
identifier[action_str] , identifier[list] ( identifier[actions] . identifier[keys] ())))
identifier[princ_str] = identifier[princ_str] . identifier[strip] (). identifier[lower] ()
keyword[if] identifier[princ_str] keyword[in] identifier[special_principals] :
identifier[principal] = identifier[special_principals] [ identifier[princ_str] ]
keyword[elif] identifier[is_callable_tag] ( identifier[princ_str] ):
identifier[principal] = identifier[resolve_to_callable] ( identifier[princ_str] )
keyword[else] :
identifier[principal] = identifier[princ_str]
identifier[permissions] = identifier[parse_permissions] ( identifier[perms] )
identifier[result_acl] . identifier[append] (( identifier[action] , identifier[principal] , identifier[permissions] ))
keyword[return] identifier[result_acl] | def parse_acl(acl_string):
""" Parse raw string :acl_string: of RAML-defined ACLs.
If :acl_string: is blank or None, all permissions are given.
Values of ACL action and principal are parsed using `actions` and
`special_principals` maps and are looked up after `strip()` and
`lower()`.
ACEs in :acl_string: may be separated by newlines or semicolons.
Action, principal and permission lists must be separated by spaces.
Permissions must be comma-separated.
E.g. 'allow everyone view,create,update' and 'deny authenticated delete'
:param acl_string: Raw RAML string containing defined ACEs.
"""
if not acl_string:
return [ALLOW_ALL] # depends on [control=['if'], data=[]]
aces_list = acl_string.replace('\n', ';').split(';')
aces_list = [ace.strip().split(' ', 2) for ace in aces_list if ace]
aces_list = [(a, b, c.split(',')) for (a, b, c) in aces_list]
result_acl = []
for (action_str, princ_str, perms) in aces_list:
# Process action
action_str = action_str.strip().lower()
action = actions.get(action_str)
if action is None:
raise ValueError('Unknown ACL action: {}. Valid actions: {}'.format(action_str, list(actions.keys()))) # depends on [control=['if'], data=[]]
# Process principal
princ_str = princ_str.strip().lower()
if princ_str in special_principals:
principal = special_principals[princ_str] # depends on [control=['if'], data=['princ_str', 'special_principals']]
elif is_callable_tag(princ_str):
principal = resolve_to_callable(princ_str) # depends on [control=['if'], data=[]]
else:
principal = princ_str
# Process permissions
permissions = parse_permissions(perms)
result_acl.append((action, principal, permissions)) # depends on [control=['for'], data=[]]
return result_acl |
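`parse_acl` leans on module-level lookup tables and helpers (`actions`, `special_principals`, `parse_permissions`, `is_callable_tag`, `resolve_to_callable`, `ALLOW_ALL`) defined elsewhere in its package. The stand-ins below are assumptions used only to make the sketch self-contained:

ALLOW_ALL = ('Allow', 'Everyone', 'all')                 # hypothetical value
actions = {'allow': 'Allow', 'deny': 'Deny'}
special_principals = {'everyone': 'Everyone',
                      'authenticated': 'Authenticated'}

def parse_permissions(perms):
    return [p.strip() for p in perms]

def is_callable_tag(value):
    return False

acl = parse_acl("allow everyone view,create\ndeny authenticated delete")
print(acl)
# [('Allow', 'Everyone', ['view', 'create']),
#  ('Deny', 'Authenticated', ['delete'])]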
def _ycbcr2rgb(self, mode):
"""Convert the image from YCbCr mode to RGB.
"""
self._check_modes(("YCbCr", "YCbCrA"))
(self.channels[0], self.channels[1], self.channels[2]) = \
ycbcr2rgb(self.channels[0],
self.channels[1],
self.channels[2])
if self.fill_value is not None:
self.fill_value[0:3] = ycbcr2rgb(self.fill_value[0],
self.fill_value[1],
self.fill_value[2])
self.mode = mode | def function[_ycbcr2rgb, parameter[self, mode]]:
constant[Convert the image from YCbCr mode to RGB.
]
call[name[self]._check_modes, parameter[tuple[[<ast.Constant object at 0x7da1b0528f10>, <ast.Constant object at 0x7da1b0529000>]]]]
<ast.Tuple object at 0x7da1b0528fd0> assign[=] call[name[ycbcr2rgb], parameter[call[name[self].channels][constant[0]], call[name[self].channels][constant[1]], call[name[self].channels][constant[2]]]]
if compare[name[self].fill_value is_not constant[None]] begin[:]
call[name[self].fill_value][<ast.Slice object at 0x7da20c6abf40>] assign[=] call[name[ycbcr2rgb], parameter[call[name[self].fill_value][constant[0]], call[name[self].fill_value][constant[1]], call[name[self].fill_value][constant[2]]]]
name[self].mode assign[=] name[mode] | keyword[def] identifier[_ycbcr2rgb] ( identifier[self] , identifier[mode] ):
literal[string]
identifier[self] . identifier[_check_modes] (( literal[string] , literal[string] ))
( identifier[self] . identifier[channels] [ literal[int] ], identifier[self] . identifier[channels] [ literal[int] ], identifier[self] . identifier[channels] [ literal[int] ])= identifier[ycbcr2rgb] ( identifier[self] . identifier[channels] [ literal[int] ],
identifier[self] . identifier[channels] [ literal[int] ],
identifier[self] . identifier[channels] [ literal[int] ])
keyword[if] identifier[self] . identifier[fill_value] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[fill_value] [ literal[int] : literal[int] ]= identifier[ycbcr2rgb] ( identifier[self] . identifier[fill_value] [ literal[int] ],
identifier[self] . identifier[fill_value] [ literal[int] ],
identifier[self] . identifier[fill_value] [ literal[int] ])
identifier[self] . identifier[mode] = identifier[mode] | def _ycbcr2rgb(self, mode):
"""Convert the image from YCbCr mode to RGB.
"""
self._check_modes(('YCbCr', 'YCbCrA'))
(self.channels[0], self.channels[1], self.channels[2]) = ycbcr2rgb(self.channels[0], self.channels[1], self.channels[2])
if self.fill_value is not None:
self.fill_value[0:3] = ycbcr2rgb(self.fill_value[0], self.fill_value[1], self.fill_value[2]) # depends on [control=['if'], data=[]]
self.mode = mode |
def _get_rule_changes(rules, _rules):
'''
given a list of desired rules (rules) and existing rules (_rules) return
a list of rules to delete (to_delete) and to create (to_create)
'''
to_delete = []
to_create = []
# for each rule in state file
# 1. validate rule
# 2. determine if rule exists in existing security group rules
for rule in rules:
try:
ip_protocol = six.text_type(rule.get('ip_protocol'))
except KeyError:
raise SaltInvocationError('ip_protocol, to_port, and from_port are'
' required arguments for security group'
' rules.')
supported_protocols = ['tcp', '6', 6, 'udp', '17', 17, 'icmp', '1', 1,
'all', '-1', -1]
if ip_protocol not in supported_protocols and (not
'{0}'.format(ip_protocol).isdigit() or int(ip_protocol) > 255):
raise SaltInvocationError(
'Invalid ip_protocol {0} specified in security group rule.'.format(ip_protocol))
# For the 'all' case, we need to change the protocol name to '-1'.
if ip_protocol == 'all':
rule['ip_protocol'] = '-1'
cidr_ip = rule.get('cidr_ip', None)
group_name = rule.get('source_group_name', None)
group_id = rule.get('source_group_group_id', None)
if cidr_ip and (group_id or group_name):
raise SaltInvocationError('cidr_ip and source groups can not both'
' be specified in security group rules.')
if group_id and group_name:
raise SaltInvocationError('Either source_group_group_id or'
' source_group_name can be specified in'
' security group rules, but not both.')
if not (cidr_ip or group_id or group_name):
raise SaltInvocationError('cidr_ip, source_group_group_id, or'
' source_group_name must be provided for'
' security group rules.')
rule_found = False
# for each rule in existing security group ruleset determine if
# new rule exists
for _rule in _rules:
if _check_rule(rule, _rule):
rule_found = True
break
if not rule_found:
to_create.append(rule)
# for each rule in existing security group configuration
# 1. determine if rules needed to be deleted
for _rule in _rules:
rule_found = False
for rule in rules:
if _check_rule(rule, _rule):
rule_found = True
break
if not rule_found:
# Can only supply name or id, not both. Since we're deleting
# entries, it doesn't matter which we pick.
_rule.pop('source_group_name', None)
to_delete.append(_rule)
log.debug('Rules to be deleted: %s', to_delete)
log.debug('Rules to be created: %s', to_create)
return (to_delete, to_create) | def function[_get_rule_changes, parameter[rules, _rules]]:
constant[
given a list of desired rules (rules) and existing rules (_rules) return
a list of rules to delete (to_delete) and to create (to_create)
]
variable[to_delete] assign[=] list[[]]
variable[to_create] assign[=] list[[]]
for taget[name[rule]] in starred[name[rules]] begin[:]
<ast.Try object at 0x7da1b2123760>
variable[supported_protocols] assign[=] list[[<ast.Constant object at 0x7da1b1f49510>, <ast.Constant object at 0x7da1b1f484c0>, <ast.Constant object at 0x7da1b1f498d0>, <ast.Constant object at 0x7da1b1f491e0>, <ast.Constant object at 0x7da1b1f49270>, <ast.Constant object at 0x7da1b1f48970>, <ast.Constant object at 0x7da1b1f4bbb0>, <ast.Constant object at 0x7da1b1f49240>, <ast.Constant object at 0x7da1b1f4bca0>, <ast.Constant object at 0x7da1b1f48a00>, <ast.Constant object at 0x7da1b1f49900>, <ast.UnaryOp object at 0x7da1b1f483d0>]]
if <ast.BoolOp object at 0x7da1b1f488b0> begin[:]
<ast.Raise object at 0x7da1b1f494e0>
if compare[name[ip_protocol] equal[==] constant[all]] begin[:]
call[name[rule]][constant[ip_protocol]] assign[=] constant[-1]
variable[cidr_ip] assign[=] call[name[rule].get, parameter[constant[cidr_ip], constant[None]]]
variable[group_name] assign[=] call[name[rule].get, parameter[constant[source_group_name], constant[None]]]
variable[group_id] assign[=] call[name[rule].get, parameter[constant[source_group_group_id], constant[None]]]
if <ast.BoolOp object at 0x7da1b1f48730> begin[:]
<ast.Raise object at 0x7da1b1f48c70>
if <ast.BoolOp object at 0x7da1b1f48be0> begin[:]
<ast.Raise object at 0x7da1b1f48e50>
if <ast.UnaryOp object at 0x7da1b1f49870> begin[:]
<ast.Raise object at 0x7da1b1f49810>
variable[rule_found] assign[=] constant[False]
for taget[name[_rule]] in starred[name[_rules]] begin[:]
if call[name[_check_rule], parameter[name[rule], name[_rule]]] begin[:]
variable[rule_found] assign[=] constant[True]
break
if <ast.UnaryOp object at 0x7da1b1f9ba90> begin[:]
call[name[to_create].append, parameter[name[rule]]]
for taget[name[_rule]] in starred[name[_rules]] begin[:]
variable[rule_found] assign[=] constant[False]
for taget[name[rule]] in starred[name[rules]] begin[:]
if call[name[_check_rule], parameter[name[rule], name[_rule]]] begin[:]
variable[rule_found] assign[=] constant[True]
break
if <ast.UnaryOp object at 0x7da1b1fe47c0> begin[:]
call[name[_rule].pop, parameter[constant[source_group_name], constant[None]]]
call[name[to_delete].append, parameter[name[_rule]]]
call[name[log].debug, parameter[constant[Rules to be deleted: %s], name[to_delete]]]
call[name[log].debug, parameter[constant[Rules to be created: %s], name[to_create]]]
return[tuple[[<ast.Name object at 0x7da1b1fe4520>, <ast.Name object at 0x7da1b1fe4700>]]] | keyword[def] identifier[_get_rule_changes] ( identifier[rules] , identifier[_rules] ):
literal[string]
identifier[to_delete] =[]
identifier[to_create] =[]
keyword[for] identifier[rule] keyword[in] identifier[rules] :
keyword[try] :
identifier[ip_protocol] = identifier[six] . identifier[text_type] ( identifier[rule] . identifier[get] ( literal[string] ))
keyword[except] identifier[KeyError] :
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string]
literal[string] )
identifier[supported_protocols] =[ literal[string] , literal[string] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] , literal[string] , literal[int] ,
literal[string] , literal[string] ,- literal[int] ]
keyword[if] identifier[ip_protocol] keyword[not] keyword[in] identifier[supported_protocols] keyword[and] ( keyword[not]
literal[string] . identifier[format] ( identifier[ip_protocol] ). identifier[isdigit] () keyword[or] identifier[int] ( identifier[ip_protocol] )> literal[int] ):
keyword[raise] identifier[SaltInvocationError] (
literal[string] . identifier[format] ( identifier[ip_protocol] ))
keyword[if] identifier[ip_protocol] == literal[string] :
identifier[rule] [ literal[string] ]= literal[string]
identifier[cidr_ip] = identifier[rule] . identifier[get] ( literal[string] , keyword[None] )
identifier[group_name] = identifier[rule] . identifier[get] ( literal[string] , keyword[None] )
identifier[group_id] = identifier[rule] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[cidr_ip] keyword[and] ( identifier[group_id] keyword[or] identifier[group_name] ):
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string] )
keyword[if] identifier[group_id] keyword[and] identifier[group_name] :
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string]
literal[string] )
keyword[if] keyword[not] ( identifier[cidr_ip] keyword[or] identifier[group_id] keyword[or] identifier[group_name] ):
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string]
literal[string] )
identifier[rule_found] = keyword[False]
keyword[for] identifier[_rule] keyword[in] identifier[_rules] :
keyword[if] identifier[_check_rule] ( identifier[rule] , identifier[_rule] ):
identifier[rule_found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[rule_found] :
identifier[to_create] . identifier[append] ( identifier[rule] )
keyword[for] identifier[_rule] keyword[in] identifier[_rules] :
identifier[rule_found] = keyword[False]
keyword[for] identifier[rule] keyword[in] identifier[rules] :
keyword[if] identifier[_check_rule] ( identifier[rule] , identifier[_rule] ):
identifier[rule_found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[rule_found] :
identifier[_rule] . identifier[pop] ( literal[string] , keyword[None] )
identifier[to_delete] . identifier[append] ( identifier[_rule] )
identifier[log] . identifier[debug] ( literal[string] , identifier[to_delete] )
identifier[log] . identifier[debug] ( literal[string] , identifier[to_create] )
keyword[return] ( identifier[to_delete] , identifier[to_create] ) | def _get_rule_changes(rules, _rules):
"""
given a list of desired rules (rules) and existing rules (_rules) return
a list of rules to delete (to_delete) and to create (to_create)
"""
to_delete = []
to_create = []
# for each rule in state file
# 1. validate rule
# 2. determine if rule exists in existing security group rules
for rule in rules:
try:
ip_protocol = six.text_type(rule.get('ip_protocol')) # depends on [control=['try'], data=[]]
except KeyError:
raise SaltInvocationError('ip_protocol, to_port, and from_port are required arguments for security group rules.') # depends on [control=['except'], data=[]]
supported_protocols = ['tcp', '6', 6, 'udp', '17', 17, 'icmp', '1', 1, 'all', '-1', -1]
if ip_protocol not in supported_protocols and (not '{0}'.format(ip_protocol).isdigit() or int(ip_protocol) > 255):
raise SaltInvocationError('Invalid ip_protocol {0} specified in security group rule.'.format(ip_protocol)) # depends on [control=['if'], data=[]]
# For the 'all' case, we need to change the protocol name to '-1'.
if ip_protocol == 'all':
rule['ip_protocol'] = '-1' # depends on [control=['if'], data=[]]
cidr_ip = rule.get('cidr_ip', None)
group_name = rule.get('source_group_name', None)
group_id = rule.get('source_group_group_id', None)
if cidr_ip and (group_id or group_name):
raise SaltInvocationError('cidr_ip and source groups can not both be specified in security group rules.') # depends on [control=['if'], data=[]]
if group_id and group_name:
raise SaltInvocationError('Either source_group_group_id or source_group_name can be specified in security group rules, but not both.') # depends on [control=['if'], data=[]]
if not (cidr_ip or group_id or group_name):
raise SaltInvocationError('cidr_ip, source_group_group_id, or source_group_name must be provided for security group rules.') # depends on [control=['if'], data=[]]
rule_found = False
# for each rule in existing security group ruleset determine if
# new rule exists
for _rule in _rules:
if _check_rule(rule, _rule):
rule_found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_rule']]
if not rule_found:
to_create.append(rule) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']]
# for each rule in existing security group configuration
# 1. determine if rules needed to be deleted
for _rule in _rules:
rule_found = False
for rule in rules:
if _check_rule(rule, _rule):
rule_found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']]
if not rule_found:
# Can only supply name or id, not both. Since we're deleting
# entries, it doesn't matter which we pick.
_rule.pop('source_group_name', None)
to_delete.append(_rule) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_rule']]
log.debug('Rules to be deleted: %s', to_delete)
log.debug('Rules to be created: %s', to_create)
return (to_delete, to_create) |
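`_get_rule_changes` expects several module-level pieces from its Salt execution module (`six`, `log`, `SaltInvocationError`, `_check_rule`). A hedged sketch with stand-ins for those dependencies:

import logging

log = logging.getLogger(__name__)

class SaltInvocationError(Exception):
    pass

class _Six:                        # stub for the `six` import used above
    text_type = str
six = _Six()

def _check_rule(rule, _rule):
    # simplified stand-in: rules match when the desired keys agree
    return all(_rule.get(k) == v for k, v in rule.items())

desired = [{'ip_protocol': 'tcp', 'from_port': 22, 'to_port': 22,
            'cidr_ip': '0.0.0.0/0'}]
existing = [{'ip_protocol': 'tcp', 'from_port': 80, 'to_port': 80,
             'cidr_ip': '0.0.0.0/0'}]
to_delete, to_create = _get_rule_changes(desired, existing)
print(to_create)  # the port-22 rule is missing, so it must be created
print(to_delete)  # the port-80 rule is unwanted, so it must be deleted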
def missing_host_key(self, client, hostname, key):
"""
Called when an `.SSHClient` receives a server key for a server that
isn't in either the system or local `.HostKeys` object. To accept
    the key, simply return. To reject, raise an exception (which will
be passed to the calling application).
"""
self.host_key = key.get_base64()
print("Fetched key is: %s" % self.host_key)
return | def function[missing_host_key, parameter[self, client, hostname, key]]:
constant[
Called when an `.SSHClient` receives a server key for a server that
isn't in either the system or local `.HostKeys` object. To accept
    the key, simply return. To reject, raise an exception (which will
be passed to the calling application).
]
name[self].host_key assign[=] call[name[key].get_base64, parameter[]]
call[name[print], parameter[binary_operation[constant[Fetched key is: %s] <ast.Mod object at 0x7da2590d6920> name[self].host_key]]]
return[None] | keyword[def] identifier[missing_host_key] ( identifier[self] , identifier[client] , identifier[hostname] , identifier[key] ):
literal[string]
identifier[self] . identifier[host_key] = identifier[key] . identifier[get_base64] ()
identifier[print] ( literal[string] % identifier[self] . identifier[host_key] )
keyword[return] | def missing_host_key(self, client, hostname, key):
"""
Called when an `.SSHClient` receives a server key for a server that
isn't in either the system or local `.HostKeys` object. To accept
    the key, simply return. To reject, raise an exception (which will
be passed to the calling application).
"""
self.host_key = key.get_base64()
print('Fetched key is: %s' % self.host_key)
return |
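The method above matches paramiko's `MissingHostKeyPolicy` interface. A hedged sketch of wiring the same behaviour into an `SSHClient`; the class name is illustrative, not from the source:

import paramiko

class FetchHostKeyPolicy(paramiko.MissingHostKeyPolicy):
    """Accept unknown host keys while recording them, as above."""
    def __init__(self):
        self.host_key = None

    def missing_host_key(self, client, hostname, key):
        self.host_key = key.get_base64()

policy = FetchHostKeyPolicy()
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(policy)
# after ssh.connect(...), policy.host_key holds the server's base64 key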
def _general_error_handler(http_error):
''' Simple error handler for azure.'''
message = str(http_error)
if http_error.respbody is not None:
message += '\n' + http_error.respbody.decode('utf-8-sig')
raise AzureHttpError(message, http_error.status) | def function[_general_error_handler, parameter[http_error]]:
constant[ Simple error handler for azure.]
variable[message] assign[=] call[name[str], parameter[name[http_error]]]
if compare[name[http_error].respbody is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b23458a0>
<ast.Raise object at 0x7da1b2347280> | keyword[def] identifier[_general_error_handler] ( identifier[http_error] ):
literal[string]
identifier[message] = identifier[str] ( identifier[http_error] )
keyword[if] identifier[http_error] . identifier[respbody] keyword[is] keyword[not] keyword[None] :
identifier[message] += literal[string] + identifier[http_error] . identifier[respbody] . identifier[decode] ( literal[string] )
keyword[raise] identifier[AzureHttpError] ( identifier[message] , identifier[http_error] . identifier[status] ) | def _general_error_handler(http_error):
""" Simple error handler for azure."""
message = str(http_error)
if http_error.respbody is not None:
message += '\n' + http_error.respbody.decode('utf-8-sig') # depends on [control=['if'], data=[]]
raise AzureHttpError(message, http_error.status) |
def interp_na(self, dim=None, use_coordinate=True, method='linear', limit=None,
**kwargs):
'''Interpolate values according to different methods.'''
if dim is None:
raise NotImplementedError('dim is a required argument')
if limit is not None:
valids = _get_valid_fill_mask(self, dim, limit)
# method
index = get_clean_interp_index(self, dim, use_coordinate=use_coordinate,
**kwargs)
interp_class, kwargs = _get_interpolator(method, **kwargs)
interpolator = partial(func_interpolate_na, interp_class, **kwargs)
with warnings.catch_warnings():
warnings.filterwarnings('ignore', 'overflow', RuntimeWarning)
warnings.filterwarnings('ignore', 'invalid value', RuntimeWarning)
arr = apply_ufunc(interpolator, index, self,
input_core_dims=[[dim], [dim]],
output_core_dims=[[dim]],
output_dtypes=[self.dtype],
dask='parallelized',
vectorize=True,
keep_attrs=True).transpose(*self.dims)
if limit is not None:
arr = arr.where(valids)
return arr | def function[interp_na, parameter[self, dim, use_coordinate, method, limit]]:
constant[Interpolate values according to different methods.]
if compare[name[dim] is constant[None]] begin[:]
<ast.Raise object at 0x7da207f01b40>
if compare[name[limit] is_not constant[None]] begin[:]
variable[valids] assign[=] call[name[_get_valid_fill_mask], parameter[name[self], name[dim], name[limit]]]
variable[index] assign[=] call[name[get_clean_interp_index], parameter[name[self], name[dim]]]
<ast.Tuple object at 0x7da207f01450> assign[=] call[name[_get_interpolator], parameter[name[method]]]
variable[interpolator] assign[=] call[name[partial], parameter[name[func_interpolate_na], name[interp_class]]]
with call[name[warnings].catch_warnings, parameter[]] begin[:]
call[name[warnings].filterwarnings, parameter[constant[ignore], constant[overflow], name[RuntimeWarning]]]
call[name[warnings].filterwarnings, parameter[constant[ignore], constant[invalid value], name[RuntimeWarning]]]
variable[arr] assign[=] call[call[name[apply_ufunc], parameter[name[interpolator], name[index], name[self]]].transpose, parameter[<ast.Starred object at 0x7da18dc99120>]]
if compare[name[limit] is_not constant[None]] begin[:]
variable[arr] assign[=] call[name[arr].where, parameter[name[valids]]]
return[name[arr]] | keyword[def] identifier[interp_na] ( identifier[self] , identifier[dim] = keyword[None] , identifier[use_coordinate] = keyword[True] , identifier[method] = literal[string] , identifier[limit] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[dim] keyword[is] keyword[None] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] :
identifier[valids] = identifier[_get_valid_fill_mask] ( identifier[self] , identifier[dim] , identifier[limit] )
identifier[index] = identifier[get_clean_interp_index] ( identifier[self] , identifier[dim] , identifier[use_coordinate] = identifier[use_coordinate] ,
** identifier[kwargs] )
identifier[interp_class] , identifier[kwargs] = identifier[_get_interpolator] ( identifier[method] ,** identifier[kwargs] )
identifier[interpolator] = identifier[partial] ( identifier[func_interpolate_na] , identifier[interp_class] ,** identifier[kwargs] )
keyword[with] identifier[warnings] . identifier[catch_warnings] ():
identifier[warnings] . identifier[filterwarnings] ( literal[string] , literal[string] , identifier[RuntimeWarning] )
identifier[warnings] . identifier[filterwarnings] ( literal[string] , literal[string] , identifier[RuntimeWarning] )
identifier[arr] = identifier[apply_ufunc] ( identifier[interpolator] , identifier[index] , identifier[self] ,
identifier[input_core_dims] =[[ identifier[dim] ],[ identifier[dim] ]],
identifier[output_core_dims] =[[ identifier[dim] ]],
identifier[output_dtypes] =[ identifier[self] . identifier[dtype] ],
identifier[dask] = literal[string] ,
identifier[vectorize] = keyword[True] ,
identifier[keep_attrs] = keyword[True] ). identifier[transpose] (* identifier[self] . identifier[dims] )
keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] :
identifier[arr] = identifier[arr] . identifier[where] ( identifier[valids] )
keyword[return] identifier[arr] | def interp_na(self, dim=None, use_coordinate=True, method='linear', limit=None, **kwargs):
"""Interpolate values according to different methods."""
if dim is None:
raise NotImplementedError('dim is a required argument') # depends on [control=['if'], data=[]]
if limit is not None:
valids = _get_valid_fill_mask(self, dim, limit) # depends on [control=['if'], data=['limit']]
# method
index = get_clean_interp_index(self, dim, use_coordinate=use_coordinate, **kwargs)
(interp_class, kwargs) = _get_interpolator(method, **kwargs)
interpolator = partial(func_interpolate_na, interp_class, **kwargs)
with warnings.catch_warnings():
warnings.filterwarnings('ignore', 'overflow', RuntimeWarning)
warnings.filterwarnings('ignore', 'invalid value', RuntimeWarning)
arr = apply_ufunc(interpolator, index, self, input_core_dims=[[dim], [dim]], output_core_dims=[[dim]], output_dtypes=[self.dtype], dask='parallelized', vectorize=True, keep_attrs=True).transpose(*self.dims) # depends on [control=['with'], data=[]]
if limit is not None:
arr = arr.where(valids) # depends on [control=['if'], data=[]]
return arr |
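`interp_na` is an internal helper; in xarray the same behaviour is reachable through the public `DataArray.interpolate_na` wrapper, so the usage sketch goes through that instead:

import numpy as np
import xarray as xr

da = xr.DataArray([0.0, np.nan, 2.0, np.nan, 4.0], dims='x',
                  coords={'x': np.arange(5)})
filled = da.interpolate_na(dim='x', method='linear')
print(filled.values)  # [0. 1. 2. 3. 4.]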
def assoc(inst, **changes):
"""
Copy *inst* and apply *changes*.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
be found on *cls*.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. deprecated:: 17.1.0
Use :func:`evolve` instead.
"""
import warnings
warnings.warn(
"assoc is deprecated and will be removed after 2018/01.",
DeprecationWarning,
stacklevel=2,
)
new = copy.copy(inst)
attrs = fields(inst.__class__)
for k, v in iteritems(changes):
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
"{k} is not an attrs attribute on {cl}.".format(
k=k, cl=new.__class__
)
)
_obj_setattr(new, k, v)
return new | def function[assoc, parameter[inst]]:
constant[
Copy *inst* and apply *changes*.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
be found on *cls*.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. deprecated:: 17.1.0
Use :func:`evolve` instead.
]
import module[warnings]
call[name[warnings].warn, parameter[constant[assoc is deprecated and will be removed after 2018/01.], name[DeprecationWarning]]]
variable[new] assign[=] call[name[copy].copy, parameter[name[inst]]]
variable[attrs] assign[=] call[name[fields], parameter[name[inst].__class__]]
for taget[tuple[[<ast.Name object at 0x7da1b1de19f0>, <ast.Name object at 0x7da1b1de3610>]]] in starred[call[name[iteritems], parameter[name[changes]]]] begin[:]
variable[a] assign[=] call[name[getattr], parameter[name[attrs], name[k], name[NOTHING]]]
if compare[name[a] is name[NOTHING]] begin[:]
<ast.Raise object at 0x7da1b1de1b40>
call[name[_obj_setattr], parameter[name[new], name[k], name[v]]]
return[name[new]] | keyword[def] identifier[assoc] ( identifier[inst] ,** identifier[changes] ):
literal[string]
keyword[import] identifier[warnings]
identifier[warnings] . identifier[warn] (
literal[string] ,
identifier[DeprecationWarning] ,
identifier[stacklevel] = literal[int] ,
)
identifier[new] = identifier[copy] . identifier[copy] ( identifier[inst] )
identifier[attrs] = identifier[fields] ( identifier[inst] . identifier[__class__] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[changes] ):
identifier[a] = identifier[getattr] ( identifier[attrs] , identifier[k] , identifier[NOTHING] )
keyword[if] identifier[a] keyword[is] identifier[NOTHING] :
keyword[raise] identifier[AttrsAttributeNotFoundError] (
literal[string] . identifier[format] (
identifier[k] = identifier[k] , identifier[cl] = identifier[new] . identifier[__class__]
)
)
identifier[_obj_setattr] ( identifier[new] , identifier[k] , identifier[v] )
keyword[return] identifier[new] | def assoc(inst, **changes):
"""
Copy *inst* and apply *changes*.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
be found on *cls*.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. deprecated:: 17.1.0
Use :func:`evolve` instead.
"""
import warnings
warnings.warn('assoc is deprecated and will be removed after 2018/01.', DeprecationWarning, stacklevel=2)
new = copy.copy(inst)
attrs = fields(inst.__class__)
for (k, v) in iteritems(changes):
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError('{k} is not an attrs attribute on {cl}.'.format(k=k, cl=new.__class__)) # depends on [control=['if'], data=[]]
_obj_setattr(new, k, v) # depends on [control=['for'], data=[]]
return new |
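`assoc` is the deprecated predecessor of `attr.evolve`; a short sketch of the replacement the deprecation warning points at:

import attr

@attr.s
class Point(object):
    x = attr.ib()
    y = attr.ib()

p = Point(1, 2)
q = attr.evolve(p, y=5)   # preferred over assoc since 17.1.0
print(q)                  # Point(x=1, y=5)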
def inner_fork_insanity_checks(pipeline_string):
"""
This function performs two sanity checks in the pipeline string. The first
    check assures that each fork contains a lane token '|', while the second
check looks for duplicated processes within the same fork.
Parameters
----------
pipeline_string: str
String with the definition of the pipeline, e.g.::
'processA processB processC(ProcessD | ProcessE)'
"""
# first lets get all forks to a list.
list_of_forks = [] # stores forks
left_indexes = [] # stores indexes of left brackets
# iterate through the string looking for '(' and ')'.
for pos, char in enumerate(pipeline_string):
if char == FORK_TOKEN:
# saves pos to left_indexes list
left_indexes.append(pos)
elif char == CLOSE_TOKEN and len(left_indexes) > 0:
# saves fork to list_of_forks
list_of_forks.append(pipeline_string[left_indexes[-1] + 1: pos])
# removes last bracket from left_indexes list
left_indexes = left_indexes[:-1]
# sort list in descending order of number of forks
list_of_forks.sort(key=lambda x: x.count(FORK_TOKEN), reverse=True)
# Now, we can iterate through list_of_forks and check for errors in each
# fork
for fork in list_of_forks:
# remove inner forks for these checks since each fork has its own entry
        # in list_of_forks. Note that the forks are sorted in descending
        # order, which lets us sequentially strip out the fork strings that
        # may contain more inner forks
        for subfork in list_of_forks:
            # checks if subfork is contained in fork and if they are
            # different, avoiding removing the fork itself
            if subfork in fork and subfork != fork:
# removes inner forks. Note that string has no spaces
fork_simplified = fork.replace("({})".format(subfork), "")
else:
fork_simplified = fork
# Checks if there is no fork separator character '|' within each fork
if not len(fork_simplified.split(LANE_TOKEN)) > 1:
raise SanityError("One of the forks doesn't have '|' "
"separator between the processes to fork. This is"
" the prime suspect: '({})'".format(fork)) | def function[inner_fork_insanity_checks, parameter[pipeline_string]]:
constant[
This function performs two sanity checks in the pipeline string. The first
    check assures that each fork contains a lane token '|', while the second
check looks for duplicated processes within the same fork.
Parameters
----------
pipeline_string: str
String with the definition of the pipeline, e.g.::
'processA processB processC(ProcessD | ProcessE)'
]
variable[list_of_forks] assign[=] list[[]]
variable[left_indexes] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b021bf10>, <ast.Name object at 0x7da1b021b940>]]] in starred[call[name[enumerate], parameter[name[pipeline_string]]]] begin[:]
if compare[name[char] equal[==] name[FORK_TOKEN]] begin[:]
call[name[left_indexes].append, parameter[name[pos]]]
call[name[list_of_forks].sort, parameter[]]
for taget[name[fork]] in starred[name[list_of_forks]] begin[:]
for taget[name[subfork]] in starred[name[list_of_forks]] begin[:]
if <ast.BoolOp object at 0x7da1b03b82e0> begin[:]
variable[fork_simplified] assign[=] call[name[fork].replace, parameter[call[constant[({})].format, parameter[name[subfork]]], constant[]]]
if <ast.UnaryOp object at 0x7da1b03bae60> begin[:]
<ast.Raise object at 0x7da1b03babf0> | keyword[def] identifier[inner_fork_insanity_checks] ( identifier[pipeline_string] ):
literal[string]
identifier[list_of_forks] =[]
identifier[left_indexes] =[]
keyword[for] identifier[pos] , identifier[char] keyword[in] identifier[enumerate] ( identifier[pipeline_string] ):
keyword[if] identifier[char] == identifier[FORK_TOKEN] :
identifier[left_indexes] . identifier[append] ( identifier[pos] )
keyword[elif] identifier[char] == identifier[CLOSE_TOKEN] keyword[and] identifier[len] ( identifier[left_indexes] )> literal[int] :
identifier[list_of_forks] . identifier[append] ( identifier[pipeline_string] [ identifier[left_indexes] [- literal[int] ]+ literal[int] : identifier[pos] ])
identifier[left_indexes] = identifier[left_indexes] [:- literal[int] ]
identifier[list_of_forks] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[count] ( identifier[FORK_TOKEN] ), identifier[reverse] = keyword[True] )
keyword[for] identifier[fork] keyword[in] identifier[list_of_forks] :
keyword[for] identifier[subfork] keyword[in] identifier[list_of_forks] :
            keyword[if] identifier[subfork] keyword[in] identifier[fork] keyword[and] identifier[subfork] != identifier[fork] :
identifier[fork_simplified] = identifier[fork] . identifier[replace] ( literal[string] . identifier[format] ( identifier[subfork] ), literal[string] )
keyword[else] :
identifier[fork_simplified] = identifier[fork]
keyword[if] keyword[not] identifier[len] ( identifier[fork_simplified] . identifier[split] ( identifier[LANE_TOKEN] ))> literal[int] :
keyword[raise] identifier[SanityError] ( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[fork] )) | def inner_fork_insanity_checks(pipeline_string):
"""
This function performs two sanity checks in the pipeline string. The first
    check assures that each fork contains a lane token '|', while the second
check looks for duplicated processes within the same fork.
Parameters
----------
pipeline_string: str
String with the definition of the pipeline, e.g.::
'processA processB processC(ProcessD | ProcessE)'
"""
# first lets get all forks to a list.
list_of_forks = [] # stores forks
left_indexes = [] # stores indexes of left brackets
# iterate through the string looking for '(' and ')'.
for (pos, char) in enumerate(pipeline_string):
if char == FORK_TOKEN:
# saves pos to left_indexes list
left_indexes.append(pos) # depends on [control=['if'], data=[]]
elif char == CLOSE_TOKEN and len(left_indexes) > 0:
# saves fork to list_of_forks
list_of_forks.append(pipeline_string[left_indexes[-1] + 1:pos])
# removes last bracket from left_indexes list
left_indexes = left_indexes[:-1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# sort list in descending order of number of forks
list_of_forks.sort(key=lambda x: x.count(FORK_TOKEN), reverse=True)
# Now, we can iterate through list_of_forks and check for errors in each
# fork
for fork in list_of_forks:
# remove inner forks for these checks since each fork has its own entry
        # in list_of_forks. Note that the forks are sorted in descending
        # order, which lets us sequentially strip out the fork strings that
        # may contain more inner forks
        for subfork in list_of_forks:
            # checks if subfork is contained in fork and if they are
            # different, avoiding removing the fork itself
            if subfork in fork and subfork != fork:
# removes inner forks. Note that string has no spaces
fork_simplified = fork.replace('({})'.format(subfork), '') # depends on [control=['if'], data=[]]
else:
fork_simplified = fork # depends on [control=['for'], data=['subfork']]
# Checks if there is no fork separator character '|' within each fork
if not len(fork_simplified.split(LANE_TOKEN)) > 1:
raise SanityError("One of the forks doesn't have '|' separator between the processes to fork. This is the prime suspect: '({})'".format(fork)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fork']] |
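A sketch exercising the check above. `FORK_TOKEN`, `CLOSE_TOKEN`, `LANE_TOKEN` and `SanityError` come from the surrounding module, so the values assumed below are stand-ins:

FORK_TOKEN, CLOSE_TOKEN, LANE_TOKEN = '(', ')', '|'   # assumed token values

class SanityError(Exception):
    pass

inner_fork_insanity_checks('procA(procB|procC(procD|procE))')  # passes silently
inner_fork_insanity_checks('procA(procB)')  # raises SanityError: fork lacks '|'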
def parse_bool(cls, value, default=None):
"""Convert ``string`` or ``bool`` to ``bool``."""
if value is None:
return default
elif isinstance(value, bool):
return value
elif isinstance(value, str):
if value == 'True':
return True
elif value == 'False':
return False
raise Exception("Value %s is not boolean." % value) | def function[parse_bool, parameter[cls, value, default]]:
constant[Convert ``string`` or ``bool`` to ``bool``.]
if compare[name[value] is constant[None]] begin[:]
return[name[default]]
<ast.Raise object at 0x7da18f00e290> | keyword[def] identifier[parse_bool] ( identifier[cls] , identifier[value] , identifier[default] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[default]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[bool] ):
keyword[return] identifier[value]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[str] ):
keyword[if] identifier[value] == literal[string] :
keyword[return] keyword[True]
keyword[elif] identifier[value] == literal[string] :
keyword[return] keyword[False]
keyword[raise] identifier[Exception] ( literal[string] % identifier[value] ) | def parse_bool(cls, value, default=None):
"""Convert ``string`` or ``bool`` to ``bool``."""
if value is None:
return default # depends on [control=['if'], data=[]]
elif isinstance(value, bool):
return value # depends on [control=['if'], data=[]]
elif isinstance(value, str):
if value == 'True':
return True # depends on [control=['if'], data=[]]
elif value == 'False':
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
raise Exception('Value %s is not boolean.' % value) |
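parse_bool is written as a classmethod body; a quick demonstration with a trivial, hypothetical host class:

class Config:
    parse_bool = classmethod(parse_bool)  # attach the function from the record

print(Config.parse_bool(None, default=False))  # False: falls back to default
print(Config.parse_bool(True))                 # True: bools pass through
print(Config.parse_bool("False"))              # False: exact string match only
# Config.parse_bool("yes") raises Exception("Value yes is not boolean.")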
def press(self, key_code):
""" Sends a 'down' event for the specified scan code """
if key_code >= 128:
# Media key
ev = NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_(
14, # type
(0, 0), # location
0xa00, # flags
0, # timestamp
0, # window
0, # ctx
8, # subtype
((key_code-128) << 16) | (0xa << 8), # data1
-1 # data2
)
Quartz.CGEventPost(0, ev.CGEvent())
else:
# Regular key
# Apply modifiers if necessary
event_flags = 0
if self.current_modifiers["shift"]:
event_flags += Quartz.kCGEventFlagMaskShift
if self.current_modifiers["caps"]:
event_flags += Quartz.kCGEventFlagMaskAlphaShift
if self.current_modifiers["alt"]:
event_flags += Quartz.kCGEventFlagMaskAlternate
if self.current_modifiers["ctrl"]:
event_flags += Quartz.kCGEventFlagMaskControl
if self.current_modifiers["cmd"]:
event_flags += Quartz.kCGEventFlagMaskCommand
# Update modifiers if necessary
if key_code == 0x37: # cmd
self.current_modifiers["cmd"] = True
elif key_code == 0x38 or key_code == 0x3C: # shift or right shift
self.current_modifiers["shift"] = True
elif key_code == 0x39: # caps lock
self.current_modifiers["caps"] = True
elif key_code == 0x3A: # alt
self.current_modifiers["alt"] = True
elif key_code == 0x3B: # ctrl
self.current_modifiers["ctrl"] = True
event = Quartz.CGEventCreateKeyboardEvent(None, key_code, True)
Quartz.CGEventSetFlags(event, event_flags)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, event)
time.sleep(0.01) | def function[press, parameter[self, key_code]]:
constant[ Sends a 'down' event for the specified scan code ]
if compare[name[key_code] greater_or_equal[>=] constant[128]] begin[:]
variable[ev] assign[=] call[name[NSEvent].otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_, parameter[constant[14], tuple[[<ast.Constant object at 0x7da1b1b01ba0>, <ast.Constant object at 0x7da1b1b02050>]], constant[2560], constant[0], constant[0], constant[0], constant[8], binary_operation[binary_operation[binary_operation[name[key_code] - constant[128]] <ast.LShift object at 0x7da2590d69e0> constant[16]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[10] <ast.LShift object at 0x7da2590d69e0> constant[8]]], <ast.UnaryOp object at 0x7da1b1b01d20>]]
call[name[Quartz].CGEventPost, parameter[constant[0], call[name[ev].CGEvent, parameter[]]]] | keyword[def] identifier[press] ( identifier[self] , identifier[key_code] ):
literal[string]
keyword[if] identifier[key_code] >= literal[int] :
identifier[ev] = identifier[NSEvent] . identifier[otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_] (
literal[int] ,
( literal[int] , literal[int] ),
literal[int] ,
literal[int] ,
literal[int] ,
literal[int] ,
literal[int] ,
(( identifier[key_code] - literal[int] )<< literal[int] )|( literal[int] << literal[int] ),
- literal[int]
)
identifier[Quartz] . identifier[CGEventPost] ( literal[int] , identifier[ev] . identifier[CGEvent] ())
keyword[else] :
identifier[event_flags] = literal[int]
keyword[if] identifier[self] . identifier[current_modifiers] [ literal[string] ]:
identifier[event_flags] += identifier[Quartz] . identifier[kCGEventFlagMaskShift]
keyword[if] identifier[self] . identifier[current_modifiers] [ literal[string] ]:
identifier[event_flags] += identifier[Quartz] . identifier[kCGEventFlagMaskAlphaShift]
keyword[if] identifier[self] . identifier[current_modifiers] [ literal[string] ]:
identifier[event_flags] += identifier[Quartz] . identifier[kCGEventFlagMaskAlternate]
keyword[if] identifier[self] . identifier[current_modifiers] [ literal[string] ]:
identifier[event_flags] += identifier[Quartz] . identifier[kCGEventFlagMaskControl]
keyword[if] identifier[self] . identifier[current_modifiers] [ literal[string] ]:
identifier[event_flags] += identifier[Quartz] . identifier[kCGEventFlagMaskCommand]
keyword[if] identifier[key_code] == literal[int] :
identifier[self] . identifier[current_modifiers] [ literal[string] ]= keyword[True]
keyword[elif] identifier[key_code] == literal[int] keyword[or] identifier[key_code] == literal[int] :
identifier[self] . identifier[current_modifiers] [ literal[string] ]= keyword[True]
keyword[elif] identifier[key_code] == literal[int] :
identifier[self] . identifier[current_modifiers] [ literal[string] ]= keyword[True]
keyword[elif] identifier[key_code] == literal[int] :
identifier[self] . identifier[current_modifiers] [ literal[string] ]= keyword[True]
keyword[elif] identifier[key_code] == literal[int] :
identifier[self] . identifier[current_modifiers] [ literal[string] ]= keyword[True]
identifier[event] = identifier[Quartz] . identifier[CGEventCreateKeyboardEvent] ( keyword[None] , identifier[key_code] , keyword[True] )
identifier[Quartz] . identifier[CGEventSetFlags] ( identifier[event] , identifier[event_flags] )
identifier[Quartz] . identifier[CGEventPost] ( identifier[Quartz] . identifier[kCGHIDEventTap] , identifier[event] )
identifier[time] . identifier[sleep] ( literal[int] ) | def press(self, key_code):
""" Sends a 'down' event for the specified scan code """
if key_code >= 128:
# Media key
# type
# location
# flags
# timestamp
# window
# ctx
# subtype
# data1
# data2
ev = NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_(14, (0, 0), 2560, 0, 0, 0, 8, key_code - 128 << 16 | 10 << 8, -1)
Quartz.CGEventPost(0, ev.CGEvent()) # depends on [control=['if'], data=['key_code']]
else:
# Regular key
# Apply modifiers if necessary
event_flags = 0
if self.current_modifiers['shift']:
event_flags += Quartz.kCGEventFlagMaskShift # depends on [control=['if'], data=[]]
if self.current_modifiers['caps']:
event_flags += Quartz.kCGEventFlagMaskAlphaShift # depends on [control=['if'], data=[]]
if self.current_modifiers['alt']:
event_flags += Quartz.kCGEventFlagMaskAlternate # depends on [control=['if'], data=[]]
if self.current_modifiers['ctrl']:
event_flags += Quartz.kCGEventFlagMaskControl # depends on [control=['if'], data=[]]
if self.current_modifiers['cmd']:
event_flags += Quartz.kCGEventFlagMaskCommand # depends on [control=['if'], data=[]]
# Update modifiers if necessary
if key_code == 55: # cmd
self.current_modifiers['cmd'] = True # depends on [control=['if'], data=[]]
elif key_code == 56 or key_code == 60: # shift or right shift
self.current_modifiers['shift'] = True # depends on [control=['if'], data=[]]
elif key_code == 57: # caps lock
self.current_modifiers['caps'] = True # depends on [control=['if'], data=[]]
elif key_code == 58: # alt
self.current_modifiers['alt'] = True # depends on [control=['if'], data=[]]
elif key_code == 59: # ctrl
self.current_modifiers['ctrl'] = True # depends on [control=['if'], data=[]]
event = Quartz.CGEventCreateKeyboardEvent(None, key_code, True)
Quartz.CGEventSetFlags(event, event_flags)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, event)
time.sleep(0.01) |
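The media-key branch packs the scan code and an assumed key-down subtype (the 0xa nibble) into NSEvent's data1 word; the packing is plain integer arithmetic and can be checked without Quartz:

def media_key_data1(key_code):
    # Mirrors ((key_code - 128) << 16) | (0xa << 8) from press() above.
    return ((key_code - 128) << 16) | (0xA << 8)

print(hex(media_key_data1(128)))  # 0xa00   (media key 0, key-down subtype)
print(hex(media_key_data1(130)))  # 0x20a00 (media key 2)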
def read_tsplib(filename):
"basic function for reading a symmetric problem in the TSPLIB format"
"data is stored in an upper triangular matrix"
"NOTE: some distance types are not handled yet"
if filename[-3:] == ".gz":
f = gzip.open(filename, "rt")
else:
f = open(filename)
line = f.readline()
while line.find("DIMENSION") == -1:
line = f.readline()
n = int(line.split()[-1])
while line.find("EDGE_WEIGHT_TYPE") == -1:
line = f.readline()
if line.find("EUC_2D") != -1:
dist = distL2
elif line.find("MAN_2D") != -1:
dist = distL1
elif line.find("MAX_2D") != -1:
dist = distLinf
elif line.find("ATT") != -1:
dist = distATT
elif line.find("CEIL_2D") != -1:
dist = distCEIL2D
# elif line.find("GEO") != -1:
# print("geographic"
# dist = distGEO
elif line.find("EXPLICIT") != -1:
while line.find("EDGE_WEIGHT_FORMAT") == -1:
line = f.readline()
if line.find("LOWER_DIAG_ROW") != -1:
while line.find("EDGE_WEIGHT_SECTION") == -1:
line = f.readline()
return read_explicit_lowerdiag(f,n)
if line.find("UPPER_ROW") != -1:
while line.find("EDGE_WEIGHT_SECTION") == -1:
line = f.readline()
return read_explicit_upper(f,n)
if line.find("UPPER_DIAG_ROW") != -1:
while line.find("EDGE_WEIGHT_SECTION") == -1:
line = f.readline()
return read_explicit_upperdiag(f,n)
if line.find("FULL_MATRIX") != -1:
while line.find("EDGE_WEIGHT_SECTION") == -1:
line = f.readline()
return read_explicit_matrix(f,n)
print("error reading line " + line)
        raise Exception
else:
print("cannot deal with '%s' distances" % line)
raise Exception
while line.find("NODE_COORD_SECTION") == -1:
line = f.readline()
x,y = {},{}
while 1:
line = f.readline()
if line.find("EOF") != -1 or not line: break
(i,xi,yi) = line.split()
x[i] = float(xi)
y[i] = float(yi)
V = x.keys()
c = {} # dictionary to hold n times n matrix
for i in V:
for j in V:
c[i,j] = dist(x[i],y[i],x[j],y[j])
return V,c,x,y | def function[read_tsplib, parameter[filename]]:
constant[basic function for reading a symmetric problem in the TSPLIB format]
constant[data is stored in an upper triangular matrix]
constant[NOTE: some distance types are not handled yet]
if compare[call[name[filename]][<ast.Slice object at 0x7da18f00cfa0>] equal[==] constant[.gz]] begin[:]
variable[f] assign[=] call[name[gzip].open, parameter[name[filename], constant[rt]]]
variable[line] assign[=] call[name[f].readline, parameter[]]
while compare[call[name[line].find, parameter[constant[DIMENSION]]] equal[==] <ast.UnaryOp object at 0x7da18f00cd60>] begin[:]
variable[line] assign[=] call[name[f].readline, parameter[]]
variable[n] assign[=] call[name[int], parameter[call[call[name[line].split, parameter[]]][<ast.UnaryOp object at 0x7da18f00dea0>]]]
while compare[call[name[line].find, parameter[constant[EDGE_WEIGHT_TYPE]]] equal[==] <ast.UnaryOp object at 0x7da18f00c310>] begin[:]
variable[line] assign[=] call[name[f].readline, parameter[]]
if compare[call[name[line].find, parameter[constant[EUC_2D]]] not_equal[!=] <ast.UnaryOp object at 0x7da18f00dc60>] begin[:]
variable[dist] assign[=] name[distL2]
while compare[call[name[line].find, parameter[constant[NODE_COORD_SECTION]]] equal[==] <ast.UnaryOp object at 0x7da1b18e6830>] begin[:]
variable[line] assign[=] call[name[f].readline, parameter[]]
<ast.Tuple object at 0x7da1b18e6f50> assign[=] tuple[[<ast.Dict object at 0x7da1b18e57e0>, <ast.Dict object at 0x7da1b18e7400>]]
while constant[1] begin[:]
variable[line] assign[=] call[name[f].readline, parameter[]]
if <ast.BoolOp object at 0x7da1b18e6590> begin[:]
break
<ast.Tuple object at 0x7da1b18e6a70> assign[=] call[name[line].split, parameter[]]
call[name[x]][name[i]] assign[=] call[name[float], parameter[name[xi]]]
call[name[y]][name[i]] assign[=] call[name[float], parameter[name[yi]]]
variable[V] assign[=] call[name[x].keys, parameter[]]
variable[c] assign[=] dictionary[[], []]
for taget[name[i]] in starred[name[V]] begin[:]
for taget[name[j]] in starred[name[V]] begin[:]
call[name[c]][tuple[[<ast.Name object at 0x7da1b18e59f0>, <ast.Name object at 0x7da1b18e7490>]]] assign[=] call[name[dist], parameter[call[name[x]][name[i]], call[name[y]][name[i]], call[name[x]][name[j]], call[name[y]][name[j]]]]
return[tuple[[<ast.Name object at 0x7da1b18e44f0>, <ast.Name object at 0x7da1b18e7700>, <ast.Name object at 0x7da1b18e7eb0>, <ast.Name object at 0x7da1b18e54e0>]]] | keyword[def] identifier[read_tsplib] ( identifier[filename] ):
literal[string]
literal[string]
literal[string]
keyword[if] identifier[filename] [- literal[int] :]== literal[string] :
identifier[f] = identifier[gzip] . identifier[open] ( identifier[filename] , literal[string] )
keyword[else] :
identifier[f] = identifier[open] ( identifier[filename] )
identifier[line] = identifier[f] . identifier[readline] ()
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
identifier[n] = identifier[int] ( identifier[line] . identifier[split] ()[- literal[int] ])
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[if] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[dist] = identifier[distL2]
keyword[elif] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[dist] = identifier[distL1]
keyword[elif] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[dist] = identifier[distLinf]
keyword[elif] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[dist] = identifier[distATT]
keyword[elif] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[dist] = identifier[distCEIL2D]
keyword[elif] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[if] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[return] identifier[read_explicit_lowerdiag] ( identifier[f] , identifier[n] )
keyword[if] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[return] identifier[read_explicit_upper] ( identifier[f] , identifier[n] )
keyword[if] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[return] identifier[read_explicit_upperdiag] ( identifier[f] , identifier[n] )
keyword[if] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] :
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[return] identifier[read_explicit_matrix] ( identifier[f] , identifier[n] )
identifier[print] ( literal[string] + identifier[line] )
keyword[raise] ( identifier[Exception] )
keyword[else] :
identifier[print] ( literal[string] % identifier[line] )
keyword[raise] identifier[Exception]
keyword[while] identifier[line] . identifier[find] ( literal[string] )==- literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
identifier[x] , identifier[y] ={},{}
keyword[while] literal[int] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[if] identifier[line] . identifier[find] ( literal[string] )!=- literal[int] keyword[or] keyword[not] identifier[line] : keyword[break]
( identifier[i] , identifier[xi] , identifier[yi] )= identifier[line] . identifier[split] ()
identifier[x] [ identifier[i] ]= identifier[float] ( identifier[xi] )
identifier[y] [ identifier[i] ]= identifier[float] ( identifier[yi] )
identifier[V] = identifier[x] . identifier[keys] ()
identifier[c] ={}
keyword[for] identifier[i] keyword[in] identifier[V] :
keyword[for] identifier[j] keyword[in] identifier[V] :
identifier[c] [ identifier[i] , identifier[j] ]= identifier[dist] ( identifier[x] [ identifier[i] ], identifier[y] [ identifier[i] ], identifier[x] [ identifier[j] ], identifier[y] [ identifier[j] ])
keyword[return] identifier[V] , identifier[c] , identifier[x] , identifier[y] | def read_tsplib(filename):
"""basic function for reading a symmetric problem in the TSPLIB format"""
'data is stored in an upper triangular matrix'
'NOTE: some distance types are not handled yet'
if filename[-3:] == '.gz':
f = gzip.open(filename, 'rt') # depends on [control=['if'], data=[]]
else:
f = open(filename)
line = f.readline()
while line.find('DIMENSION') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
n = int(line.split()[-1])
while line.find('EDGE_WEIGHT_TYPE') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
if line.find('EUC_2D') != -1:
dist = distL2 # depends on [control=['if'], data=[]]
elif line.find('MAN_2D') != -1:
dist = distL1 # depends on [control=['if'], data=[]]
elif line.find('MAX_2D') != -1:
dist = distLinf # depends on [control=['if'], data=[]]
elif line.find('ATT') != -1:
dist = distATT # depends on [control=['if'], data=[]]
elif line.find('CEIL_2D') != -1:
dist = distCEIL2D # depends on [control=['if'], data=[]]
# elif line.find("GEO") != -1:
# print("geographic"
# dist = distGEO
elif line.find('EXPLICIT') != -1:
while line.find('EDGE_WEIGHT_FORMAT') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
if line.find('LOWER_DIAG_ROW') != -1:
while line.find('EDGE_WEIGHT_SECTION') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
return read_explicit_lowerdiag(f, n) # depends on [control=['if'], data=[]]
if line.find('UPPER_ROW') != -1:
while line.find('EDGE_WEIGHT_SECTION') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
return read_explicit_upper(f, n) # depends on [control=['if'], data=[]]
if line.find('UPPER_DIAG_ROW') != -1:
while line.find('EDGE_WEIGHT_SECTION') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
return read_explicit_upperdiag(f, n) # depends on [control=['if'], data=[]]
if line.find('FULL_MATRIX') != -1:
while line.find('EDGE_WEIGHT_SECTION') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
return read_explicit_matrix(f, n) # depends on [control=['if'], data=[]]
print('error reading line ' + line)
raise Exception # depends on [control=['if'], data=[]]
else:
print("cannot deal with '%s' distances" % line)
raise Exception
while line.find('NODE_COORD_SECTION') == -1:
line = f.readline() # depends on [control=['while'], data=[]]
(x, y) = ({}, {})
while 1:
line = f.readline()
if line.find('EOF') != -1 or not line:
break # depends on [control=['if'], data=[]]
(i, xi, yi) = line.split()
x[i] = float(xi)
y[i] = float(yi) # depends on [control=['while'], data=[]]
V = x.keys()
c = {} # dictionary to hold n times n matrix
for i in V:
for j in V:
c[i, j] = dist(x[i], y[i], x[j], y[j]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
return (V, c, x, y) |
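A usage sketch for read_tsplib; the distance helpers (distL2 and friends) are module-level functions the record does not include, so a TSPLIB-style Euclidean one is assumed here:

import math

def distL2(x1, y1, x2, y2):
    # TSPLIB EUC_2D: Euclidean distance rounded to the nearest integer.
    return int(math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) + 0.5)

V, c, x, y = read_tsplib("berlin52.tsp")  # hypothetical EUC_2D instance
tour = list(V)
length = sum(c[tour[i - 1], tour[i]] for i in range(len(tour)))
print(len(tour), "cities; naive tour length:", length)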
def text(self, tag, textdata, step=None):
"""Saves a text summary.
Args:
tag: str: label for this data
textdata: string, or 1D/2D list/numpy array of strings
step: int: training step
Note: markdown formatting is rendered by tensorboard.
"""
if step is None:
step = self._step
else:
self._step = step
smd = SummaryMetadata(
plugin_data=SummaryMetadata.PluginData(plugin_name='text'))
if isinstance(textdata, (str, bytes)):
tensor = tf.make_tensor_proto(
values=[textdata.encode(encoding='utf_8')], shape=(1,))
else:
textdata = onp.array(textdata) # convert lists, jax arrays, etc.
datashape = onp.shape(textdata)
if len(datashape) == 1:
tensor = tf.make_tensor_proto(
values=[td.encode(encoding='utf_8') for td in textdata],
shape=(datashape[0],))
      elif len(datashape) == 2:
        tensor = tf.make_tensor_proto(
            values=[
                td.encode(encoding='utf_8') for td in onp.reshape(textdata, -1)
            ],
            shape=(datashape[0], datashape[1]))
      else:
        raise ValueError(
            'textdata must be a str or a 1D/2D array of strings, '
            'got shape %s' % (datashape,))
summary = Summary(
value=[Summary.Value(tag=tag, metadata=smd, tensor=tensor)])
self.add_summary(summary, step) | def function[text, parameter[self, tag, textdata, step]]:
constant[Saves a text summary.
Args:
tag: str: label for this data
textdata: string, or 1D/2D list/numpy array of strings
step: int: training step
Note: markdown formatting is rendered by tensorboard.
]
if compare[name[step] is constant[None]] begin[:]
variable[step] assign[=] name[self]._step
variable[smd] assign[=] call[name[SummaryMetadata], parameter[]]
if call[name[isinstance], parameter[name[textdata], tuple[[<ast.Name object at 0x7da207f03e50>, <ast.Name object at 0x7da207f032e0>]]]] begin[:]
variable[tensor] assign[=] call[name[tf].make_tensor_proto, parameter[]]
variable[summary] assign[=] call[name[Summary], parameter[]]
call[name[self].add_summary, parameter[name[summary], name[step]]] | keyword[def] identifier[text] ( identifier[self] , identifier[tag] , identifier[textdata] , identifier[step] = keyword[None] ):
literal[string]
keyword[if] identifier[step] keyword[is] keyword[None] :
identifier[step] = identifier[self] . identifier[_step]
keyword[else] :
identifier[self] . identifier[_step] = identifier[step]
identifier[smd] = identifier[SummaryMetadata] (
identifier[plugin_data] = identifier[SummaryMetadata] . identifier[PluginData] ( identifier[plugin_name] = literal[string] ))
keyword[if] identifier[isinstance] ( identifier[textdata] ,( identifier[str] , identifier[bytes] )):
identifier[tensor] = identifier[tf] . identifier[make_tensor_proto] (
identifier[values] =[ identifier[textdata] . identifier[encode] ( identifier[encoding] = literal[string] )], identifier[shape] =( literal[int] ,))
keyword[else] :
identifier[textdata] = identifier[onp] . identifier[array] ( identifier[textdata] )
identifier[datashape] = identifier[onp] . identifier[shape] ( identifier[textdata] )
keyword[if] identifier[len] ( identifier[datashape] )== literal[int] :
identifier[tensor] = identifier[tf] . identifier[make_tensor_proto] (
identifier[values] =[ identifier[td] . identifier[encode] ( identifier[encoding] = literal[string] ) keyword[for] identifier[td] keyword[in] identifier[textdata] ],
identifier[shape] =( identifier[datashape] [ literal[int] ],))
keyword[elif] identifier[len] ( identifier[datashape] )== literal[int] :
identifier[tensor] = identifier[tf] . identifier[make_tensor_proto] (
identifier[values] =[
identifier[td] . identifier[encode] ( identifier[encoding] = literal[string] ) keyword[for] identifier[td] keyword[in] identifier[onp] . identifier[reshape] ( identifier[textdata] ,- literal[int] )
],
identifier[shape] =( identifier[datashape] [ literal[int] ], identifier[datashape] [ literal[int] ]))
identifier[summary] = identifier[Summary] (
identifier[value] =[ identifier[Summary] . identifier[Value] ( identifier[tag] = identifier[tag] , identifier[metadata] = identifier[smd] , identifier[tensor] = identifier[tensor] )])
identifier[self] . identifier[add_summary] ( identifier[summary] , identifier[step] ) | def text(self, tag, textdata, step=None):
"""Saves a text summary.
Args:
tag: str: label for this data
textdata: string, or 1D/2D list/numpy array of strings
step: int: training step
Note: markdown formatting is rendered by tensorboard.
"""
if step is None:
step = self._step # depends on [control=['if'], data=['step']]
else:
self._step = step
smd = SummaryMetadata(plugin_data=SummaryMetadata.PluginData(plugin_name='text'))
if isinstance(textdata, (str, bytes)):
tensor = tf.make_tensor_proto(values=[textdata.encode(encoding='utf_8')], shape=(1,)) # depends on [control=['if'], data=[]]
else:
textdata = onp.array(textdata) # convert lists, jax arrays, etc.
datashape = onp.shape(textdata)
if len(datashape) == 1:
tensor = tf.make_tensor_proto(values=[td.encode(encoding='utf_8') for td in textdata], shape=(datashape[0],)) # depends on [control=['if'], data=[]]
elif len(datashape) == 2:
tensor = tf.make_tensor_proto(values=[td.encode(encoding='utf_8') for td in onp.reshape(textdata, -1)], shape=(datashape[0], datashape[1])) # depends on [control=['if'], data=[]]
summary = Summary(value=[Summary.Value(tag=tag, metadata=smd, tensor=tensor)])
self.add_summary(summary, step) |
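A usage sketch, assuming the method lives on a SummaryWriter-like class that provides _step and add_summary() and has tf, onp, Summary and SummaryMetadata in scope (none of which the record shows):

writer = SummaryWriter(log_dir="/tmp/logs")  # hypothetical host class

writer.text("notes", "**markdown** is rendered by tensorboard", step=0)
writer.text("samples", ["first", "second"], step=1)    # 1-D list
writer.text("grid", [["a", "b"], ["c", "d"]], step=2)  # 2-D list
# Arrays with more than two dimensions are not handled by the branches above.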
def client_key_loader(self, f):
"""Registers a function to be called to find a client key.
Function you set has to take a client id and return a client key::
@hawk.client_key_loader
def get_client_key(client_id):
if client_id == 'Alice':
return 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn'
else:
raise LookupError()
:param f: The callback for retrieving a client key.
"""
@wraps(f)
def wrapped_f(client_id):
client_key = f(client_id)
return {
'id': client_id,
'key': client_key,
'algorithm': current_app.config['HAWK_ALGORITHM']
}
self._client_key_loader_func = wrapped_f
return wrapped_f | def function[client_key_loader, parameter[self, f]]:
constant[Registers a function to be called to find a client key.
Function you set has to take a client id and return a client key::
@hawk.client_key_loader
def get_client_key(client_id):
if client_id == 'Alice':
return 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn'
else:
raise LookupError()
:param f: The callback for retrieving a client key.
]
def function[wrapped_f, parameter[client_id]]:
variable[client_key] assign[=] call[name[f], parameter[name[client_id]]]
return[dictionary[[<ast.Constant object at 0x7da18bc73310>, <ast.Constant object at 0x7da18bc71a80>, <ast.Constant object at 0x7da18bc706a0>], [<ast.Name object at 0x7da18bc700a0>, <ast.Name object at 0x7da18bc70fd0>, <ast.Subscript object at 0x7da18bc701f0>]]]
name[self]._client_key_loader_func assign[=] name[wrapped_f]
return[name[wrapped_f]] | keyword[def] identifier[client_key_loader] ( identifier[self] , identifier[f] ):
literal[string]
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapped_f] ( identifier[client_id] ):
identifier[client_key] = identifier[f] ( identifier[client_id] )
keyword[return] {
literal[string] : identifier[client_id] ,
literal[string] : identifier[client_key] ,
literal[string] : identifier[current_app] . identifier[config] [ literal[string] ]
}
identifier[self] . identifier[_client_key_loader_func] = identifier[wrapped_f]
keyword[return] identifier[wrapped_f] | def client_key_loader(self, f):
"""Registers a function to be called to find a client key.
Function you set has to take a client id and return a client key::
@hawk.client_key_loader
def get_client_key(client_id):
if client_id == 'Alice':
return 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn'
else:
raise LookupError()
:param f: The callback for retrieving a client key.
"""
@wraps(f)
def wrapped_f(client_id):
client_key = f(client_id)
return {'id': client_id, 'key': client_key, 'algorithm': current_app.config['HAWK_ALGORITHM']}
self._client_key_loader_func = wrapped_f
return wrapped_f |
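Registering a loader then follows the docstring; a compact sketch assuming a Hawk-style Flask extension instance:

hawk = HawkAuth(app)  # hypothetical extension exposing client_key_loader

@hawk.client_key_loader
def get_client_key(client_id):
    keys = {"Alice": "werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn"}
    try:
        return keys[client_id]
    except KeyError:
        raise LookupError(client_id)

# The stored wrapper returns the dict the verifier expects:
# {'id': client_id, 'key': ..., 'algorithm': current_app.config['HAWK_ALGORITHM']}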
def decompress_G2(p: G2Compressed) -> G2Uncompressed:
"""
Recovers x and y coordinates from the compressed point (z1, z2).
"""
z1, z2 = p
# b_flag == 1 indicates the infinity point
b_flag1 = (z1 % POW_2_383) // POW_2_382
if b_flag1 == 1:
return Z2
x1 = z1 % POW_2_381
x2 = z2
# x1 is the imaginary part, x2 is the real part
x = FQ2([x2, x1])
y = modular_squareroot_in_FQ2(x**3 + b2)
if y is None:
raise ValueError("Failed to find a modular squareroot")
# Choose the y whose leftmost bit of the imaginary part is equal to the a_flag1
# If y_im happens to be zero, then use the bit of y_re
a_flag1 = (z1 % POW_2_382) // POW_2_381
y_re, y_im = y.coeffs
if (y_im > 0 and (y_im * 2) // q != a_flag1) or (y_im == 0 and (y_re * 2) // q != a_flag1):
y = FQ2((y * -1).coeffs)
if not is_on_curve((x, y, FQ2([1, 0])), b2):
raise ValueError(
"The given point is not on the twisted curve over FQ**2"
)
return (x, y, FQ2([1, 0])) | def function[decompress_G2, parameter[p]]:
constant[
Recovers x and y coordinates from the compressed point (z1, z2).
]
<ast.Tuple object at 0x7da2041da080> assign[=] name[p]
variable[b_flag1] assign[=] binary_operation[binary_operation[name[z1] <ast.Mod object at 0x7da2590d6920> name[POW_2_383]] <ast.FloorDiv object at 0x7da2590d6bc0> name[POW_2_382]]
if compare[name[b_flag1] equal[==] constant[1]] begin[:]
return[name[Z2]]
variable[x1] assign[=] binary_operation[name[z1] <ast.Mod object at 0x7da2590d6920> name[POW_2_381]]
variable[x2] assign[=] name[z2]
variable[x] assign[=] call[name[FQ2], parameter[list[[<ast.Name object at 0x7da2041d8ac0>, <ast.Name object at 0x7da2041d9990>]]]]
variable[y] assign[=] call[name[modular_squareroot_in_FQ2], parameter[binary_operation[binary_operation[name[x] ** constant[3]] + name[b2]]]]
if compare[name[y] is constant[None]] begin[:]
<ast.Raise object at 0x7da2041dabf0>
variable[a_flag1] assign[=] binary_operation[binary_operation[name[z1] <ast.Mod object at 0x7da2590d6920> name[POW_2_382]] <ast.FloorDiv object at 0x7da2590d6bc0> name[POW_2_381]]
<ast.Tuple object at 0x7da2041db880> assign[=] name[y].coeffs
if <ast.BoolOp object at 0x7da2041d97b0> begin[:]
variable[y] assign[=] call[name[FQ2], parameter[binary_operation[name[y] * <ast.UnaryOp object at 0x7da18fe91cc0>].coeffs]]
if <ast.UnaryOp object at 0x7da18fe91bd0> begin[:]
<ast.Raise object at 0x7da18c4cdba0>
return[tuple[[<ast.Name object at 0x7da18c4cf190>, <ast.Name object at 0x7da18c4ccbb0>, <ast.Call object at 0x7da18c4cc7c0>]]] | keyword[def] identifier[decompress_G2] ( identifier[p] : identifier[G2Compressed] )-> identifier[G2Uncompressed] :
literal[string]
identifier[z1] , identifier[z2] = identifier[p]
identifier[b_flag1] =( identifier[z1] % identifier[POW_2_383] )// identifier[POW_2_382]
keyword[if] identifier[b_flag1] == literal[int] :
keyword[return] identifier[Z2]
identifier[x1] = identifier[z1] % identifier[POW_2_381]
identifier[x2] = identifier[z2]
identifier[x] = identifier[FQ2] ([ identifier[x2] , identifier[x1] ])
identifier[y] = identifier[modular_squareroot_in_FQ2] ( identifier[x] ** literal[int] + identifier[b2] )
keyword[if] identifier[y] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[a_flag1] =( identifier[z1] % identifier[POW_2_382] )// identifier[POW_2_381]
identifier[y_re] , identifier[y_im] = identifier[y] . identifier[coeffs]
keyword[if] ( identifier[y_im] > literal[int] keyword[and] ( identifier[y_im] * literal[int] )// identifier[q] != identifier[a_flag1] ) keyword[or] ( identifier[y_im] == literal[int] keyword[and] ( identifier[y_re] * literal[int] )// identifier[q] != identifier[a_flag1] ):
identifier[y] = identifier[FQ2] (( identifier[y] *- literal[int] ). identifier[coeffs] )
keyword[if] keyword[not] identifier[is_on_curve] (( identifier[x] , identifier[y] , identifier[FQ2] ([ literal[int] , literal[int] ])), identifier[b2] ):
keyword[raise] identifier[ValueError] (
literal[string]
)
keyword[return] ( identifier[x] , identifier[y] , identifier[FQ2] ([ literal[int] , literal[int] ])) | def decompress_G2(p: G2Compressed) -> G2Uncompressed:
"""
Recovers x and y coordinates from the compressed point (z1, z2).
"""
(z1, z2) = p
# b_flag == 1 indicates the infinity point
b_flag1 = z1 % POW_2_383 // POW_2_382
if b_flag1 == 1:
return Z2 # depends on [control=['if'], data=[]]
x1 = z1 % POW_2_381
x2 = z2
# x1 is the imaginary part, x2 is the real part
x = FQ2([x2, x1])
y = modular_squareroot_in_FQ2(x ** 3 + b2)
if y is None:
raise ValueError('Failed to find a modular squareroot') # depends on [control=['if'], data=[]]
# Choose the y whose leftmost bit of the imaginary part is equal to the a_flag1
# If y_im happens to be zero, then use the bit of y_re
a_flag1 = z1 % POW_2_382 // POW_2_381
(y_re, y_im) = y.coeffs
if y_im > 0 and y_im * 2 // q != a_flag1 or (y_im == 0 and y_re * 2 // q != a_flag1):
y = FQ2((y * -1).coeffs) # depends on [control=['if'], data=[]]
if not is_on_curve((x, y, FQ2([1, 0])), b2):
raise ValueError('The given point is not on the twisted curve over FQ**2') # depends on [control=['if'], data=[]]
return (x, y, FQ2([1, 0])) |
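The three metadata bits sit in the top bits of z1; extracting them is ordinary modular arithmetic, shown here in isolation:

POW_2_381, POW_2_382, POW_2_383 = 2 ** 381, 2 ** 382, 2 ** 383

def g2_flags(z1):
    c_flag = z1 // POW_2_383              # compression bit (topmost)
    b_flag = z1 % POW_2_383 // POW_2_382  # infinity bit, as used above
    a_flag = z1 % POW_2_382 // POW_2_381  # sign-of-y bit, as used above
    return c_flag, b_flag, a_flag

print(g2_flags(POW_2_383 + POW_2_381))  # (1, 0, 1)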
def replace_with_text_stream(stream_name):
"""Given a stream name, replace the target stream with a text-converted equivalent
:param str stream_name: The name of a target stream, such as **stdout** or **stderr**
:return: None
"""
new_stream = TEXT_STREAMS.get(stream_name)
if new_stream is not None:
new_stream = new_stream()
setattr(sys, stream_name, new_stream)
return None | def function[replace_with_text_stream, parameter[stream_name]]:
constant[Given a stream name, replace the target stream with a text-converted equivalent
:param str stream_name: The name of a target stream, such as **stdout** or **stderr**
:return: None
]
variable[new_stream] assign[=] call[name[TEXT_STREAMS].get, parameter[name[stream_name]]]
if compare[name[new_stream] is_not constant[None]] begin[:]
variable[new_stream] assign[=] call[name[new_stream], parameter[]]
call[name[setattr], parameter[name[sys], name[stream_name], name[new_stream]]]
return[constant[None]] | keyword[def] identifier[replace_with_text_stream] ( identifier[stream_name] ):
literal[string]
identifier[new_stream] = identifier[TEXT_STREAMS] . identifier[get] ( identifier[stream_name] )
keyword[if] identifier[new_stream] keyword[is] keyword[not] keyword[None] :
identifier[new_stream] = identifier[new_stream] ()
identifier[setattr] ( identifier[sys] , identifier[stream_name] , identifier[new_stream] )
keyword[return] keyword[None] | def replace_with_text_stream(stream_name):
"""Given a stream name, replace the target stream with a text-converted equivalent
:param str stream_name: The name of a target stream, such as **stdout** or **stderr**
:return: None
"""
new_stream = TEXT_STREAMS.get(stream_name)
if new_stream is not None:
new_stream = new_stream()
setattr(sys, stream_name, new_stream) # depends on [control=['if'], data=['new_stream']]
return None |
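Usage is one call per stream name; TEXT_STREAMS is a module-level mapping from names to wrapper factories that the record leaves out, so a minimal stand-in is assumed:

import sys

TEXT_STREAMS = {  # hypothetical mapping for illustration only
    "stdout": lambda: open(sys.stdout.fileno(), "w",
                           encoding="utf-8", closefd=False),
}

replace_with_text_stream("stdout")  # sys.stdout becomes a text wrapper
replace_with_text_stream("stdin")   # unknown name: silently a no-op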
def _init_deferred_buffers(self):
"""
        Initialize or reinitialize all the deferred transfer buffers
Calling this method will drop all pending transactions
so use with care.
"""
# List of transfers that have been started, but
# not completed (started by write_reg, read_reg,
# reg_write_repeat and reg_read_repeat)
self._transfer_list = collections.deque()
# The current packet - this can contain multiple
# different transfers
self._crnt_cmd = _Command(self._packet_size)
# Packets that have been sent but not read
self._commands_to_read = collections.deque()
# Buffer for data returned for completed commands.
# This data will be added to transfers
self._command_response_buf = bytearray() | def function[_init_deferred_buffers, parameter[self]]:
constant[
        Initialize or reinitialize all the deferred transfer buffers
Calling this method will drop all pending transactions
so use with care.
]
name[self]._transfer_list assign[=] call[name[collections].deque, parameter[]]
name[self]._crnt_cmd assign[=] call[name[_Command], parameter[name[self]._packet_size]]
name[self]._commands_to_read assign[=] call[name[collections].deque, parameter[]]
name[self]._command_response_buf assign[=] call[name[bytearray], parameter[]] | keyword[def] identifier[_init_deferred_buffers] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_transfer_list] = identifier[collections] . identifier[deque] ()
identifier[self] . identifier[_crnt_cmd] = identifier[_Command] ( identifier[self] . identifier[_packet_size] )
identifier[self] . identifier[_commands_to_read] = identifier[collections] . identifier[deque] ()
identifier[self] . identifier[_command_response_buf] = identifier[bytearray] () | def _init_deferred_buffers(self):
"""
        Initialize or reinitialize all the deferred transfer buffers
Calling this method will drop all pending transactions
so use with care.
"""
# List of transfers that have been started, but
# not completed (started by write_reg, read_reg,
# reg_write_repeat and reg_read_repeat)
self._transfer_list = collections.deque()
# The current packet - this can contain multiple
# different transfers
self._crnt_cmd = _Command(self._packet_size)
# Packets that have been sent but not read
self._commands_to_read = collections.deque()
# Buffer for data returned for completed commands.
# This data will be added to transfers
self._command_response_buf = bytearray() |
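Stripped of the DAP-specific _Command type, the three buffers form a simple send/receive pipeline; a toy sketch:

import collections

class DeferredBuffers:
    def __init__(self, packet_size=64):
        self._transfer_list = collections.deque()     # started, not completed
        self._crnt_cmd = bytearray(packet_size)       # stand-in for _Command
        self._commands_to_read = collections.deque()  # sent, not yet read
        self._command_response_buf = bytearray()      # data for completed cmds

bufs = DeferredBuffers()
bufs._transfer_list.append("read_reg(0)")
print(len(bufs._transfer_list))  # 1; reinitialising would drop it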
def visitLanguageRange(self, ctx: ShExDocParser.LanguageRangeContext):
""" ShExC: languageRange : LANGTAG (STEM_MARK languagExclusion*)?
ShExJ: valueSetValue = objectValue | LanguageStem | LanguageStemRange """
baselang = ctx.LANGTAG().getText()
if not ctx.STEM_MARK(): # valueSetValue = objectValue / objectValue = ObjectLiteral
vsvalue = Language()
vsvalue.languageTag = baselang[1:]
else:
if ctx.languageExclusion():
vsvalue = LanguageStemRange(LANGTAG(baselang[1:]), exclusions=[])
self._language_exclusions(vsvalue, ctx.languageExclusion())
else:
vsvalue = LanguageStem(LANGTAG(baselang[1:]))
self.nodeconstraint.values.append(vsvalue) | def function[visitLanguageRange, parameter[self, ctx]]:
    constant[ ShExC: languageRange : LANGTAG (STEM_MARK languageExclusion*)?
ShExJ: valueSetValue = objectValue | LanguageStem | LanguageStemRange ]
variable[baselang] assign[=] call[call[name[ctx].LANGTAG, parameter[]].getText, parameter[]]
if <ast.UnaryOp object at 0x7da20c9932e0> begin[:]
variable[vsvalue] assign[=] call[name[Language], parameter[]]
name[vsvalue].languageTag assign[=] call[name[baselang]][<ast.Slice object at 0x7da20c9904c0>]
call[name[self].nodeconstraint.values.append, parameter[name[vsvalue]]] | keyword[def] identifier[visitLanguageRange] ( identifier[self] , identifier[ctx] : identifier[ShExDocParser] . identifier[LanguageRangeContext] ):
literal[string]
identifier[baselang] = identifier[ctx] . identifier[LANGTAG] (). identifier[getText] ()
keyword[if] keyword[not] identifier[ctx] . identifier[STEM_MARK] ():
identifier[vsvalue] = identifier[Language] ()
identifier[vsvalue] . identifier[languageTag] = identifier[baselang] [ literal[int] :]
keyword[else] :
keyword[if] identifier[ctx] . identifier[languageExclusion] ():
identifier[vsvalue] = identifier[LanguageStemRange] ( identifier[LANGTAG] ( identifier[baselang] [ literal[int] :]), identifier[exclusions] =[])
identifier[self] . identifier[_language_exclusions] ( identifier[vsvalue] , identifier[ctx] . identifier[languageExclusion] ())
keyword[else] :
identifier[vsvalue] = identifier[LanguageStem] ( identifier[LANGTAG] ( identifier[baselang] [ literal[int] :]))
identifier[self] . identifier[nodeconstraint] . identifier[values] . identifier[append] ( identifier[vsvalue] ) | def visitLanguageRange(self, ctx: ShExDocParser.LanguageRangeContext):
""" ShExC: languageRange : LANGTAG (STEM_MARK languagExclusion*)?
ShExJ: valueSetValue = objectValue | LanguageStem | LanguageStemRange """
baselang = ctx.LANGTAG().getText()
if not ctx.STEM_MARK(): # valueSetValue = objectValue / objectValue = ObjectLiteral
vsvalue = Language()
vsvalue.languageTag = baselang[1:] # depends on [control=['if'], data=[]]
elif ctx.languageExclusion():
vsvalue = LanguageStemRange(LANGTAG(baselang[1:]), exclusions=[])
self._language_exclusions(vsvalue, ctx.languageExclusion()) # depends on [control=['if'], data=[]]
else:
vsvalue = LanguageStem(LANGTAG(baselang[1:]))
self.nodeconstraint.values.append(vsvalue) |
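Without the ANTLR context objects, the visitor reduces to a three-way decision on the language tag; a stand-alone sketch with tuple stand-ins for the ShExJ value classes:

def classify_language_range(langtag, has_stem_mark, exclusions):
    if not has_stem_mark:
        return ("Language", langtag)  # plain tag, e.g. @en
    if exclusions:
        return ("LanguageStemRange", langtag, tuple(exclusions))
    return ("LanguageStem", langtag)  # e.g. @en~

print(classify_language_range("en", False, []))        # ('Language', 'en')
print(classify_language_range("en", True, []))         # ('LanguageStem', 'en')
print(classify_language_range("en", True, ["en-US"]))  # stem range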
def set_min_requests_per_connection(self, host_distance, min_requests):
"""
Sets a threshold for concurrent requests per connection, below which
connections will be considered for disposal (down to core connections;
see :meth:`~Cluster.set_core_connections_per_host`).
Pertains to connection pool management in protocol versions {1,2}.
"""
if self.protocol_version >= 3:
raise UnsupportedOperation(
"Cluster.set_min_requests_per_connection() only has an effect "
"when using protocol_version 1 or 2.")
if min_requests < 0 or min_requests > 126 or \
min_requests >= self._max_requests_per_connection[host_distance]:
raise ValueError("min_requests must be 0-126 and less than the max_requests for this host_distance (%d)" %
(self._min_requests_per_connection[host_distance],))
self._min_requests_per_connection[host_distance] = min_requests | def function[set_min_requests_per_connection, parameter[self, host_distance, min_requests]]:
constant[
Sets a threshold for concurrent requests per connection, below which
connections will be considered for disposal (down to core connections;
see :meth:`~Cluster.set_core_connections_per_host`).
Pertains to connection pool management in protocol versions {1,2}.
]
if compare[name[self].protocol_version greater_or_equal[>=] constant[3]] begin[:]
<ast.Raise object at 0x7da1b22bb6a0>
if <ast.BoolOp object at 0x7da1b22b9c90> begin[:]
<ast.Raise object at 0x7da1b22bbd60>
call[name[self]._min_requests_per_connection][name[host_distance]] assign[=] name[min_requests] | keyword[def] identifier[set_min_requests_per_connection] ( identifier[self] , identifier[host_distance] , identifier[min_requests] ):
literal[string]
keyword[if] identifier[self] . identifier[protocol_version] >= literal[int] :
keyword[raise] identifier[UnsupportedOperation] (
literal[string]
literal[string] )
keyword[if] identifier[min_requests] < literal[int] keyword[or] identifier[min_requests] > literal[int] keyword[or] identifier[min_requests] >= identifier[self] . identifier[_max_requests_per_connection] [ identifier[host_distance] ]:
keyword[raise] identifier[ValueError] ( literal[string] %
( identifier[self] . identifier[_min_requests_per_connection] [ identifier[host_distance] ],))
identifier[self] . identifier[_min_requests_per_connection] [ identifier[host_distance] ]= identifier[min_requests] | def set_min_requests_per_connection(self, host_distance, min_requests):
"""
Sets a threshold for concurrent requests per connection, below which
connections will be considered for disposal (down to core connections;
see :meth:`~Cluster.set_core_connections_per_host`).
Pertains to connection pool management in protocol versions {1,2}.
"""
if self.protocol_version >= 3:
raise UnsupportedOperation('Cluster.set_min_requests_per_connection() only has an effect when using protocol_version 1 or 2.') # depends on [control=['if'], data=[]]
if min_requests < 0 or min_requests > 126 or min_requests >= self._max_requests_per_connection[host_distance]:
            raise ValueError('min_requests must be 0-126 and less than the max_requests for this host_distance (%d)' % (self._max_requests_per_connection[host_distance],)) # depends on [control=['if'], data=[]]
self._min_requests_per_connection[host_distance] = min_requests |
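Typical usage against the DataStax Python driver, where this method lives on Cluster (a sketch; the setting only matters for protocol versions 1 and 2):

from cassandra.cluster import Cluster
from cassandra.policies import HostDistance

cluster = Cluster(protocol_version=2)
# LOCAL-host connections with fewer than 5 in-flight requests become
# candidates for disposal, down to the core connection count.
cluster.set_min_requests_per_connection(HostDistance.LOCAL, 5)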
def XMPP_display(self,*arg):
""" For XMPP Demo
        Output style for XMPP.
"""
MA = ''
for i in arg:
MAs = '- MA%02s: %.2f %s(%s)\n' % (
unicode(i),
self.MA(i),
self.MAC(i),
unicode(self.MA_serial(i)[0])
)
MA = MA + MAs
vol = '- Volume: %s %s(%s)' % (
unicode(self.MAVOL(1)/1000),
unicode(self.MACVOL(1)),
unicode(self.MAVOL_serial(1)[0])
)
MAO = self.MAO(3,6)
re = """%(stock_name)s %(stock_no)s
%(stock_date)s: %(stock_price)s %(stock_range)s(%(range_per)+.2f%%)
%(MA)s%(vol)s
- MAO(3-6): %(MAO_v).2f %(MAO_c)s(%(MAO_times)s)
- RABC: %(RABC)s""" % {
'stock_name': unicode(self.stock_name),
'stock_no': unicode(self.stock_no),
'stock_date': unicode(self.data_date[-1]),
'stock_price': unicode(self.raw_data[-1]),
'stock_range': unicode(self.stock_range[-1]),
'range_per': self.range_per,
'MA': MA,
'vol': vol,
'MAO_v': MAO[0][1][-1],
'MAO_c': unicode(MAO[1]),
'MAO_times': unicode(MAO[0][0]),
'RABC': self.RABC
}
return re | def function[XMPP_display, parameter[self]]:
constant[ For XMPP Demo
    Output style for XMPP.
]
variable[MA] assign[=] constant[]
for taget[name[i]] in starred[name[arg]] begin[:]
variable[MAs] assign[=] binary_operation[constant[- MA%02s: %.2f %s(%s)
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f813790>, <ast.Call object at 0x7da18f8129e0>, <ast.Call object at 0x7da18f810850>, <ast.Call object at 0x7da18f813f70>]]]
variable[MA] assign[=] binary_operation[name[MA] + name[MAs]]
variable[vol] assign[=] binary_operation[constant[- Volume: %s %s(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f813d00>, <ast.Call object at 0x7da18f811450>, <ast.Call object at 0x7da18f811660>]]]
variable[MAO] assign[=] call[name[self].MAO, parameter[constant[3], constant[6]]]
variable[re] assign[=] binary_operation[constant[%(stock_name)s %(stock_no)s
%(stock_date)s: %(stock_price)s %(stock_range)s(%(range_per)+.2f%%)
%(MA)s%(vol)s
- MAO(3-6): %(MAO_v).2f %(MAO_c)s(%(MAO_times)s)
- RABC: %(RABC)s] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b197d3f0>, <ast.Constant object at 0x7da1b197d7e0>, <ast.Constant object at 0x7da1b197d5d0>, <ast.Constant object at 0x7da1b197ec20>, <ast.Constant object at 0x7da1b197eb90>, <ast.Constant object at 0x7da1b197f850>, <ast.Constant object at 0x7da1b197eda0>, <ast.Constant object at 0x7da1b197ff70>, <ast.Constant object at 0x7da1b197c550>, <ast.Constant object at 0x7da1b197df90>, <ast.Constant object at 0x7da1b197eb00>, <ast.Constant object at 0x7da1b197d4e0>], [<ast.Call object at 0x7da1b197d420>, <ast.Call object at 0x7da1b197d8a0>, <ast.Call object at 0x7da1b197f730>, <ast.Call object at 0x7da1b197df60>, <ast.Call object at 0x7da1b197fc70>, <ast.Attribute object at 0x7da1b197e0b0>, <ast.Name object at 0x7da1b197c910>, <ast.Name object at 0x7da1b197e740>, <ast.Subscript object at 0x7da1b197c040>, <ast.Call object at 0x7da1b197df30>, <ast.Call object at 0x7da1b197cc40>, <ast.Attribute object at 0x7da1b197c070>]]]
return[name[re]] | keyword[def] identifier[XMPP_display] ( identifier[self] ,* identifier[arg] ):
literal[string]
identifier[MA] = literal[string]
keyword[for] identifier[i] keyword[in] identifier[arg] :
identifier[MAs] = literal[string] %(
identifier[unicode] ( identifier[i] ),
identifier[self] . identifier[MA] ( identifier[i] ),
identifier[self] . identifier[MAC] ( identifier[i] ),
identifier[unicode] ( identifier[self] . identifier[MA_serial] ( identifier[i] )[ literal[int] ])
)
identifier[MA] = identifier[MA] + identifier[MAs]
identifier[vol] = literal[string] %(
identifier[unicode] ( identifier[self] . identifier[MAVOL] ( literal[int] )/ literal[int] ),
identifier[unicode] ( identifier[self] . identifier[MACVOL] ( literal[int] )),
identifier[unicode] ( identifier[self] . identifier[MAVOL_serial] ( literal[int] )[ literal[int] ])
)
identifier[MAO] = identifier[self] . identifier[MAO] ( literal[int] , literal[int] )
identifier[re] = literal[string] %{
literal[string] : identifier[unicode] ( identifier[self] . identifier[stock_name] ),
literal[string] : identifier[unicode] ( identifier[self] . identifier[stock_no] ),
literal[string] : identifier[unicode] ( identifier[self] . identifier[data_date] [- literal[int] ]),
literal[string] : identifier[unicode] ( identifier[self] . identifier[raw_data] [- literal[int] ]),
literal[string] : identifier[unicode] ( identifier[self] . identifier[stock_range] [- literal[int] ]),
literal[string] : identifier[self] . identifier[range_per] ,
literal[string] : identifier[MA] ,
literal[string] : identifier[vol] ,
literal[string] : identifier[MAO] [ literal[int] ][ literal[int] ][- literal[int] ],
literal[string] : identifier[unicode] ( identifier[MAO] [ literal[int] ]),
literal[string] : identifier[unicode] ( identifier[MAO] [ literal[int] ][ literal[int] ]),
literal[string] : identifier[self] . identifier[RABC]
}
keyword[return] identifier[re] | def XMPP_display(self, *arg):
""" For XMPP Demo
輸出到 XMPP 之樣式。
"""
MA = ''
for i in arg:
MAs = '- MA%02s: %.2f %s(%s)\n' % (unicode(i), self.MA(i), self.MAC(i), unicode(self.MA_serial(i)[0]))
MA = MA + MAs # depends on [control=['for'], data=['i']]
vol = '- Volume: %s %s(%s)' % (unicode(self.MAVOL(1) / 1000), unicode(self.MACVOL(1)), unicode(self.MAVOL_serial(1)[0]))
MAO = self.MAO(3, 6)
re = '%(stock_name)s %(stock_no)s\n%(stock_date)s: %(stock_price)s %(stock_range)s(%(range_per)+.2f%%)\n%(MA)s%(vol)s\n- MAO(3-6): %(MAO_v).2f %(MAO_c)s(%(MAO_times)s)\n- RABC: %(RABC)s' % {'stock_name': unicode(self.stock_name), 'stock_no': unicode(self.stock_no), 'stock_date': unicode(self.data_date[-1]), 'stock_price': unicode(self.raw_data[-1]), 'stock_range': unicode(self.stock_range[-1]), 'range_per': self.range_per, 'MA': MA, 'vol': vol, 'MAO_v': MAO[0][1][-1], 'MAO_c': unicode(MAO[1]), 'MAO_times': unicode(MAO[0][0]), 'RABC': self.RABC}
return re |
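The whole report is built with %-formatting against a mapping; the same pattern in isolation:

report = "%(name)s %(no)s\n%(date)s: %(price)s (%(pct)+.2f%%)" % {
    "name": "DemoCo", "no": "0000",
    "date": "2014-07-01", "price": 123.0, "pct": 1.25,
}
print(report)  # %% renders a literal percent sign; %+.2f forces the sign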
def _handle_magic_exception(self, mod, exc):
"""
Beginning with Ansible >2.6, some modules (file.py) install a
sys.excepthook which is a closure over AnsibleModule, redirecting the
magical exception to AnsibleModule.fail_json().
For extra special needs bonus points, the class is not defined in
module_utils, but is defined in the module itself, meaning there is no
type for isinstance() that outlasts the invocation.
"""
klass = getattr(mod, 'AnsibleModuleError', None)
if klass and isinstance(exc, klass):
mod.module.fail_json(**exc.results) | def function[_handle_magic_exception, parameter[self, mod, exc]]:
constant[
Beginning with Ansible >2.6, some modules (file.py) install a
sys.excepthook which is a closure over AnsibleModule, redirecting the
magical exception to AnsibleModule.fail_json().
For extra special needs bonus points, the class is not defined in
module_utils, but is defined in the module itself, meaning there is no
type for isinstance() that outlasts the invocation.
]
variable[klass] assign[=] call[name[getattr], parameter[name[mod], constant[AnsibleModuleError], constant[None]]]
if <ast.BoolOp object at 0x7da1b1d51ea0> begin[:]
call[name[mod].module.fail_json, parameter[]] | keyword[def] identifier[_handle_magic_exception] ( identifier[self] , identifier[mod] , identifier[exc] ):
literal[string]
identifier[klass] = identifier[getattr] ( identifier[mod] , literal[string] , keyword[None] )
keyword[if] identifier[klass] keyword[and] identifier[isinstance] ( identifier[exc] , identifier[klass] ):
identifier[mod] . identifier[module] . identifier[fail_json] (** identifier[exc] . identifier[results] ) | def _handle_magic_exception(self, mod, exc):
"""
Beginning with Ansible >2.6, some modules (file.py) install a
sys.excepthook which is a closure over AnsibleModule, redirecting the
magical exception to AnsibleModule.fail_json().
For extra special needs bonus points, the class is not defined in
module_utils, but is defined in the module itself, meaning there is no
type for isinstance() that outlasts the invocation.
"""
klass = getattr(mod, 'AnsibleModuleError', None)
if klass and isinstance(exc, klass):
mod.module.fail_json(**exc.results) # depends on [control=['if'], data=[]] |
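The getattr/isinstance combination is the general recipe for catching an exception class that only exists inside a dynamically loaded module; a self-contained sketch:

class _Mod:  # toy stand-in for a loaded Ansible module
    class AnsibleModuleError(Exception):
        def __init__(self, results):
            self.results = results
    class module:
        @staticmethod
        def fail_json(**kw):
            print("fail_json:", kw)

def handle(mod, exc):
    klass = getattr(mod, "AnsibleModuleError", None)  # may be absent
    if klass and isinstance(exc, klass):
        mod.module.fail_json(**exc.results)

handle(_Mod, _Mod.AnsibleModuleError({"msg": "boom"}))  # fail_json: {'msg': 'boom'}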
def search_template_present(name, definition):
'''
Ensure that the named search template is present.
name
Name of the search template to add
definition
Required dict for creation parameters as per http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html
**Example:**
.. code-block:: yaml
test_pipeline:
elasticsearch.search_template_present:
- definition:
inline:
size: 10
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
try:
template = __salt__['elasticsearch.search_template_get'](id=name)
old = {}
if template:
old = salt.utils.json.loads(template["template"])
ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
if ret['changes'] or not definition:
if __opts__['test']:
if not template:
ret['comment'] = 'Search template {0} does not exist and will be created'.format(name)
else:
ret['comment'] = 'Search template {0} exists with wrong configuration and will be overridden'.format(name)
ret['result'] = None
else:
output = __salt__['elasticsearch.search_template_create'](id=name, body=definition)
if output:
if not template:
ret['comment'] = 'Successfully created search template {0}'.format(name)
else:
ret['comment'] = 'Successfully replaced search template {0}'.format(name)
else:
ret['result'] = False
ret['comment'] = 'Cannot create search template {0}, {1}'.format(name, output)
else:
ret['comment'] = 'Search template {0} is already present'.format(name)
except Exception as err:
ret['result'] = False
ret['comment'] = six.text_type(err)
return ret | def function[search_template_present, parameter[name, definition]]:
constant[
Ensure that the named search template is present.
name
Name of the search template to add
definition
Required dict for creation parameters as per http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html
**Example:**
.. code-block:: yaml
test_pipeline:
elasticsearch.search_template_present:
- definition:
inline:
size: 10
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c16cb0>, <ast.Constant object at 0x7da1b1c16dd0>, <ast.Constant object at 0x7da1b1c172e0>, <ast.Constant object at 0x7da1b1c155d0>], [<ast.Name object at 0x7da1b1c167a0>, <ast.Dict object at 0x7da1b1c14700>, <ast.Constant object at 0x7da1b1c159f0>, <ast.Constant object at 0x7da1b1c17a30>]]
<ast.Try object at 0x7da1b1c14160>
return[name[ret]] | keyword[def] identifier[search_template_present] ( identifier[name] , identifier[definition] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[True] , literal[string] : literal[string] }
keyword[try] :
identifier[template] = identifier[__salt__] [ literal[string] ]( identifier[id] = identifier[name] )
identifier[old] ={}
keyword[if] identifier[template] :
identifier[old] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[template] [ literal[string] ])
identifier[ret] [ literal[string] ]= identifier[__utils__] [ literal[string] ]( identifier[old] , identifier[definition] )
keyword[if] identifier[ret] [ literal[string] ] keyword[or] keyword[not] identifier[definition] :
keyword[if] identifier[__opts__] [ literal[string] ]:
keyword[if] keyword[not] identifier[template] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[output] = identifier[__salt__] [ literal[string] ]( identifier[id] = identifier[name] , identifier[body] = identifier[definition] )
keyword[if] identifier[output] :
keyword[if] keyword[not] identifier[template] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[output] )
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[err] )
keyword[return] identifier[ret] | def search_template_present(name, definition):
"""
Ensure that the named search template is present.
name
Name of the search template to add
definition
Required dict for creation parameters as per http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html
**Example:**
.. code-block:: yaml
test_pipeline:
elasticsearch.search_template_present:
- definition:
inline:
size: 10
"""
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
try:
template = __salt__['elasticsearch.search_template_get'](id=name)
old = {}
if template:
old = salt.utils.json.loads(template['template']) # depends on [control=['if'], data=[]]
ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
if ret['changes'] or not definition:
if __opts__['test']:
if not template:
ret['comment'] = 'Search template {0} does not exist and will be created'.format(name) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Search template {0} exists with wrong configuration and will be overridden'.format(name)
ret['result'] = None # depends on [control=['if'], data=[]]
else:
output = __salt__['elasticsearch.search_template_create'](id=name, body=definition)
if output:
if not template:
ret['comment'] = 'Successfully created search template {0}'.format(name) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Successfully replaced search template {0}'.format(name) # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Cannot create search template {0}, {1}'.format(name, output) # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Search template {0} is already present'.format(name) # depends on [control=['try'], data=[]]
except Exception as err:
ret['result'] = False
ret['comment'] = six.text_type(err) # depends on [control=['except'], data=['err']]
return ret |
def format_list(extracted_list):
"""Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
"""
list = []
for filename, lineno, name, line in extracted_list:
item = ' File "%s", line %d, in %s\n' % (filename,lineno,name)
if line:
item = item + ' %s\n' % line.strip()
list.append(item)
return list | def function[format_list, parameter[extracted_list]]:
constant[Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
]
variable[list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18dc9ac20>, <ast.Name object at 0x7da18dc9a3e0>, <ast.Name object at 0x7da18dc9aa70>, <ast.Name object at 0x7da18dc98550>]]] in starred[name[extracted_list]] begin[:]
variable[item] assign[=] binary_operation[constant[ File "%s", line %d, in %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc9a2f0>, <ast.Name object at 0x7da18dc99420>, <ast.Name object at 0x7da18dc9a320>]]]
if name[line] begin[:]
variable[item] assign[=] binary_operation[name[item] + binary_operation[constant[ %s
] <ast.Mod object at 0x7da2590d6920> call[name[line].strip, parameter[]]]]
call[name[list].append, parameter[name[item]]]
return[name[list]] | keyword[def] identifier[format_list] ( identifier[extracted_list] ):
literal[string]
identifier[list] =[]
keyword[for] identifier[filename] , identifier[lineno] , identifier[name] , identifier[line] keyword[in] identifier[extracted_list] :
identifier[item] = literal[string] %( identifier[filename] , identifier[lineno] , identifier[name] )
keyword[if] identifier[line] :
identifier[item] = identifier[item] + literal[string] % identifier[line] . identifier[strip] ()
identifier[list] . identifier[append] ( identifier[item] )
keyword[return] identifier[list] | def format_list(extracted_list):
"""Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
"""
list = []
for (filename, lineno, name, line) in extracted_list:
item = ' File "%s", line %d, in %s\n' % (filename, lineno, name)
if line:
item = item + ' %s\n' % line.strip() # depends on [control=['if'], data=[]]
list.append(item) # depends on [control=['for'], data=[]]
return list |
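A quick usage sketch: format_list pairs naturally with traceback.extract_tb(), whose entries unpack as (filename, lineno, name, line) tuples. This snippet is illustrative and assumes the format_list from the row above is in scope.

import sys
import traceback

def boom():
    raise ValueError('demo')

try:
    boom()
except ValueError:
    extracted = traceback.extract_tb(sys.exc_info()[2])
    for entry in format_list(extracted):
        print(entry, end='')  # each entry already ends in a newline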
def CreateProductPartition(client, adgroup_id):
"""Creates a ProductPartition tree for the given AdGroup ID.
Args:
client: an AdWordsClient instance.
adgroup_id: a str AdGroup ID.
Returns:
The ProductPartition tree as a sudsobject.
"""
ad_group_criterion_service = client.GetService('AdGroupCriterionService',
'v201809')
helper = ProductPartitionHelper(adgroup_id)
root = helper.CreateSubdivision()
new_product_canonical_condition = {
'xsi_type': 'ProductCanonicalCondition',
'condition': 'NEW'
}
used_product_canonical_condition = {
'xsi_type': 'ProductCanonicalCondition',
'condition': 'USED'
}
other_product_canonical_condition = {
'xsi_type': 'ProductCanonicalCondition',
}
helper.CreateUnit(root, new_product_canonical_condition)
helper.CreateUnit(root, used_product_canonical_condition)
helper.CreateUnit(root, other_product_canonical_condition)
result = ad_group_criterion_service.mutate(helper.operations)
return result['value'] | def function[CreateProductPartition, parameter[client, adgroup_id]]:
constant[Creates a ProductPartition tree for the given AdGroup ID.
Args:
client: an AdWordsClient instance.
adgroup_id: a str AdGroup ID.
Returns:
The ProductPartition tree as a sudsobject.
]
variable[ad_group_criterion_service] assign[=] call[name[client].GetService, parameter[constant[AdGroupCriterionService], constant[v201809]]]
variable[helper] assign[=] call[name[ProductPartitionHelper], parameter[name[adgroup_id]]]
variable[root] assign[=] call[name[helper].CreateSubdivision, parameter[]]
variable[new_product_canonical_condition] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c0d180>, <ast.Constant object at 0x7da1b1c0c3a0>], [<ast.Constant object at 0x7da1b1c0df90>, <ast.Constant object at 0x7da1b1c0c250>]]
variable[used_product_canonical_condition] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c0dd50>, <ast.Constant object at 0x7da1b1c0dd80>], [<ast.Constant object at 0x7da1b1c0c910>, <ast.Constant object at 0x7da1b1c0c430>]]
variable[other_product_canonical_condition] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c0e680>], [<ast.Constant object at 0x7da1b1c0c1c0>]]
call[name[helper].CreateUnit, parameter[name[root], name[new_product_canonical_condition]]]
call[name[helper].CreateUnit, parameter[name[root], name[used_product_canonical_condition]]]
call[name[helper].CreateUnit, parameter[name[root], name[other_product_canonical_condition]]]
variable[result] assign[=] call[name[ad_group_criterion_service].mutate, parameter[name[helper].operations]]
return[call[name[result]][constant[value]]] | keyword[def] identifier[CreateProductPartition] ( identifier[client] , identifier[adgroup_id] ):
literal[string]
identifier[ad_group_criterion_service] = identifier[client] . identifier[GetService] ( literal[string] ,
literal[string] )
identifier[helper] = identifier[ProductPartitionHelper] ( identifier[adgroup_id] )
identifier[root] = identifier[helper] . identifier[CreateSubdivision] ()
identifier[new_product_canonical_condition] ={
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[used_product_canonical_condition] ={
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[other_product_canonical_condition] ={
literal[string] : literal[string] ,
}
identifier[helper] . identifier[CreateUnit] ( identifier[root] , identifier[new_product_canonical_condition] )
identifier[helper] . identifier[CreateUnit] ( identifier[root] , identifier[used_product_canonical_condition] )
identifier[helper] . identifier[CreateUnit] ( identifier[root] , identifier[other_product_canonical_condition] )
identifier[result] = identifier[ad_group_criterion_service] . identifier[mutate] ( identifier[helper] . identifier[operations] )
keyword[return] identifier[result] [ literal[string] ] | def CreateProductPartition(client, adgroup_id):
"""Creates a ProductPartition tree for the given AdGroup ID.
Args:
client: an AdWordsClient instance.
adgroup_id: a str AdGroup ID.
Returns:
The ProductPartition tree as a sudsobject.
"""
ad_group_criterion_service = client.GetService('AdGroupCriterionService', 'v201809')
helper = ProductPartitionHelper(adgroup_id)
root = helper.CreateSubdivision()
new_product_canonical_condition = {'xsi_type': 'ProductCanonicalCondition', 'condition': 'NEW'}
used_product_canonical_condition = {'xsi_type': 'ProductCanonicalCondition', 'condition': 'USED'}
other_product_canonical_condition = {'xsi_type': 'ProductCanonicalCondition'}
helper.CreateUnit(root, new_product_canonical_condition)
helper.CreateUnit(root, used_product_canonical_condition)
helper.CreateUnit(root, other_product_canonical_condition)
result = ad_group_criterion_service.mutate(helper.operations)
return result['value'] |
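A hedged call sketch for CreateProductPartition, assuming the googleads client library with credentials in the conventional googleads.yaml; the ad group ID below is a placeholder, not a real value.

from googleads import adwords  # assumed dependency

client = adwords.AdWordsClient.LoadFromStorage()  # loads credentials from googleads.yaml
ADGROUP_ID = '1234567890'  # placeholder AdGroup ID
partition_tree = CreateProductPartition(client, ADGROUP_ID)
print(partition_tree)  # mutated criteria, as returned by the API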
def set_value(self, value):
"""Set value of the checkbox.
Parameters
----------
value : bool
value for the checkbox
"""
if value:
self.setCheckState(Qt.Checked)
else:
self.setCheckState(Qt.Unchecked) | def function[set_value, parameter[self, value]]:
constant[Set value of the checkbox.
Parameters
----------
value : bool
value for the checkbox
]
if name[value] begin[:]
call[name[self].setCheckState, parameter[name[Qt].Checked]] | keyword[def] identifier[set_value] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] :
identifier[self] . identifier[setCheckState] ( identifier[Qt] . identifier[Checked] )
keyword[else] :
identifier[self] . identifier[setCheckState] ( identifier[Qt] . identifier[Unchecked] ) | def set_value(self, value):
"""Set value of the checkbox.
Parameters
----------
value : bool
value for the checkbox
"""
if value:
self.setCheckState(Qt.Checked) # depends on [control=['if'], data=[]]
else:
self.setCheckState(Qt.Unchecked) |
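A minimal PyQt5 sketch of this pattern; the host class here is hypothetical and simply carries a set_value method on a QCheckBox.

from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication, QCheckBox

class ValueCheckBox(QCheckBox):  # hypothetical host widget
    def set_value(self, value):
        # mirror of the method above: map a bool onto Qt check states
        self.setCheckState(Qt.Checked if value else Qt.Unchecked)

app = QApplication([])
box = ValueCheckBox('enabled')
box.set_value(True)
assert box.checkState() == Qt.Checked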
def getElementsByClassName(self, className, root='root', useIndex=True):
'''
getElementsByClassName - Searches and returns all elements containing a given class name.
@param className <str> - A one-word class name
            @param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. If string 'root', the root of the parsed tree will be used.
            @param useIndex <bool> If useIndex is True and class names are indexed [see constructor], only the index will be used. Otherwise a full search is performed.
'''
(root, isFromRoot) = self._handleRootArg(root)
if useIndex is True and self.indexClassNames is True:
elements = self._classNameMap.get(className, [])
if isFromRoot is False:
_hasTagInParentLine = self._hasTagInParentLine
elements = [x for x in elements if _hasTagInParentLine(x, root)]
return TagCollection(elements)
return AdvancedHTMLParser.getElementsByClassName(self, className, root) | def function[getElementsByClassName, parameter[self, className, root, useIndex]]:
constant[
getElementsByClassName - Searches and returns all elements containing a given class name.
@param className <str> - A one-word class name
    @param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. If string 'root', the root of the parsed tree will be used.
    @param useIndex <bool> If useIndex is True and class names are indexed [see constructor], only the index will be used. Otherwise a full search is performed.
]
<ast.Tuple object at 0x7da1b0f51420> assign[=] call[name[self]._handleRootArg, parameter[name[root]]]
if <ast.BoolOp object at 0x7da1b0f51c60> begin[:]
variable[elements] assign[=] call[name[self]._classNameMap.get, parameter[name[className], list[[]]]]
if compare[name[isFromRoot] is constant[False]] begin[:]
variable[_hasTagInParentLine] assign[=] name[self]._hasTagInParentLine
variable[elements] assign[=] <ast.ListComp object at 0x7da1b10c0070>
return[call[name[TagCollection], parameter[name[elements]]]]
return[call[name[AdvancedHTMLParser].getElementsByClassName, parameter[name[self], name[className], name[root]]]] | keyword[def] identifier[getElementsByClassName] ( identifier[self] , identifier[className] , identifier[root] = literal[string] , identifier[useIndex] = keyword[True] ):
literal[string]
( identifier[root] , identifier[isFromRoot] )= identifier[self] . identifier[_handleRootArg] ( identifier[root] )
keyword[if] identifier[useIndex] keyword[is] keyword[True] keyword[and] identifier[self] . identifier[indexClassNames] keyword[is] keyword[True] :
identifier[elements] = identifier[self] . identifier[_classNameMap] . identifier[get] ( identifier[className] ,[])
keyword[if] identifier[isFromRoot] keyword[is] keyword[False] :
identifier[_hasTagInParentLine] = identifier[self] . identifier[_hasTagInParentLine]
identifier[elements] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[elements] keyword[if] identifier[_hasTagInParentLine] ( identifier[x] , identifier[root] )]
keyword[return] identifier[TagCollection] ( identifier[elements] )
keyword[return] identifier[AdvancedHTMLParser] . identifier[getElementsByClassName] ( identifier[self] , identifier[className] , identifier[root] ) | def getElementsByClassName(self, className, root='root', useIndex=True):
"""
getElementsByClassName - Searches and returns all elements containing a given class name.
@param className <str> - A one-word class name
    @param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. If string 'root', the root of the parsed tree will be used.
    @param useIndex <bool> If useIndex is True and class names are indexed [see constructor], only the index will be used. Otherwise a full search is performed.
"""
(root, isFromRoot) = self._handleRootArg(root)
if useIndex is True and self.indexClassNames is True:
elements = self._classNameMap.get(className, [])
if isFromRoot is False:
_hasTagInParentLine = self._hasTagInParentLine
elements = [x for x in elements if _hasTagInParentLine(x, root)] # depends on [control=['if'], data=[]]
return TagCollection(elements) # depends on [control=['if'], data=[]]
return AdvancedHTMLParser.getElementsByClassName(self, className, root) |
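This method appears to extend the AdvancedHTMLParser package's indexed parser; a hedged usage sketch against that library's public API (class and method names assumed from the package):

import AdvancedHTMLParser  # pip install AdvancedHTMLParser (assumed)

parser = AdvancedHTMLParser.IndexedAdvancedHTMLParser()
parser.parseStr('<div class="note">a</div><div class="note">b</div>')
notes = parser.getElementsByClassName('note')  # served from the class-name index when enabled
print(len(notes))  # -> 2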
def set_IC(self, values):
r"""
A method to set simulation initial conditions
Parameters
----------
values : ND-array or scalar
Set the initial conditions using an 'Np' long array. 'Np' being
the number of pores. If a scalar is given, the same value is
        imposed on all pores.
"""
self[self.settings['quantity']] = values
converted_array = self[self.settings['quantity']].astype('float64')
self[self.settings['quantity']] = converted_array | def function[set_IC, parameter[self, values]]:
constant[
A method to set simulation initial conditions
Parameters
----------
values : ND-array or scalar
Set the initial conditions using an 'Np' long array. 'Np' being
the number of pores. If a scalar is given, the same value is
        imposed on all pores.
]
call[name[self]][call[name[self].settings][constant[quantity]]] assign[=] name[values]
variable[converted_array] assign[=] call[call[name[self]][call[name[self].settings][constant[quantity]]].astype, parameter[constant[float64]]]
call[name[self]][call[name[self].settings][constant[quantity]]] assign[=] name[converted_array] | keyword[def] identifier[set_IC] ( identifier[self] , identifier[values] ):
literal[string]
identifier[self] [ identifier[self] . identifier[settings] [ literal[string] ]]= identifier[values]
identifier[converted_array] = identifier[self] [ identifier[self] . identifier[settings] [ literal[string] ]]. identifier[astype] ( literal[string] )
identifier[self] [ identifier[self] . identifier[settings] [ literal[string] ]]= identifier[converted_array] | def set_IC(self, values):
"""
A method to set simulation initial conditions
Parameters
----------
values : ND-array or scalar
Set the initial conditions using an 'Np' long array. 'Np' being
the number of pores. If a scalar is given, the same value is
    imposed on all pores.
"""
self[self.settings['quantity']] = values
converted_array = self[self.settings['quantity']].astype('float64')
self[self.settings['quantity']] = converted_array |
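A hedged OpenPNM-style sketch (OpenPNM 2.x API assumed; the network, phase, and algorithm choice are illustrative, not prescribed by the method above):

import openpnm as op  # assumed dependency

pn = op.network.Cubic(shape=[5, 5, 5])
water = op.phases.Water(network=pn)
alg = op.algorithms.TransientFickianDiffusion(network=pn, phase=water)
alg.set_IC(0.0)  # one scalar imposed on every pore, stored as float64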
def df2chucks(din,chunksize,outd,fn,return_fmt='\t',force=False):
"""
    :param return_fmt: a separator such as '\t' or ',' writes each chunk to a delimited file; 'list' returns the chunks as a list of DataFrames
"""
    from os.path import exists
from os import makedirs
din.index=range(0,len(din),1)
chunkrange=list(np.arange(0,len(din),chunksize))
chunkrange=list(zip([c+1 if ci!=0 else 0 for ci,c in enumerate(chunkrange)],chunkrange[1:]+[len(din)-1]))
chunk2range={}
for ri,r in enumerate(chunkrange):
chunk2range[ri+1]=r
if not exists(outd):
makedirs(outd)
chunks=[]
chunkps=[]
for chunk in chunk2range:
chunkp='{}/{}_chunk{:08d}.tsv'.format(outd,fn,chunk)
rnge=chunk2range[chunk]
din_=din.loc[rnge[0]:rnge[1],:]
if not exists(chunkp) or force:
if return_fmt=='list':
chunks.append(din_)
else:
din_.to_csv(chunkp,sep=return_fmt)
del din_
chunkps.append(chunkp)
if return_fmt=='list':
return chunks
else:
return chunkps | def function[df2chucks, parameter[din, chunksize, outd, fn, return_fmt, force]]:
constant[
    :param return_fmt: a separator such as '\t' or ',' writes each chunk to a delimited file; 'list' returns the chunks as a list of DataFrames
]
from relative_module[os.path] import module[exists]
from relative_module[os] import module[makedirs]
name[din].index assign[=] call[name[range], parameter[constant[0], call[name[len], parameter[name[din]]], constant[1]]]
variable[chunkrange] assign[=] call[name[list], parameter[call[name[np].arange, parameter[constant[0], call[name[len], parameter[name[din]]], name[chunksize]]]]]
variable[chunkrange] assign[=] call[name[list], parameter[call[name[zip], parameter[<ast.ListComp object at 0x7da1b1ff1600>, binary_operation[call[name[chunkrange]][<ast.Slice object at 0x7da1b1ff1210>] + list[[<ast.BinOp object at 0x7da1b1ff28f0>]]]]]]]
variable[chunk2range] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1ff2560>, <ast.Name object at 0x7da1b1ff1c00>]]] in starred[call[name[enumerate], parameter[name[chunkrange]]]] begin[:]
call[name[chunk2range]][binary_operation[name[ri] + constant[1]]] assign[=] name[r]
if <ast.UnaryOp object at 0x7da1b1ec1e40> begin[:]
call[name[makedirs], parameter[name[outd]]]
variable[chunks] assign[=] list[[]]
variable[chunkps] assign[=] list[[]]
for taget[name[chunk]] in starred[name[chunk2range]] begin[:]
variable[chunkp] assign[=] call[constant[{}/{}_chunk{:08d}.tsv].format, parameter[name[outd], name[fn], name[chunk]]]
variable[rnge] assign[=] call[name[chunk2range]][name[chunk]]
variable[din_] assign[=] call[name[din].loc][tuple[[<ast.Slice object at 0x7da1b1ec1bd0>, <ast.Slice object at 0x7da1b1ec22c0>]]]
if <ast.BoolOp object at 0x7da1b1ec1330> begin[:]
if compare[name[return_fmt] equal[==] constant[list]] begin[:]
call[name[chunks].append, parameter[name[din_]]]
<ast.Delete object at 0x7da1b1ff82b0>
call[name[chunkps].append, parameter[name[chunkp]]]
if compare[name[return_fmt] equal[==] constant[list]] begin[:]
return[name[chunks]] | keyword[def] identifier[df2chucks] ( identifier[din] , identifier[chunksize] , identifier[outd] , identifier[fn] , identifier[return_fmt] = literal[string] , identifier[force] = keyword[False] ):
literal[string]
keyword[from] identifier[os] . identifier[path] keyword[import] identifier[exists]
keyword[from] identifier[os] keyword[import] identifier[makedirs]
identifier[din] . identifier[index] = identifier[range] ( literal[int] , identifier[len] ( identifier[din] ), literal[int] )
identifier[chunkrange] = identifier[list] ( identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[din] ), identifier[chunksize] ))
identifier[chunkrange] = identifier[list] ( identifier[zip] ([ identifier[c] + literal[int] keyword[if] identifier[ci] != literal[int] keyword[else] literal[int] keyword[for] identifier[ci] , identifier[c] keyword[in] identifier[enumerate] ( identifier[chunkrange] )], identifier[chunkrange] [ literal[int] :]+[ identifier[len] ( identifier[din] )- literal[int] ]))
identifier[chunk2range] ={}
keyword[for] identifier[ri] , identifier[r] keyword[in] identifier[enumerate] ( identifier[chunkrange] ):
identifier[chunk2range] [ identifier[ri] + literal[int] ]= identifier[r]
keyword[if] keyword[not] identifier[exists] ( identifier[outd] ):
identifier[makedirs] ( identifier[outd] )
identifier[chunks] =[]
identifier[chunkps] =[]
keyword[for] identifier[chunk] keyword[in] identifier[chunk2range] :
identifier[chunkp] = literal[string] . identifier[format] ( identifier[outd] , identifier[fn] , identifier[chunk] )
identifier[rnge] = identifier[chunk2range] [ identifier[chunk] ]
identifier[din_] = identifier[din] . identifier[loc] [ identifier[rnge] [ literal[int] ]: identifier[rnge] [ literal[int] ],:]
keyword[if] keyword[not] identifier[exists] ( identifier[chunkp] ) keyword[or] identifier[force] :
keyword[if] identifier[return_fmt] == literal[string] :
identifier[chunks] . identifier[append] ( identifier[din_] )
keyword[else] :
identifier[din_] . identifier[to_csv] ( identifier[chunkp] , identifier[sep] = identifier[return_fmt] )
keyword[del] identifier[din_]
identifier[chunkps] . identifier[append] ( identifier[chunkp] )
keyword[if] identifier[return_fmt] == literal[string] :
keyword[return] identifier[chunks]
keyword[else] :
keyword[return] identifier[chunkps] | def df2chucks(din, chunksize, outd, fn, return_fmt='\t', force=False):
"""
:param return_fmt: ' ': tab-sep file, lly, '.', 'list': returns a list
"""
    from os.path import exists
from os import makedirs
din.index = range(0, len(din), 1)
chunkrange = list(np.arange(0, len(din), chunksize))
chunkrange = list(zip([c + 1 if ci != 0 else 0 for (ci, c) in enumerate(chunkrange)], chunkrange[1:] + [len(din) - 1]))
chunk2range = {}
for (ri, r) in enumerate(chunkrange):
chunk2range[ri + 1] = r # depends on [control=['for'], data=[]]
if not exists(outd):
makedirs(outd) # depends on [control=['if'], data=[]]
chunks = []
chunkps = []
for chunk in chunk2range:
chunkp = '{}/{}_chunk{:08d}.tsv'.format(outd, fn, chunk)
rnge = chunk2range[chunk]
din_ = din.loc[rnge[0]:rnge[1], :]
if not exists(chunkp) or force:
if return_fmt == 'list':
chunks.append(din_) # depends on [control=['if'], data=[]]
else:
din_.to_csv(chunkp, sep=return_fmt)
del din_ # depends on [control=['if'], data=[]]
chunkps.append(chunkp) # depends on [control=['for'], data=['chunk']]
if return_fmt == 'list':
return chunks # depends on [control=['if'], data=[]]
else:
return chunkps |
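A small runnable example with return_fmt='list', which keeps the chunks in memory instead of writing TSV files; it assumes df2chucks (and the numpy import it relies on) is in scope. Because .loc slicing is inclusive, the first chunk carries chunksize + 1 rows:

import numpy as np
import pandas as pd

din = pd.DataFrame({'x': range(10)})
chunks = df2chucks(din, chunksize=4, outd='/tmp/chunks_demo', fn='demo', return_fmt='list')
print([len(c) for c in chunks])  # -> [5, 4, 1]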
def requires_submit(func):
"""
Decorator to ensure that a submit has been performed before
calling the method.
Args:
        func (callable): function to be decorated.
Returns:
callable: the decorated function.
"""
@functools.wraps(func)
def _wrapper(self, *args, **kwargs):
if self._future is None:
raise JobError("Job not submitted yet!. You have to .submit() first!")
return func(self, *args, **kwargs)
return _wrapper | def function[requires_submit, parameter[func]]:
constant[
Decorator to ensure that a submit has been performed before
calling the method.
Args:
    func (callable): function to be decorated.
Returns:
callable: the decorated function.
]
def function[_wrapper, parameter[self]]:
if compare[name[self]._future is constant[None]] begin[:]
<ast.Raise object at 0x7da1b05323e0>
return[call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b0533850>]]]
return[name[_wrapper]] | keyword[def] identifier[requires_submit] ( identifier[func] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[_wrapper] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[self] . identifier[_future] keyword[is] keyword[None] :
keyword[raise] identifier[JobError] ( literal[string] )
keyword[return] identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[_wrapper] | def requires_submit(func):
"""
Decorator to ensure that a submit has been performed before
calling the method.
Args:
func (callable): test function to be decorated.
Returns:
callable: the decorated function.
"""
@functools.wraps(func)
def _wrapper(self, *args, **kwargs):
if self._future is None:
            raise JobError('Job not submitted yet! You have to call .submit() first!') # depends on [control=['if'], data=[]]
return func(self, *args, **kwargs)
return _wrapper |
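A self-contained sketch of the decorator guarding a method; JobError and the job class below are stand-ins, not the real types:

import functools

class JobError(Exception):  # stand-in for the real exception type
    pass

class DemoJob:
    def __init__(self):
        self._future = None

    def submit(self):
        self._future = object()  # pretend a future was created

    @requires_submit
    def result(self):
        return 'done'

job = DemoJob()
try:
    job.result()  # raises: .submit() has not run yet
except JobError as exc:
    print(exc)
job.submit()
print(job.result())  # -> 'done'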
def set_logging_settings(profile, setting, value, store='local'):
r'''
Configure logging settings for the Windows firewall.
.. versionadded:: 2018.3.4
.. versionadded:: 2019.2.0
Args:
profile (str):
The firewall profile to configure. Valid options are:
- domain
- public
- private
setting (str):
The logging setting to configure. Valid options are:
- allowedconnections
- droppedconnections
- filename
- maxfilesize
value (str):
The value to apply to the setting. Valid values are dependent upon
the setting being configured. Valid options are:
allowedconnections:
- enable
- disable
- notconfigured
droppedconnections:
- enable
- disable
- notconfigured
filename:
- Full path and name of the firewall log file
- notconfigured
maxfilesize:
- 1 - 32767
- notconfigured
.. note::
``notconfigured`` can only be used when using the lgpo store
store (str):
The store to use. This is either the local firewall policy or the
policy defined by local group policy. Valid options are:
- lgpo
- local
Default is ``local``
Returns:
bool: ``True`` if successful
Raises:
CommandExecutionError: If an error occurs
ValueError: If the parameters are incorrect
CLI Example:
.. code-block:: bash
# Log allowed connections and set that in local group policy
salt * firewall.set_logging_settings domain allowedconnections enable lgpo
# Don't log dropped connections
salt * firewall.set_logging_settings profile=private setting=droppedconnections value=disable
# Set the location of the log file
salt * firewall.set_logging_settings domain filename C:\windows\logs\firewall.log
# You can also use environment variables
salt * firewall.set_logging_settings domain filename %systemroot%\system32\LogFiles\Firewall\pfirewall.log
# Set the max file size of the log to 2048 Kb
salt * firewall.set_logging_settings domain maxfilesize 2048
'''
return salt.utils.win_lgpo_netsh.set_logging_settings(profile=profile,
setting=setting,
value=value,
store=store) | def function[set_logging_settings, parameter[profile, setting, value, store]]:
constant[
Configure logging settings for the Windows firewall.
.. versionadded:: 2018.3.4
.. versionadded:: 2019.2.0
Args:
profile (str):
The firewall profile to configure. Valid options are:
- domain
- public
- private
setting (str):
The logging setting to configure. Valid options are:
- allowedconnections
- droppedconnections
- filename
- maxfilesize
value (str):
The value to apply to the setting. Valid values are dependent upon
the setting being configured. Valid options are:
allowedconnections:
- enable
- disable
- notconfigured
droppedconnections:
- enable
- disable
- notconfigured
filename:
- Full path and name of the firewall log file
- notconfigured
maxfilesize:
- 1 - 32767
- notconfigured
.. note::
``notconfigured`` can only be used when using the lgpo store
store (str):
The store to use. This is either the local firewall policy or the
policy defined by local group policy. Valid options are:
- lgpo
- local
Default is ``local``
Returns:
bool: ``True`` if successful
Raises:
CommandExecutionError: If an error occurs
ValueError: If the parameters are incorrect
CLI Example:
.. code-block:: bash
# Log allowed connections and set that in local group policy
salt * firewall.set_logging_settings domain allowedconnections enable lgpo
# Don't log dropped connections
salt * firewall.set_logging_settings profile=private setting=droppedconnections value=disable
# Set the location of the log file
salt * firewall.set_logging_settings domain filename C:\windows\logs\firewall.log
# You can also use environment variables
salt * firewall.set_logging_settings domain filename %systemroot%\system32\LogFiles\Firewall\pfirewall.log
# Set the max file size of the log to 2048 Kb
salt * firewall.set_logging_settings domain maxfilesize 2048
]
return[call[name[salt].utils.win_lgpo_netsh.set_logging_settings, parameter[]]] | keyword[def] identifier[set_logging_settings] ( identifier[profile] , identifier[setting] , identifier[value] , identifier[store] = literal[string] ):
literal[string]
keyword[return] identifier[salt] . identifier[utils] . identifier[win_lgpo_netsh] . identifier[set_logging_settings] ( identifier[profile] = identifier[profile] ,
identifier[setting] = identifier[setting] ,
identifier[value] = identifier[value] ,
identifier[store] = identifier[store] ) | def set_logging_settings(profile, setting, value, store='local'):
"""
Configure logging settings for the Windows firewall.
.. versionadded:: 2018.3.4
.. versionadded:: 2019.2.0
Args:
profile (str):
The firewall profile to configure. Valid options are:
- domain
- public
- private
setting (str):
The logging setting to configure. Valid options are:
- allowedconnections
- droppedconnections
- filename
- maxfilesize
value (str):
The value to apply to the setting. Valid values are dependent upon
the setting being configured. Valid options are:
allowedconnections:
- enable
- disable
- notconfigured
droppedconnections:
- enable
- disable
- notconfigured
filename:
- Full path and name of the firewall log file
- notconfigured
maxfilesize:
- 1 - 32767
- notconfigured
.. note::
``notconfigured`` can only be used when using the lgpo store
store (str):
The store to use. This is either the local firewall policy or the
policy defined by local group policy. Valid options are:
- lgpo
- local
Default is ``local``
Returns:
bool: ``True`` if successful
Raises:
CommandExecutionError: If an error occurs
ValueError: If the parameters are incorrect
CLI Example:
.. code-block:: bash
# Log allowed connections and set that in local group policy
salt * firewall.set_logging_settings domain allowedconnections enable lgpo
# Don't log dropped connections
salt * firewall.set_logging_settings profile=private setting=droppedconnections value=disable
# Set the location of the log file
salt * firewall.set_logging_settings domain filename C:\\windows\\logs\\firewall.log
# You can also use environment variables
salt * firewall.set_logging_settings domain filename %systemroot%\\system32\\LogFiles\\Firewall\\pfirewall.log
# Set the max file size of the log to 2048 Kb
salt * firewall.set_logging_settings domain maxfilesize 2048
"""
return salt.utils.win_lgpo_netsh.set_logging_settings(profile=profile, setting=setting, value=value, store=store) |
def get_search_fields(self):
"""Return list of lookup names."""
if self.search_fields:
return self.search_fields
raise NotImplementedError('%s, must implement "search_fields".' % self.__class__.__name__) | def function[get_search_fields, parameter[self]]:
constant[Return list of lookup names.]
if name[self].search_fields begin[:]
return[name[self].search_fields]
<ast.Raise object at 0x7da18fe92d10> | keyword[def] identifier[get_search_fields] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[search_fields] :
keyword[return] identifier[self] . identifier[search_fields]
keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] ) | def get_search_fields(self):
"""Return list of lookup names."""
if self.search_fields:
return self.search_fields # depends on [control=['if'], data=[]]
raise NotImplementedError('%s, must implement "search_fields".' % self.__class__.__name__) |
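A tiny illustration of the hook: subclasses either set search_fields or get a loud failure. The base class below restates the method so the snippet runs on its own; the subclass is hypothetical.

class BaseSearchView:
    search_fields = None

    def get_search_fields(self):
        """Return list of lookup names."""
        if self.search_fields:
            return self.search_fields
        raise NotImplementedError('%s, must implement "search_fields".' % self.__class__.__name__)

class BookSearchView(BaseSearchView):  # hypothetical subclass
    search_fields = ['title__icontains', 'author__name__icontains']

print(BookSearchView().get_search_fields())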
def add_jars_for_targets(self, targets, conf, resolved_jars):
"""Adds jar classpath elements to the products of the provided targets.
The resolved jars are added in a way that works with excludes.
:param targets: The targets to add the jars for.
:param conf: The configuration.
:param resolved_jars: A list of ResolvedJars.
"""
classpath_entries = []
for jar in resolved_jars:
if not jar.pants_path:
raise TaskError('Jar: {!s} has no specified path.'.format(jar.coordinate))
cp_entry = ArtifactClasspathEntry(jar.pants_path, jar.coordinate, jar.cache_path, jar.directory_digest)
classpath_entries.append((conf, cp_entry))
for target in targets:
self._add_elements_for_target(target, classpath_entries) | def function[add_jars_for_targets, parameter[self, targets, conf, resolved_jars]]:
constant[Adds jar classpath elements to the products of the provided targets.
The resolved jars are added in a way that works with excludes.
:param targets: The targets to add the jars for.
:param conf: The configuration.
:param resolved_jars: A list of ResolvedJars.
]
variable[classpath_entries] assign[=] list[[]]
for taget[name[jar]] in starred[name[resolved_jars]] begin[:]
if <ast.UnaryOp object at 0x7da1b22a6230> begin[:]
<ast.Raise object at 0x7da1b22a47c0>
variable[cp_entry] assign[=] call[name[ArtifactClasspathEntry], parameter[name[jar].pants_path, name[jar].coordinate, name[jar].cache_path, name[jar].directory_digest]]
call[name[classpath_entries].append, parameter[tuple[[<ast.Name object at 0x7da1b227b610>, <ast.Name object at 0x7da1b2279090>]]]]
for taget[name[target]] in starred[name[targets]] begin[:]
call[name[self]._add_elements_for_target, parameter[name[target], name[classpath_entries]]] | keyword[def] identifier[add_jars_for_targets] ( identifier[self] , identifier[targets] , identifier[conf] , identifier[resolved_jars] ):
literal[string]
identifier[classpath_entries] =[]
keyword[for] identifier[jar] keyword[in] identifier[resolved_jars] :
keyword[if] keyword[not] identifier[jar] . identifier[pants_path] :
keyword[raise] identifier[TaskError] ( literal[string] . identifier[format] ( identifier[jar] . identifier[coordinate] ))
identifier[cp_entry] = identifier[ArtifactClasspathEntry] ( identifier[jar] . identifier[pants_path] , identifier[jar] . identifier[coordinate] , identifier[jar] . identifier[cache_path] , identifier[jar] . identifier[directory_digest] )
identifier[classpath_entries] . identifier[append] (( identifier[conf] , identifier[cp_entry] ))
keyword[for] identifier[target] keyword[in] identifier[targets] :
identifier[self] . identifier[_add_elements_for_target] ( identifier[target] , identifier[classpath_entries] ) | def add_jars_for_targets(self, targets, conf, resolved_jars):
"""Adds jar classpath elements to the products of the provided targets.
The resolved jars are added in a way that works with excludes.
:param targets: The targets to add the jars for.
:param conf: The configuration.
:param resolved_jars: A list of ResolvedJars.
"""
classpath_entries = []
for jar in resolved_jars:
if not jar.pants_path:
raise TaskError('Jar: {!s} has no specified path.'.format(jar.coordinate)) # depends on [control=['if'], data=[]]
cp_entry = ArtifactClasspathEntry(jar.pants_path, jar.coordinate, jar.cache_path, jar.directory_digest)
classpath_entries.append((conf, cp_entry)) # depends on [control=['for'], data=['jar']]
for target in targets:
self._add_elements_for_target(target, classpath_entries) # depends on [control=['for'], data=['target']] |
def create_proxy_model(self, model):
"""Create a sort filter proxy model for the given model
:param model: the model to wrap in a proxy
:type model: :class:`QtGui.QAbstractItemModel`
:returns: a new proxy model that can be used for sorting and filtering
:rtype: :class:`QtGui.QAbstractItemModel`
:raises: None
"""
proxy = ReftrackSortFilterModel(self)
proxy.setSourceModel(model)
model.rowsInserted.connect(self.sort_model)
return proxy | def function[create_proxy_model, parameter[self, model]]:
constant[Create a sort filter proxy model for the given model
:param model: the model to wrap in a proxy
:type model: :class:`QtGui.QAbstractItemModel`
:returns: a new proxy model that can be used for sorting and filtering
:rtype: :class:`QtGui.QAbstractItemModel`
:raises: None
]
variable[proxy] assign[=] call[name[ReftrackSortFilterModel], parameter[name[self]]]
call[name[proxy].setSourceModel, parameter[name[model]]]
call[name[model].rowsInserted.connect, parameter[name[self].sort_model]]
return[name[proxy]] | keyword[def] identifier[create_proxy_model] ( identifier[self] , identifier[model] ):
literal[string]
identifier[proxy] = identifier[ReftrackSortFilterModel] ( identifier[self] )
identifier[proxy] . identifier[setSourceModel] ( identifier[model] )
identifier[model] . identifier[rowsInserted] . identifier[connect] ( identifier[self] . identifier[sort_model] )
keyword[return] identifier[proxy] | def create_proxy_model(self, model):
"""Create a sort filter proxy model for the given model
:param model: the model to wrap in a proxy
:type model: :class:`QtGui.QAbstractItemModel`
:returns: a new proxy model that can be used for sorting and filtering
:rtype: :class:`QtGui.QAbstractItemModel`
:raises: None
"""
proxy = ReftrackSortFilterModel(self)
proxy.setSourceModel(model)
model.rowsInserted.connect(self.sort_model)
return proxy |
def map(self, func, *iterables, **kwargs):
"""Apply *func* to the elements of the sequences in *iterables*.
All invocations of *func* are run in the pool. If multiple iterables
are provided, then *func* must take this many arguments, and is applied
with one element from each iterable. All iterables must yield the same
number of elements.
An optional *timeout* keyword argument may be provided to specify a
timeout.
This returns a generator yielding the results.
"""
with self._lock:
if self._closing:
raise RuntimeError('pool is closing/closed')
timeout = kwargs.pop('timeout', None)
futures = []
for args in zip(*iterables):
result = Future()
self._queue.put_nowait((func, args, result))
futures.append(result)
self._spawn_workers()
try:
with switch_back(timeout):
for future in futures:
yield future.result()
except Exception:
# Timeout, GeneratorExit or future.set_exception()
for future in futures:
if not future.done():
future.cancel()
raise | def function[map, parameter[self, func]]:
constant[Apply *func* to the elements of the sequences in *iterables*.
All invocations of *func* are run in the pool. If multiple iterables
are provided, then *func* must take this many arguments, and is applied
with one element from each iterable. All iterables must yield the same
number of elements.
An optional *timeout* keyword argument may be provided to specify a
timeout.
This returns a generator yielding the results.
]
with name[self]._lock begin[:]
if name[self]._closing begin[:]
<ast.Raise object at 0x7da1b02e7400>
variable[timeout] assign[=] call[name[kwargs].pop, parameter[constant[timeout], constant[None]]]
variable[futures] assign[=] list[[]]
for taget[name[args]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da20c991270>]]] begin[:]
variable[result] assign[=] call[name[Future], parameter[]]
call[name[self]._queue.put_nowait, parameter[tuple[[<ast.Name object at 0x7da1b033b1c0>, <ast.Name object at 0x7da1b033a6b0>, <ast.Name object at 0x7da1b033bb50>]]]]
call[name[futures].append, parameter[name[result]]]
call[name[self]._spawn_workers, parameter[]]
<ast.Try object at 0x7da1b033a560> | keyword[def] identifier[map] ( identifier[self] , identifier[func] ,* identifier[iterables] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[self] . identifier[_lock] :
keyword[if] identifier[self] . identifier[_closing] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[timeout] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[futures] =[]
keyword[for] identifier[args] keyword[in] identifier[zip] (* identifier[iterables] ):
identifier[result] = identifier[Future] ()
identifier[self] . identifier[_queue] . identifier[put_nowait] (( identifier[func] , identifier[args] , identifier[result] ))
identifier[futures] . identifier[append] ( identifier[result] )
identifier[self] . identifier[_spawn_workers] ()
keyword[try] :
keyword[with] identifier[switch_back] ( identifier[timeout] ):
keyword[for] identifier[future] keyword[in] identifier[futures] :
keyword[yield] identifier[future] . identifier[result] ()
keyword[except] identifier[Exception] :
keyword[for] identifier[future] keyword[in] identifier[futures] :
keyword[if] keyword[not] identifier[future] . identifier[done] ():
identifier[future] . identifier[cancel] ()
keyword[raise] | def map(self, func, *iterables, **kwargs):
"""Apply *func* to the elements of the sequences in *iterables*.
All invocations of *func* are run in the pool. If multiple iterables
are provided, then *func* must take this many arguments, and is applied
with one element from each iterable. All iterables must yield the same
number of elements.
An optional *timeout* keyword argument may be provided to specify a
timeout.
This returns a generator yielding the results.
"""
with self._lock:
if self._closing:
raise RuntimeError('pool is closing/closed') # depends on [control=['if'], data=[]]
timeout = kwargs.pop('timeout', None)
futures = []
for args in zip(*iterables):
result = Future()
self._queue.put_nowait((func, args, result))
futures.append(result) # depends on [control=['for'], data=['args']]
self._spawn_workers() # depends on [control=['with'], data=[]]
try:
with switch_back(timeout):
for future in futures:
yield future.result() # depends on [control=['for'], data=['future']] # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except Exception:
# Timeout, GeneratorExit or future.set_exception()
for future in futures:
if not future.done():
future.cancel() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['future']]
raise # depends on [control=['except'], data=[]] |
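For comparison, the standard library offers the same fan-out map with a timeout; this is the concurrent.futures analogue, plainly a different class than the Pool above:

from concurrent.futures import ThreadPoolExecutor

def add(a, b):
    return a + b

with ThreadPoolExecutor(max_workers=4) as pool:
    for value in pool.map(add, [1, 2, 3], [10, 20, 30], timeout=5):
        print(value)  # 11, 21, 31 -- one result per zipped argument tuple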
def mac_app_exists(app):
'''Check if 'app' is installed (OS X).
    Check if the given application is installed on this OS X system.
Args:
app (str): The application name.
Returns:
bool: Is the app installed or not?
'''
APP_CHECK_APPLESCRIPT = '''try
tell application "Finder"
set appname to name of application file id "%s"
return 0
end tell
on error err_msg number err_num
return 1
end try'''
with open('/tmp/app_check.AppleScript', 'w') as f:
f.write(APP_CHECK_APPLESCRIPT % app)
app_check_proc = sp.Popen(
['osascript', '-e', '/tmp/app_check.AppleScript'])
if app_check_proc.wait() != 0:
return False
else:
return True | def function[mac_app_exists, parameter[app]]:
constant[Check if 'app' is installed (OS X).
    Check if the given application is installed on this OS X system.
Args:
app (str): The application name.
Returns:
bool: Is the app installed or not?
]
variable[APP_CHECK_APPLESCRIPT] assign[=] constant[try
tell application "Finder"
set appname to name of application file id "%s"
return 0
end tell
on error err_msg number err_num
return 1
end try]
with call[name[open], parameter[constant[/tmp/app_check.AppleScript], constant[w]]] begin[:]
call[name[f].write, parameter[binary_operation[name[APP_CHECK_APPLESCRIPT] <ast.Mod object at 0x7da2590d6920> name[app]]]]
variable[app_check_proc] assign[=] call[name[sp].Popen, parameter[list[[<ast.Constant object at 0x7da1b0a2e1a0>, <ast.Constant object at 0x7da1b0a2f400>, <ast.Constant object at 0x7da1b0a2fc10>]]]]
if compare[call[name[app_check_proc].wait, parameter[]] not_equal[!=] constant[0]] begin[:]
return[constant[False]] | keyword[def] identifier[mac_app_exists] ( identifier[app] ):
literal[string]
identifier[APP_CHECK_APPLESCRIPT] = literal[string]
keyword[with] identifier[open] ( literal[string] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[APP_CHECK_APPLESCRIPT] % identifier[app] )
identifier[app_check_proc] = identifier[sp] . identifier[Popen] (
[ literal[string] , literal[string] , literal[string] ])
keyword[if] identifier[app_check_proc] . identifier[wait] ()!= literal[int] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[True] | def mac_app_exists(app):
"""Check if 'app' is installed (OS X).
Check if the given applications is installed on this OS X system.
Args:
app (str): The application name.
Returns:
bool: Is the app installed or not?
"""
APP_CHECK_APPLESCRIPT = 'try\n\ttell application "Finder"\n\t\tset appname to name of application file id "%s"\n\t\treturn 0\n\tend tell\n\ton error err_msg number err_num\n\t\treturn 1\n\tend try'
with open('/tmp/app_check.AppleScript', 'w') as f:
f.write(APP_CHECK_APPLESCRIPT % app) # depends on [control=['with'], data=['f']]
app_check_proc = sp.Popen(['osascript', '-e', '/tmp/app_check.AppleScript'])
if app_check_proc.wait() != 0:
return False # depends on [control=['if'], data=[]]
else:
return True |
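The Finder lookup is by bundle identifier, so a call looks like the sketch below ('com.apple.Safari' is just an example id; macOS only). One caveat worth flagging: osascript's -e flag treats its argument as script text, so executing the generated file likely needs the path passed without '-e', i.e. sp.Popen(['osascript', '/tmp/app_check.AppleScript']); that reading is an editorial observation, not verified against this codebase.

if mac_app_exists('com.apple.Safari'):
    print('Safari is installed')
else:
    print('Safari not found')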
def file_dict(*packages, **kwargs):
'''
    List the files that belong to a package, grouped by package. At least one
    package name must be provided; calling this with no packages returns an
    error string instead of walking the whole package database.
    CLI Examples:
    .. code-block:: bash
        salt '*' pkg.file_dict httpd
        salt '*' pkg.file_dict httpd postfix
'''
errors = []
ret = {}
cmd_files = ['apk', 'info', '-L']
if not packages:
return 'Package name should be provided'
for package in packages:
files = []
cmd = cmd_files[:]
cmd.append(package)
out = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
python_shell=False)
for line in out['stdout'].splitlines():
if line.endswith('contains:'):
continue
else:
files.append(line)
if files:
ret[package] = files
return {'errors': errors, 'packages': ret} | def function[file_dict, parameter[]]:
constant[
    List the files that belong to a package, grouped by package. At least one
    package name must be provided; calling this with no packages returns an
    error string instead of walking the whole package database.
    CLI Examples:
    .. code-block:: bash
    salt '*' pkg.file_dict httpd
    salt '*' pkg.file_dict httpd postfix
]
variable[errors] assign[=] list[[]]
variable[ret] assign[=] dictionary[[], []]
variable[cmd_files] assign[=] list[[<ast.Constant object at 0x7da207f992d0>, <ast.Constant object at 0x7da207f99210>, <ast.Constant object at 0x7da207f9a980>]]
if <ast.UnaryOp object at 0x7da207f98550> begin[:]
return[constant[Package name should be provided]]
for taget[name[package]] in starred[name[packages]] begin[:]
variable[files] assign[=] list[[]]
variable[cmd] assign[=] call[name[cmd_files]][<ast.Slice object at 0x7da1b21ef0d0>]
call[name[cmd].append, parameter[name[package]]]
variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]
for taget[name[line]] in starred[call[call[name[out]][constant[stdout]].splitlines, parameter[]]] begin[:]
if call[name[line].endswith, parameter[constant[contains:]]] begin[:]
continue
if name[files] begin[:]
call[name[ret]][name[package]] assign[=] name[files]
return[dictionary[[<ast.Constant object at 0x7da1b21ef550>, <ast.Constant object at 0x7da1b21edb40>], [<ast.Name object at 0x7da1b21ed5d0>, <ast.Name object at 0x7da1b21ed810>]]] | keyword[def] identifier[file_dict] (* identifier[packages] ,** identifier[kwargs] ):
literal[string]
identifier[errors] =[]
identifier[ret] ={}
identifier[cmd_files] =[ literal[string] , literal[string] , literal[string] ]
keyword[if] keyword[not] identifier[packages] :
keyword[return] literal[string]
keyword[for] identifier[package] keyword[in] identifier[packages] :
identifier[files] =[]
identifier[cmd] = identifier[cmd_files] [:]
identifier[cmd] . identifier[append] ( identifier[package] )
identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] ,
identifier[output_loglevel] = literal[string] ,
identifier[python_shell] = keyword[False] )
keyword[for] identifier[line] keyword[in] identifier[out] [ literal[string] ]. identifier[splitlines] ():
keyword[if] identifier[line] . identifier[endswith] ( literal[string] ):
keyword[continue]
keyword[else] :
identifier[files] . identifier[append] ( identifier[line] )
keyword[if] identifier[files] :
identifier[ret] [ identifier[package] ]= identifier[files]
keyword[return] { literal[string] : identifier[errors] , literal[string] : identifier[ret] } | def file_dict(*packages, **kwargs):
"""
List the files that belong to a package, grouped by package. Not
specifying any packages will return a list of _every_ file on the system's
package database (not generally recommended).
CLI Examples:
.. code-block:: bash
salt '*' pkg.file_list httpd
salt '*' pkg.file_list httpd postfix
salt '*' pkg.file_list
"""
errors = []
ret = {}
cmd_files = ['apk', 'info', '-L']
if not packages:
return 'Package name should be provided' # depends on [control=['if'], data=[]]
for package in packages:
files = []
cmd = cmd_files[:]
cmd.append(package)
out = __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False)
for line in out['stdout'].splitlines():
if line.endswith('contains:'):
continue # depends on [control=['if'], data=[]]
else:
files.append(line) # depends on [control=['for'], data=['line']]
if files:
ret[package] = files # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['package']]
return {'errors': errors, 'packages': ret} |
def set_evernote_filter(self, date_triggered, trigger):
"""
build the filter that will be used by evernote
:param date_triggered:
:param trigger:
:return: filter
"""
new_date_triggered = arrow.get(str(date_triggered)[:-6],
'YYYY-MM-DD HH:mm:ss')
new_date_triggered = str(new_date_triggered).replace(
':', '').replace('-', '').replace(' ', '')
date_filter = "created:{} ".format(new_date_triggered[:-6])
notebook_filter = ''
if trigger.notebook:
notebook_filter = "notebook:{} ".format(trigger.notebook)
tag_filter = "tag:{} ".format(trigger.tag) if trigger.tag != '' else ''
complet_filter = ''.join((notebook_filter, tag_filter, date_filter))
return complet_filter | def function[set_evernote_filter, parameter[self, date_triggered, trigger]]:
constant[
build the filter that will be used by evernote
:param date_triggered:
:param trigger:
:return: filter
]
variable[new_date_triggered] assign[=] call[name[arrow].get, parameter[call[call[name[str], parameter[name[date_triggered]]]][<ast.Slice object at 0x7da20c6e4640>], constant[YYYY-MM-DD HH:mm:ss]]]
variable[new_date_triggered] assign[=] call[call[call[call[name[str], parameter[name[new_date_triggered]]].replace, parameter[constant[:], constant[]]].replace, parameter[constant[-], constant[]]].replace, parameter[constant[ ], constant[]]]
variable[date_filter] assign[=] call[constant[created:{} ].format, parameter[call[name[new_date_triggered]][<ast.Slice object at 0x7da20c6e5d20>]]]
variable[notebook_filter] assign[=] constant[]
if name[trigger].notebook begin[:]
variable[notebook_filter] assign[=] call[constant[notebook:{} ].format, parameter[name[trigger].notebook]]
variable[tag_filter] assign[=] <ast.IfExp object at 0x7da20c6e5cc0>
variable[complet_filter] assign[=] call[constant[].join, parameter[tuple[[<ast.Name object at 0x7da20c6e7fa0>, <ast.Name object at 0x7da20c6e7670>, <ast.Name object at 0x7da20c6e5570>]]]]
return[name[complet_filter]] | keyword[def] identifier[set_evernote_filter] ( identifier[self] , identifier[date_triggered] , identifier[trigger] ):
literal[string]
identifier[new_date_triggered] = identifier[arrow] . identifier[get] ( identifier[str] ( identifier[date_triggered] )[:- literal[int] ],
literal[string] )
identifier[new_date_triggered] = identifier[str] ( identifier[new_date_triggered] ). identifier[replace] (
literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[date_filter] = literal[string] . identifier[format] ( identifier[new_date_triggered] [:- literal[int] ])
identifier[notebook_filter] = literal[string]
keyword[if] identifier[trigger] . identifier[notebook] :
identifier[notebook_filter] = literal[string] . identifier[format] ( identifier[trigger] . identifier[notebook] )
identifier[tag_filter] = literal[string] . identifier[format] ( identifier[trigger] . identifier[tag] ) keyword[if] identifier[trigger] . identifier[tag] != literal[string] keyword[else] literal[string]
identifier[complet_filter] = literal[string] . identifier[join] (( identifier[notebook_filter] , identifier[tag_filter] , identifier[date_filter] ))
keyword[return] identifier[complet_filter] | def set_evernote_filter(self, date_triggered, trigger):
"""
build the filter that will be used by evernote
:param date_triggered:
:param trigger:
:return: filter
"""
new_date_triggered = arrow.get(str(date_triggered)[:-6], 'YYYY-MM-DD HH:mm:ss')
new_date_triggered = str(new_date_triggered).replace(':', '').replace('-', '').replace(' ', '')
date_filter = 'created:{} '.format(new_date_triggered[:-6])
notebook_filter = ''
if trigger.notebook:
notebook_filter = 'notebook:{} '.format(trigger.notebook) # depends on [control=['if'], data=[]]
tag_filter = 'tag:{} '.format(trigger.tag) if trigger.tag != '' else ''
complet_filter = ''.join((notebook_filter, tag_filter, date_filter))
return complet_filter |
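A standalone check of the filter string this builds; the trigger is faked with SimpleNamespace, self is unused by the method body so None suffices, and the arrow package must be installed:

from types import SimpleNamespace

trigger = SimpleNamespace(notebook='inbox', tag='todo')
query = set_evernote_filter(None, '2018-01-01 12:00:00+00:00', trigger)
print(query)  # e.g. 'notebook:inbox tag:todo created:...'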
def process_request(self, request):
"""
Process a Django request and authenticate users.
If a JWT authentication header is detected and it is determined to be valid, the user is set as
``request.user`` and CSRF protection is disabled (``request._dont_enforce_csrf_checks = True``) on
the request.
:param request: Django Request instance
"""
if 'HTTP_AUTHORIZATION' not in request.META:
return
try:
method, claim = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
except ValueError:
return
if method.upper() != AUTH_METHOD:
return
username = token.get_claimed_username(claim)
if not username:
return
User = get_user_model()
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return
claim_data = None
for public in user.public_keys.all():
claim_data = token.verify(claim, public.key, validate_nonce=self.validate_nonce)
if claim_data:
break
if not claim_data:
return
logger.debug('Successfully authenticated %s using JWT', user.username)
request._dont_enforce_csrf_checks = True
request.user = user | def function[process_request, parameter[self, request]]:
constant[
Process a Django request and authenticate users.
If a JWT authentication header is detected and it is determined to be valid, the user is set as
``request.user`` and CSRF protection is disabled (``request._dont_enforce_csrf_checks = True``) on
the request.
:param request: Django Request instance
]
if compare[constant[HTTP_AUTHORIZATION] <ast.NotIn object at 0x7da2590d7190> name[request].META] begin[:]
return[None]
<ast.Try object at 0x7da2041db520>
if compare[call[name[method].upper, parameter[]] not_equal[!=] name[AUTH_METHOD]] begin[:]
return[None]
variable[username] assign[=] call[name[token].get_claimed_username, parameter[name[claim]]]
if <ast.UnaryOp object at 0x7da1b1b7f100> begin[:]
return[None]
variable[User] assign[=] call[name[get_user_model], parameter[]]
<ast.Try object at 0x7da1b1b7e7a0>
variable[claim_data] assign[=] constant[None]
for taget[name[public]] in starred[call[name[user].public_keys.all, parameter[]]] begin[:]
variable[claim_data] assign[=] call[name[token].verify, parameter[name[claim], name[public].key]]
if name[claim_data] begin[:]
break
if <ast.UnaryOp object at 0x7da18c4cd2d0> begin[:]
return[None]
call[name[logger].debug, parameter[constant[Successfully authenticated %s using JWT], name[user].username]]
name[request]._dont_enforce_csrf_checks assign[=] constant[True]
name[request].user assign[=] name[user] | keyword[def] identifier[process_request] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[request] . identifier[META] :
keyword[return]
keyword[try] :
identifier[method] , identifier[claim] = identifier[request] . identifier[META] [ literal[string] ]. identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[return]
keyword[if] identifier[method] . identifier[upper] ()!= identifier[AUTH_METHOD] :
keyword[return]
identifier[username] = identifier[token] . identifier[get_claimed_username] ( identifier[claim] )
keyword[if] keyword[not] identifier[username] :
keyword[return]
identifier[User] = identifier[get_user_model] ()
keyword[try] :
identifier[user] = identifier[User] . identifier[objects] . identifier[get] ( identifier[username] = identifier[username] )
keyword[except] identifier[User] . identifier[DoesNotExist] :
keyword[return]
identifier[claim_data] = keyword[None]
keyword[for] identifier[public] keyword[in] identifier[user] . identifier[public_keys] . identifier[all] ():
identifier[claim_data] = identifier[token] . identifier[verify] ( identifier[claim] , identifier[public] . identifier[key] , identifier[validate_nonce] = identifier[self] . identifier[validate_nonce] )
keyword[if] identifier[claim_data] :
keyword[break]
keyword[if] keyword[not] identifier[claim_data] :
keyword[return]
identifier[logger] . identifier[debug] ( literal[string] , identifier[user] . identifier[username] )
identifier[request] . identifier[_dont_enforce_csrf_checks] = keyword[True]
identifier[request] . identifier[user] = identifier[user] | def process_request(self, request):
"""
Process a Django request and authenticate users.
If a JWT authentication header is detected and it is determined to be valid, the user is set as
``request.user`` and CSRF protection is disabled (``request._dont_enforce_csrf_checks = True``) on
the request.
:param request: Django Request instance
"""
if 'HTTP_AUTHORIZATION' not in request.META:
return # depends on [control=['if'], data=[]]
try:
(method, claim) = request.META['HTTP_AUTHORIZATION'].split(' ', 1) # depends on [control=['try'], data=[]]
except ValueError:
return # depends on [control=['except'], data=[]]
if method.upper() != AUTH_METHOD:
return # depends on [control=['if'], data=[]]
username = token.get_claimed_username(claim)
if not username:
return # depends on [control=['if'], data=[]]
User = get_user_model()
try:
user = User.objects.get(username=username) # depends on [control=['try'], data=[]]
except User.DoesNotExist:
return # depends on [control=['except'], data=[]]
claim_data = None
for public in user.public_keys.all():
claim_data = token.verify(claim, public.key, validate_nonce=self.validate_nonce)
if claim_data:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['public']]
if not claim_data:
return # depends on [control=['if'], data=[]]
logger.debug('Successfully authenticated %s using JWT', user.username)
request._dont_enforce_csrf_checks = True
request.user = user |
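A dependency-free sketch of the header-parsing step used in process_request above; the header strings are illustrative and the "JWT" default stands in for AUTH_METHOD, which is defined elsewhere in the middleware's module.

def split_auth_header(header_value, expected_method='JWT'):
    # Mirror the split/upper checks from process_request: take the part
    # after the first space as the claim, reject other auth methods.
    try:
        method, claim = header_value.split(' ', 1)
    except ValueError:
        return None
    if method.upper() != expected_method:
        return None
    return claim

print(split_auth_header('JWT abc.def.ghi'))  # -> 'abc.def.ghi'
print(split_auth_header('Basic dXNlcg=='))   # -> None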
def print_async_event(self, suffix, event):
'''
        Print all of the events with the given suffix
'''
if not isinstance(event, dict):
return
# if we are "quiet", don't print
if self.opts.get('quiet', False):
return
# some suffixes we don't want to print
if suffix in ('new',):
return
try:
outputter = self.opts.get('output', event.get('outputter', None) or event.get('return').get('outputter'))
except AttributeError:
outputter = None
# if this is a ret, we have our own set of rules
if suffix == 'ret':
# Check if outputter was passed in the return data. If this is the case,
            # then the return data will be a dict with two keys: 'data' and 'outputter'
if isinstance(event.get('return'), dict) \
and set(event['return']) == set(('data', 'outputter')):
event_data = event['return']['data']
outputter = event['return']['outputter']
else:
event_data = event['return']
else:
event_data = {'suffix': suffix, 'event': event}
salt.output.display_output(event_data, outputter, self.opts) | def function[print_async_event, parameter[self, suffix, event]]:
constant[
    Print all of the events with the given suffix
]
if <ast.UnaryOp object at 0x7da20c794070> begin[:]
return[None]
if call[name[self].opts.get, parameter[constant[quiet], constant[False]]] begin[:]
return[None]
if compare[name[suffix] in tuple[[<ast.Constant object at 0x7da20c6e53f0>]]] begin[:]
return[None]
<ast.Try object at 0x7da20c6e6200>
if compare[name[suffix] equal[==] constant[ret]] begin[:]
if <ast.BoolOp object at 0x7da20c6e57b0> begin[:]
variable[event_data] assign[=] call[call[name[event]][constant[return]]][constant[data]]
variable[outputter] assign[=] call[call[name[event]][constant[return]]][constant[outputter]]
call[name[salt].output.display_output, parameter[name[event_data], name[outputter], name[self].opts]] | keyword[def] identifier[print_async_event] ( identifier[self] , identifier[suffix] , identifier[event] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[event] , identifier[dict] ):
keyword[return]
keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] , keyword[False] ):
keyword[return]
keyword[if] identifier[suffix] keyword[in] ( literal[string] ,):
keyword[return]
keyword[try] :
identifier[outputter] = identifier[self] . identifier[opts] . identifier[get] ( literal[string] , identifier[event] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[event] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] ))
keyword[except] identifier[AttributeError] :
identifier[outputter] = keyword[None]
keyword[if] identifier[suffix] == literal[string] :
keyword[if] identifier[isinstance] ( identifier[event] . identifier[get] ( literal[string] ), identifier[dict] ) keyword[and] identifier[set] ( identifier[event] [ literal[string] ])== identifier[set] (( literal[string] , literal[string] )):
identifier[event_data] = identifier[event] [ literal[string] ][ literal[string] ]
identifier[outputter] = identifier[event] [ literal[string] ][ literal[string] ]
keyword[else] :
identifier[event_data] = identifier[event] [ literal[string] ]
keyword[else] :
identifier[event_data] ={ literal[string] : identifier[suffix] , literal[string] : identifier[event] }
identifier[salt] . identifier[output] . identifier[display_output] ( identifier[event_data] , identifier[outputter] , identifier[self] . identifier[opts] ) | def print_async_event(self, suffix, event):
"""
    Print all of the events with the given suffix
"""
if not isinstance(event, dict):
return # depends on [control=['if'], data=[]]
# if we are "quiet", don't print
if self.opts.get('quiet', False):
return # depends on [control=['if'], data=[]]
# some suffixes we don't want to print
if suffix in ('new',):
return # depends on [control=['if'], data=[]]
try:
outputter = self.opts.get('output', event.get('outputter', None) or event.get('return').get('outputter')) # depends on [control=['try'], data=[]]
except AttributeError:
outputter = None # depends on [control=['except'], data=[]]
# if this is a ret, we have our own set of rules
if suffix == 'ret':
# Check if outputter was passed in the return data. If this is the case,
    # then the return data will be a dict with two keys: 'data' and 'outputter'
if isinstance(event.get('return'), dict) and set(event['return']) == set(('data', 'outputter')):
event_data = event['return']['data']
outputter = event['return']['outputter'] # depends on [control=['if'], data=[]]
else:
event_data = event['return'] # depends on [control=['if'], data=[]]
else:
event_data = {'suffix': suffix, 'event': event}
salt.output.display_output(event_data, outputter, self.opts) |
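A standalone sketch of the return-unpacking rule above; the event dicts here are made up rather than taken from a live Salt run.

def unpack_ret(event):
    # When the return payload carries exactly the keys 'data' and
    # 'outputter', split them apart; otherwise use the payload as-is.
    ret = event.get('return')
    if isinstance(ret, dict) and set(ret) == {'data', 'outputter'}:
        return ret['data'], ret['outputter']
    return ret, None

print(unpack_ret({'return': {'data': {'ok': True}, 'outputter': 'nested'}}))
# -> ({'ok': True}, 'nested')
print(unpack_ret({'return': {'ok': True}}))
# -> ({'ok': True}, None)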
def _load_pretrained_tok2vec(nlp, loc):
"""Load pre-trained weights for the 'token-to-vector' part of the component
models, which is typically a CNN. See 'spacy pretrain'. Experimental.
"""
with loc.open("rb") as file_:
weights_data = file_.read()
loaded = []
for name, component in nlp.pipeline:
if hasattr(component, "model") and hasattr(component.model, "tok2vec"):
component.tok2vec.from_bytes(weights_data)
loaded.append(name)
return loaded | def function[_load_pretrained_tok2vec, parameter[nlp, loc]]:
constant[Load pre-trained weights for the 'token-to-vector' part of the component
models, which is typically a CNN. See 'spacy pretrain'. Experimental.
]
with call[name[loc].open, parameter[constant[rb]]] begin[:]
variable[weights_data] assign[=] call[name[file_].read, parameter[]]
variable[loaded] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b2031060>, <ast.Name object at 0x7da1b2030c10>]]] in starred[name[nlp].pipeline] begin[:]
if <ast.BoolOp object at 0x7da1b2032110> begin[:]
call[name[component].tok2vec.from_bytes, parameter[name[weights_data]]]
call[name[loaded].append, parameter[name[name]]]
return[name[loaded]] | keyword[def] identifier[_load_pretrained_tok2vec] ( identifier[nlp] , identifier[loc] ):
literal[string]
keyword[with] identifier[loc] . identifier[open] ( literal[string] ) keyword[as] identifier[file_] :
identifier[weights_data] = identifier[file_] . identifier[read] ()
identifier[loaded] =[]
keyword[for] identifier[name] , identifier[component] keyword[in] identifier[nlp] . identifier[pipeline] :
keyword[if] identifier[hasattr] ( identifier[component] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[component] . identifier[model] , literal[string] ):
identifier[component] . identifier[tok2vec] . identifier[from_bytes] ( identifier[weights_data] )
identifier[loaded] . identifier[append] ( identifier[name] )
keyword[return] identifier[loaded] | def _load_pretrained_tok2vec(nlp, loc):
"""Load pre-trained weights for the 'token-to-vector' part of the component
models, which is typically a CNN. See 'spacy pretrain'. Experimental.
"""
with loc.open('rb') as file_:
weights_data = file_.read() # depends on [control=['with'], data=['file_']]
loaded = []
for (name, component) in nlp.pipeline:
if hasattr(component, 'model') and hasattr(component.model, 'tok2vec'):
component.tok2vec.from_bytes(weights_data)
loaded.append(name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return loaded |
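A hypothetical usage sketch of _load_pretrained_tok2vec: the pipeline name and weights path are assumptions, the weights file being one produced by `spacy pretrain`.

from pathlib import Path
import spacy

nlp = spacy.load('en_core_web_sm')  # any pipeline whose components carry tok2vec models
loaded = _load_pretrained_tok2vec(nlp, Path('weights.bin'))
print('components that received pretrained weights:', loaded)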
def get():
"""Return list of Scheduling Blocks Instances known to SDP ."""
LOG.debug('GET list of SBIs.')
# Construct response object.
_url = get_root_url()
response = dict(scheduling_blocks=[],
links=dict(home='{}'.format(_url)))
# Get ordered list of SBI ID's.
block_ids = DB.get_sched_block_instance_ids()
# Loop over SBIs and add summary of each to the list of SBIs in the
# response.
for block in DB.get_block_details(block_ids):
block_id = block['id']
LOG.debug('Adding SBI %s to list', block_id)
LOG.debug(block)
block['num_processing_blocks'] = len(block['processing_block_ids'])
temp = ['OK'] * 10 + ['WAITING'] * 4 + ['FAILED'] * 2
block['status'] = choice(temp)
try:
del block['processing_block_ids']
except KeyError:
pass
block['links'] = {
'detail': '{}/scheduling-block/{}' .format(_url, block_id)
}
response['scheduling_blocks'].append(block)
return response, HTTPStatus.OK | def function[get, parameter[]]:
    constant[Return a list of Scheduling Block Instances known to SDP.]
call[name[LOG].debug, parameter[constant[GET list of SBIs.]]]
variable[_url] assign[=] call[name[get_root_url], parameter[]]
variable[response] assign[=] call[name[dict], parameter[]]
variable[block_ids] assign[=] call[name[DB].get_sched_block_instance_ids, parameter[]]
for taget[name[block]] in starred[call[name[DB].get_block_details, parameter[name[block_ids]]]] begin[:]
variable[block_id] assign[=] call[name[block]][constant[id]]
call[name[LOG].debug, parameter[constant[Adding SBI %s to list], name[block_id]]]
call[name[LOG].debug, parameter[name[block]]]
call[name[block]][constant[num_processing_blocks]] assign[=] call[name[len], parameter[call[name[block]][constant[processing_block_ids]]]]
variable[temp] assign[=] binary_operation[binary_operation[binary_operation[list[[<ast.Constant object at 0x7da1b03343d0>]] * constant[10]] + binary_operation[list[[<ast.Constant object at 0x7da1b0335990>]] * constant[4]]] + binary_operation[list[[<ast.Constant object at 0x7da1b0335ea0>]] * constant[2]]]
call[name[block]][constant[status]] assign[=] call[name[choice], parameter[name[temp]]]
<ast.Try object at 0x7da1b0336d40>
call[name[block]][constant[links]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0336020>], [<ast.Call object at 0x7da1b0336e00>]]
call[call[name[response]][constant[scheduling_blocks]].append, parameter[name[block]]]
return[tuple[[<ast.Name object at 0x7da1b03a4a00>, <ast.Attribute object at 0x7da1b03a4df0>]]] | keyword[def] identifier[get] ():
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] )
identifier[_url] = identifier[get_root_url] ()
identifier[response] = identifier[dict] ( identifier[scheduling_blocks] =[],
identifier[links] = identifier[dict] ( identifier[home] = literal[string] . identifier[format] ( identifier[_url] )))
identifier[block_ids] = identifier[DB] . identifier[get_sched_block_instance_ids] ()
keyword[for] identifier[block] keyword[in] identifier[DB] . identifier[get_block_details] ( identifier[block_ids] ):
identifier[block_id] = identifier[block] [ literal[string] ]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[block_id] )
identifier[LOG] . identifier[debug] ( identifier[block] )
identifier[block] [ literal[string] ]= identifier[len] ( identifier[block] [ literal[string] ])
identifier[temp] =[ literal[string] ]* literal[int] +[ literal[string] ]* literal[int] +[ literal[string] ]* literal[int]
identifier[block] [ literal[string] ]= identifier[choice] ( identifier[temp] )
keyword[try] :
keyword[del] identifier[block] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[block] [ literal[string] ]={
literal[string] : literal[string] . identifier[format] ( identifier[_url] , identifier[block_id] )
}
identifier[response] [ literal[string] ]. identifier[append] ( identifier[block] )
keyword[return] identifier[response] , identifier[HTTPStatus] . identifier[OK] | def get():
"""Return list of Scheduling Blocks Instances known to SDP ."""
LOG.debug('GET list of SBIs.')
# Construct response object.
_url = get_root_url()
response = dict(scheduling_blocks=[], links=dict(home='{}'.format(_url)))
# Get ordered list of SBI ID's.
block_ids = DB.get_sched_block_instance_ids()
# Loop over SBIs and add summary of each to the list of SBIs in the
# response.
for block in DB.get_block_details(block_ids):
block_id = block['id']
LOG.debug('Adding SBI %s to list', block_id)
LOG.debug(block)
block['num_processing_blocks'] = len(block['processing_block_ids'])
temp = ['OK'] * 10 + ['WAITING'] * 4 + ['FAILED'] * 2
block['status'] = choice(temp)
try:
del block['processing_block_ids'] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
block['links'] = {'detail': '{}/scheduling-block/{}'.format(_url, block_id)}
response['scheduling_blocks'].append(block) # depends on [control=['for'], data=['block']]
return (response, HTTPStatus.OK) |
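The mocked SBI status above is a weighted random draw (10 OK : 4 WAITING : 2 FAILED); a self-contained equivalent using random.choices:

import random

def mock_status():
    # Same 10:4:2 weighting as the temp list built in get().
    return random.choices(['OK', 'WAITING', 'FAILED'], weights=[10, 4, 2])[0]

print([mock_status() for _ in range(5)])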
def redeem(ctx, htlc_id, secret, account):
""" Redeem an HTLC contract
"""
print_tx(ctx.blockchain.htlc_redeem(htlc_id, secret, account=account)) | def function[redeem, parameter[ctx, htlc_id, secret, account]]:
constant[ Redeem an HTLC contract
]
call[name[print_tx], parameter[call[name[ctx].blockchain.htlc_redeem, parameter[name[htlc_id], name[secret]]]]] | keyword[def] identifier[redeem] ( identifier[ctx] , identifier[htlc_id] , identifier[secret] , identifier[account] ):
literal[string]
identifier[print_tx] ( identifier[ctx] . identifier[blockchain] . identifier[htlc_redeem] ( identifier[htlc_id] , identifier[secret] , identifier[account] = identifier[account] )) | def redeem(ctx, htlc_id, secret, account):
""" Redeem an HTLC contract
"""
print_tx(ctx.blockchain.htlc_redeem(htlc_id, secret, account=account)) |
def enqueue_sync(self, func, *func_args):
'''
Enqueue an arbitrary synchronous function.
Deprecated: Use async version instead
'''
worker = self.pick_sticky(0) # just pick first always
args = (func,) + func_args
coro = worker.enqueue(enums.Task.FUNC, args)
asyncio.ensure_future(coro) | def function[enqueue_sync, parameter[self, func]]:
constant[
Enqueue an arbitrary synchronous function.
Deprecated: Use async version instead
]
variable[worker] assign[=] call[name[self].pick_sticky, parameter[constant[0]]]
variable[args] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b2344580>]] + name[func_args]]
variable[coro] assign[=] call[name[worker].enqueue, parameter[name[enums].Task.FUNC, name[args]]]
call[name[asyncio].ensure_future, parameter[name[coro]]] | keyword[def] identifier[enqueue_sync] ( identifier[self] , identifier[func] ,* identifier[func_args] ):
literal[string]
identifier[worker] = identifier[self] . identifier[pick_sticky] ( literal[int] )
identifier[args] =( identifier[func] ,)+ identifier[func_args]
identifier[coro] = identifier[worker] . identifier[enqueue] ( identifier[enums] . identifier[Task] . identifier[FUNC] , identifier[args] )
identifier[asyncio] . identifier[ensure_future] ( identifier[coro] ) | def enqueue_sync(self, func, *func_args):
"""
Enqueue an arbitrary synchronous function.
Deprecated: Use async version instead
"""
worker = self.pick_sticky(0) # just pick first always
args = (func,) + func_args
coro = worker.enqueue(enums.Task.FUNC, args)
asyncio.ensure_future(coro) |
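A minimal, dependency-free illustration of the fire-and-forget pattern enqueue_sync relies on: build the coroutine, schedule it with asyncio.ensure_future, and return without awaiting it.

import asyncio

async def fake_enqueue(task_kind, args):
    print('enqueued', task_kind, args)

async def main():
    coro = fake_enqueue('FUNC', (print, 'hello'))
    asyncio.ensure_future(coro)  # scheduled, not awaited, as in enqueue_sync
    await asyncio.sleep(0)       # yield once so the task actually runs

asyncio.run(main())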
def data_to_imagesurface (data, **kwargs):
"""Turn arbitrary data values into a Cairo ImageSurface.
The method and arguments are the same as data_to_argb32, except that the
data array will be treated as 2D, and higher dimensionalities are not
allowed. The return value is a Cairo ImageSurface object.
Combined with the write_to_png() method on ImageSurfaces, this is an easy
way to quickly visualize 2D data.
"""
import cairo
data = np.atleast_2d (data)
if data.ndim != 2:
raise ValueError ('input array may not have more than 2 dimensions')
argb32 = data_to_argb32 (data, **kwargs)
format = cairo.FORMAT_ARGB32
height, width = argb32.shape
stride = cairo.ImageSurface.format_stride_for_width (format, width)
if argb32.strides[0] != stride:
raise ValueError ('stride of data array not compatible with ARGB32')
return cairo.ImageSurface.create_for_data (argb32, format,
width, height, stride) | def function[data_to_imagesurface, parameter[data]]:
constant[Turn arbitrary data values into a Cairo ImageSurface.
The method and arguments are the same as data_to_argb32, except that the
data array will be treated as 2D, and higher dimensionalities are not
allowed. The return value is a Cairo ImageSurface object.
Combined with the write_to_png() method on ImageSurfaces, this is an easy
way to quickly visualize 2D data.
]
import module[cairo]
variable[data] assign[=] call[name[np].atleast_2d, parameter[name[data]]]
if compare[name[data].ndim not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da1b263b6a0>
variable[argb32] assign[=] call[name[data_to_argb32], parameter[name[data]]]
variable[format] assign[=] name[cairo].FORMAT_ARGB32
<ast.Tuple object at 0x7da1b263b010> assign[=] name[argb32].shape
variable[stride] assign[=] call[name[cairo].ImageSurface.format_stride_for_width, parameter[name[format], name[width]]]
if compare[call[name[argb32].strides][constant[0]] not_equal[!=] name[stride]] begin[:]
<ast.Raise object at 0x7da1b2639570>
return[call[name[cairo].ImageSurface.create_for_data, parameter[name[argb32], name[format], name[width], name[height], name[stride]]]] | keyword[def] identifier[data_to_imagesurface] ( identifier[data] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[cairo]
identifier[data] = identifier[np] . identifier[atleast_2d] ( identifier[data] )
keyword[if] identifier[data] . identifier[ndim] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[argb32] = identifier[data_to_argb32] ( identifier[data] ,** identifier[kwargs] )
identifier[format] = identifier[cairo] . identifier[FORMAT_ARGB32]
identifier[height] , identifier[width] = identifier[argb32] . identifier[shape]
identifier[stride] = identifier[cairo] . identifier[ImageSurface] . identifier[format_stride_for_width] ( identifier[format] , identifier[width] )
keyword[if] identifier[argb32] . identifier[strides] [ literal[int] ]!= identifier[stride] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[cairo] . identifier[ImageSurface] . identifier[create_for_data] ( identifier[argb32] , identifier[format] ,
identifier[width] , identifier[height] , identifier[stride] ) | def data_to_imagesurface(data, **kwargs):
"""Turn arbitrary data values into a Cairo ImageSurface.
The method and arguments are the same as data_to_argb32, except that the
data array will be treated as 2D, and higher dimensionalities are not
allowed. The return value is a Cairo ImageSurface object.
Combined with the write_to_png() method on ImageSurfaces, this is an easy
way to quickly visualize 2D data.
"""
import cairo
data = np.atleast_2d(data)
if data.ndim != 2:
raise ValueError('input array may not have more than 2 dimensions') # depends on [control=['if'], data=[]]
argb32 = data_to_argb32(data, **kwargs)
format = cairo.FORMAT_ARGB32
(height, width) = argb32.shape
stride = cairo.ImageSurface.format_stride_for_width(format, width)
if argb32.strides[0] != stride:
raise ValueError('stride of data array not compatible with ARGB32') # depends on [control=['if'], data=[]]
return cairo.ImageSurface.create_for_data(argb32, format, width, height, stride) |
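A hypothetical quick-look usage of data_to_imagesurface, assuming pycairo is installed and data_to_argb32's defaults are acceptable; the array shape and filename are made up.

import numpy as np

data = np.random.rand(64, 128)          # any 2D array of values
surface = data_to_imagesurface(data)    # kwargs pass through to data_to_argb32
surface.write_to_png('quicklook.png')   # write_to_png is provided by ImageSurface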
def _sensoryComputeLearningMode(self, anchorInput):
"""
Associate this location with a sensory input. Subsequently, anchorInput will
activate the current location during anchor().
@param anchorInput (numpy array)
A sensory input. This will often come from a feature-location pair layer.
"""
overlaps = self.connections.computeActivity(anchorInput,
self.connectedPermanence)
activeSegments = np.where(overlaps >= self.activationThreshold)[0]
potentialOverlaps = self.connections.computeActivity(anchorInput)
matchingSegments = np.where(potentialOverlaps >=
self.learningThreshold)[0]
    # Cells with an active segment: reinforce the segment
cellsForActiveSegments = self.connections.mapSegmentsToCells(
activeSegments)
learningActiveSegments = activeSegments[
np.in1d(cellsForActiveSegments, self.activeCells)]
remainingCells = np.setdiff1d(self.activeCells, cellsForActiveSegments)
# Remaining cells with a matching segment: reinforce the best
# matching segment.
candidateSegments = self.connections.filterSegmentsByCell(
matchingSegments, remainingCells)
cellsForCandidateSegments = (
self.connections.mapSegmentsToCells(candidateSegments))
candidateSegments = candidateSegments[
np.in1d(cellsForCandidateSegments, remainingCells)]
onePerCellFilter = np2.argmaxMulti(potentialOverlaps[candidateSegments],
cellsForCandidateSegments)
learningMatchingSegments = candidateSegments[onePerCellFilter]
newSegmentCells = np.setdiff1d(remainingCells, cellsForCandidateSegments)
for learningSegments in (learningActiveSegments,
learningMatchingSegments):
self._learn(self.connections, self.rng, learningSegments,
anchorInput, potentialOverlaps,
self.initialPermanence, self.sampleSize,
self.permanenceIncrement, self.permanenceDecrement,
self.maxSynapsesPerSegment)
# Remaining cells without a matching segment: grow one.
numNewSynapses = len(anchorInput)
if self.sampleSize != -1:
numNewSynapses = min(numNewSynapses, self.sampleSize)
if self.maxSynapsesPerSegment != -1:
numNewSynapses = min(numNewSynapses, self.maxSynapsesPerSegment)
newSegments = self.connections.createSegments(newSegmentCells)
self.connections.growSynapsesToSample(
newSegments, anchorInput, numNewSynapses,
self.initialPermanence, self.rng)
self.activeSegments = activeSegments
self.sensoryAssociatedCells = self.activeCells | def function[_sensoryComputeLearningMode, parameter[self, anchorInput]]:
constant[
Associate this location with a sensory input. Subsequently, anchorInput will
activate the current location during anchor().
@param anchorInput (numpy array)
A sensory input. This will often come from a feature-location pair layer.
]
variable[overlaps] assign[=] call[name[self].connections.computeActivity, parameter[name[anchorInput], name[self].connectedPermanence]]
variable[activeSegments] assign[=] call[call[name[np].where, parameter[compare[name[overlaps] greater_or_equal[>=] name[self].activationThreshold]]]][constant[0]]
variable[potentialOverlaps] assign[=] call[name[self].connections.computeActivity, parameter[name[anchorInput]]]
variable[matchingSegments] assign[=] call[call[name[np].where, parameter[compare[name[potentialOverlaps] greater_or_equal[>=] name[self].learningThreshold]]]][constant[0]]
variable[cellsForActiveSegments] assign[=] call[name[self].connections.mapSegmentsToCells, parameter[name[activeSegments]]]
variable[learningActiveSegments] assign[=] call[name[activeSegments]][call[name[np].in1d, parameter[name[cellsForActiveSegments], name[self].activeCells]]]
variable[remainingCells] assign[=] call[name[np].setdiff1d, parameter[name[self].activeCells, name[cellsForActiveSegments]]]
variable[candidateSegments] assign[=] call[name[self].connections.filterSegmentsByCell, parameter[name[matchingSegments], name[remainingCells]]]
variable[cellsForCandidateSegments] assign[=] call[name[self].connections.mapSegmentsToCells, parameter[name[candidateSegments]]]
variable[candidateSegments] assign[=] call[name[candidateSegments]][call[name[np].in1d, parameter[name[cellsForCandidateSegments], name[remainingCells]]]]
variable[onePerCellFilter] assign[=] call[name[np2].argmaxMulti, parameter[call[name[potentialOverlaps]][name[candidateSegments]], name[cellsForCandidateSegments]]]
variable[learningMatchingSegments] assign[=] call[name[candidateSegments]][name[onePerCellFilter]]
variable[newSegmentCells] assign[=] call[name[np].setdiff1d, parameter[name[remainingCells], name[cellsForCandidateSegments]]]
for taget[name[learningSegments]] in starred[tuple[[<ast.Name object at 0x7da1b08c5600>, <ast.Name object at 0x7da1b08c4c70>]]] begin[:]
call[name[self]._learn, parameter[name[self].connections, name[self].rng, name[learningSegments], name[anchorInput], name[potentialOverlaps], name[self].initialPermanence, name[self].sampleSize, name[self].permanenceIncrement, name[self].permanenceDecrement, name[self].maxSynapsesPerSegment]]
variable[numNewSynapses] assign[=] call[name[len], parameter[name[anchorInput]]]
if compare[name[self].sampleSize not_equal[!=] <ast.UnaryOp object at 0x7da1b08c7b20>] begin[:]
variable[numNewSynapses] assign[=] call[name[min], parameter[name[numNewSynapses], name[self].sampleSize]]
if compare[name[self].maxSynapsesPerSegment not_equal[!=] <ast.UnaryOp object at 0x7da1b08c72e0>] begin[:]
variable[numNewSynapses] assign[=] call[name[min], parameter[name[numNewSynapses], name[self].maxSynapsesPerSegment]]
variable[newSegments] assign[=] call[name[self].connections.createSegments, parameter[name[newSegmentCells]]]
call[name[self].connections.growSynapsesToSample, parameter[name[newSegments], name[anchorInput], name[numNewSynapses], name[self].initialPermanence, name[self].rng]]
name[self].activeSegments assign[=] name[activeSegments]
name[self].sensoryAssociatedCells assign[=] name[self].activeCells | keyword[def] identifier[_sensoryComputeLearningMode] ( identifier[self] , identifier[anchorInput] ):
literal[string]
identifier[overlaps] = identifier[self] . identifier[connections] . identifier[computeActivity] ( identifier[anchorInput] ,
identifier[self] . identifier[connectedPermanence] )
identifier[activeSegments] = identifier[np] . identifier[where] ( identifier[overlaps] >= identifier[self] . identifier[activationThreshold] )[ literal[int] ]
identifier[potentialOverlaps] = identifier[self] . identifier[connections] . identifier[computeActivity] ( identifier[anchorInput] )
identifier[matchingSegments] = identifier[np] . identifier[where] ( identifier[potentialOverlaps] >=
identifier[self] . identifier[learningThreshold] )[ literal[int] ]
identifier[cellsForActiveSegments] = identifier[self] . identifier[connections] . identifier[mapSegmentsToCells] (
identifier[activeSegments] )
identifier[learningActiveSegments] = identifier[activeSegments] [
identifier[np] . identifier[in1d] ( identifier[cellsForActiveSegments] , identifier[self] . identifier[activeCells] )]
identifier[remainingCells] = identifier[np] . identifier[setdiff1d] ( identifier[self] . identifier[activeCells] , identifier[cellsForActiveSegments] )
identifier[candidateSegments] = identifier[self] . identifier[connections] . identifier[filterSegmentsByCell] (
identifier[matchingSegments] , identifier[remainingCells] )
identifier[cellsForCandidateSegments] =(
identifier[self] . identifier[connections] . identifier[mapSegmentsToCells] ( identifier[candidateSegments] ))
identifier[candidateSegments] = identifier[candidateSegments] [
identifier[np] . identifier[in1d] ( identifier[cellsForCandidateSegments] , identifier[remainingCells] )]
identifier[onePerCellFilter] = identifier[np2] . identifier[argmaxMulti] ( identifier[potentialOverlaps] [ identifier[candidateSegments] ],
identifier[cellsForCandidateSegments] )
identifier[learningMatchingSegments] = identifier[candidateSegments] [ identifier[onePerCellFilter] ]
identifier[newSegmentCells] = identifier[np] . identifier[setdiff1d] ( identifier[remainingCells] , identifier[cellsForCandidateSegments] )
keyword[for] identifier[learningSegments] keyword[in] ( identifier[learningActiveSegments] ,
identifier[learningMatchingSegments] ):
identifier[self] . identifier[_learn] ( identifier[self] . identifier[connections] , identifier[self] . identifier[rng] , identifier[learningSegments] ,
identifier[anchorInput] , identifier[potentialOverlaps] ,
identifier[self] . identifier[initialPermanence] , identifier[self] . identifier[sampleSize] ,
identifier[self] . identifier[permanenceIncrement] , identifier[self] . identifier[permanenceDecrement] ,
identifier[self] . identifier[maxSynapsesPerSegment] )
identifier[numNewSynapses] = identifier[len] ( identifier[anchorInput] )
keyword[if] identifier[self] . identifier[sampleSize] !=- literal[int] :
identifier[numNewSynapses] = identifier[min] ( identifier[numNewSynapses] , identifier[self] . identifier[sampleSize] )
keyword[if] identifier[self] . identifier[maxSynapsesPerSegment] !=- literal[int] :
identifier[numNewSynapses] = identifier[min] ( identifier[numNewSynapses] , identifier[self] . identifier[maxSynapsesPerSegment] )
identifier[newSegments] = identifier[self] . identifier[connections] . identifier[createSegments] ( identifier[newSegmentCells] )
identifier[self] . identifier[connections] . identifier[growSynapsesToSample] (
identifier[newSegments] , identifier[anchorInput] , identifier[numNewSynapses] ,
identifier[self] . identifier[initialPermanence] , identifier[self] . identifier[rng] )
identifier[self] . identifier[activeSegments] = identifier[activeSegments]
identifier[self] . identifier[sensoryAssociatedCells] = identifier[self] . identifier[activeCells] | def _sensoryComputeLearningMode(self, anchorInput):
"""
Associate this location with a sensory input. Subsequently, anchorInput will
activate the current location during anchor().
@param anchorInput (numpy array)
A sensory input. This will often come from a feature-location pair layer.
"""
overlaps = self.connections.computeActivity(anchorInput, self.connectedPermanence)
activeSegments = np.where(overlaps >= self.activationThreshold)[0]
potentialOverlaps = self.connections.computeActivity(anchorInput)
matchingSegments = np.where(potentialOverlaps >= self.learningThreshold)[0]
    # Cells with an active segment: reinforce the segment
cellsForActiveSegments = self.connections.mapSegmentsToCells(activeSegments)
learningActiveSegments = activeSegments[np.in1d(cellsForActiveSegments, self.activeCells)]
remainingCells = np.setdiff1d(self.activeCells, cellsForActiveSegments)
# Remaining cells with a matching segment: reinforce the best
# matching segment.
candidateSegments = self.connections.filterSegmentsByCell(matchingSegments, remainingCells)
cellsForCandidateSegments = self.connections.mapSegmentsToCells(candidateSegments)
candidateSegments = candidateSegments[np.in1d(cellsForCandidateSegments, remainingCells)]
onePerCellFilter = np2.argmaxMulti(potentialOverlaps[candidateSegments], cellsForCandidateSegments)
learningMatchingSegments = candidateSegments[onePerCellFilter]
newSegmentCells = np.setdiff1d(remainingCells, cellsForCandidateSegments)
for learningSegments in (learningActiveSegments, learningMatchingSegments):
self._learn(self.connections, self.rng, learningSegments, anchorInput, potentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) # depends on [control=['for'], data=['learningSegments']]
# Remaining cells without a matching segment: grow one.
numNewSynapses = len(anchorInput)
if self.sampleSize != -1:
numNewSynapses = min(numNewSynapses, self.sampleSize) # depends on [control=['if'], data=[]]
if self.maxSynapsesPerSegment != -1:
numNewSynapses = min(numNewSynapses, self.maxSynapsesPerSegment) # depends on [control=['if'], data=[]]
newSegments = self.connections.createSegments(newSegmentCells)
self.connections.growSynapsesToSample(newSegments, anchorInput, numNewSynapses, self.initialPermanence, self.rng)
self.activeSegments = activeSegments
self.sensoryAssociatedCells = self.activeCells |
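A standalone sketch of the "one best segment per cell" selection done by np2.argmaxMulti above, rebuilt with a plain dict so it runs without the htmresearch helpers; the overlap values are invented.

import numpy as np

def argmax_multi(values, groups):
    # For each distinct group id, keep the index of its largest value.
    best = {}
    for i, (v, g) in enumerate(zip(values, groups)):
        if g not in best or v > values[best[g]]:
            best[g] = i
    return np.array([best[g] for g in sorted(best)])

overlaps = np.array([3, 7, 5, 2, 9])   # potential overlaps per segment
cells = np.array([0, 0, 1, 1, 1])      # owning cell of each segment
print(argmax_multi(overlaps, cells))   # -> [1 4]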
def _build_matches(matches, uuids, no_filtered, fastmode=False):
"""Build a list with matching subsets"""
result = []
for m in matches:
mk = m[0].uuid if not fastmode else m[0]
subset = [uuids[mk]]
for id_ in m[1:]:
uk = id_.uuid if not fastmode else id_
u = uuids[uk]
if u not in subset:
subset.append(u)
result.append(subset)
result += no_filtered
result.sort(key=len, reverse=True)
sresult = []
for r in result:
r.sort(key=lambda id_: id_.uuid)
sresult.append(r)
return sresult | def function[_build_matches, parameter[matches, uuids, no_filtered, fastmode]]:
constant[Build a list with matching subsets]
variable[result] assign[=] list[[]]
for taget[name[m]] in starred[name[matches]] begin[:]
variable[mk] assign[=] <ast.IfExp object at 0x7da1b0e9fbb0>
variable[subset] assign[=] list[[<ast.Subscript object at 0x7da1b0e9ff70>]]
for taget[name[id_]] in starred[call[name[m]][<ast.Slice object at 0x7da1b0e9fac0>]] begin[:]
variable[uk] assign[=] <ast.IfExp object at 0x7da1b0e9ed10>
variable[u] assign[=] call[name[uuids]][name[uk]]
if compare[name[u] <ast.NotIn object at 0x7da2590d7190> name[subset]] begin[:]
call[name[subset].append, parameter[name[u]]]
call[name[result].append, parameter[name[subset]]]
<ast.AugAssign object at 0x7da1b0e27970>
call[name[result].sort, parameter[]]
variable[sresult] assign[=] list[[]]
for taget[name[r]] in starred[name[result]] begin[:]
call[name[r].sort, parameter[]]
call[name[sresult].append, parameter[name[r]]]
return[name[sresult]] | keyword[def] identifier[_build_matches] ( identifier[matches] , identifier[uuids] , identifier[no_filtered] , identifier[fastmode] = keyword[False] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[m] keyword[in] identifier[matches] :
identifier[mk] = identifier[m] [ literal[int] ]. identifier[uuid] keyword[if] keyword[not] identifier[fastmode] keyword[else] identifier[m] [ literal[int] ]
identifier[subset] =[ identifier[uuids] [ identifier[mk] ]]
keyword[for] identifier[id_] keyword[in] identifier[m] [ literal[int] :]:
identifier[uk] = identifier[id_] . identifier[uuid] keyword[if] keyword[not] identifier[fastmode] keyword[else] identifier[id_]
identifier[u] = identifier[uuids] [ identifier[uk] ]
keyword[if] identifier[u] keyword[not] keyword[in] identifier[subset] :
identifier[subset] . identifier[append] ( identifier[u] )
identifier[result] . identifier[append] ( identifier[subset] )
identifier[result] += identifier[no_filtered]
identifier[result] . identifier[sort] ( identifier[key] = identifier[len] , identifier[reverse] = keyword[True] )
identifier[sresult] =[]
keyword[for] identifier[r] keyword[in] identifier[result] :
identifier[r] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[id_] : identifier[id_] . identifier[uuid] )
identifier[sresult] . identifier[append] ( identifier[r] )
keyword[return] identifier[sresult] | def _build_matches(matches, uuids, no_filtered, fastmode=False):
"""Build a list with matching subsets"""
result = []
for m in matches:
mk = m[0].uuid if not fastmode else m[0]
subset = [uuids[mk]]
for id_ in m[1:]:
uk = id_.uuid if not fastmode else id_
u = uuids[uk]
if u not in subset:
subset.append(u) # depends on [control=['if'], data=['u', 'subset']] # depends on [control=['for'], data=['id_']]
result.append(subset) # depends on [control=['for'], data=['m']]
result += no_filtered
result.sort(key=len, reverse=True)
sresult = []
for r in result:
r.sort(key=lambda id_: id_.uuid)
sresult.append(r) # depends on [control=['for'], data=['r']]
return sresult |
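A tiny self-contained run of _build_matches using a namedtuple stand-in for the identity objects; fastmode=True lets plain uuid strings appear in the match lists.

from collections import namedtuple

Identity = namedtuple('Identity', 'uuid')
uuids = {u: Identity(u) for u in ('a', 'b', 'c')}
matches = [['c'], ['a', 'b']]
print(_build_matches(matches, uuids, no_filtered=[], fastmode=True))
# -> [[Identity(uuid='a'), Identity(uuid='b')], [Identity(uuid='c')]]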
def add(name,
uid=None,
gid=None,
groups=None,
home=None,
shell=None,
fullname=None,
createhome=True,
**kwargs):
'''
Add a user to the minion
CLI Example:
.. code-block:: bash
salt '*' user.add name <uid> <gid> <groups> <home> <shell>
'''
if info(name):
raise CommandExecutionError('User \'{0}\' already exists'.format(name))
if salt.utils.stringutils.contains_whitespace(name):
raise SaltInvocationError('Username cannot contain whitespace')
if uid is None:
uid = _first_avail_uid()
if gid is None:
gid = 20 # gid 20 == 'staff', the default group
if home is None:
home = '/Users/{0}'.format(name)
if shell is None:
shell = '/bin/bash'
if fullname is None:
fullname = ''
if not isinstance(uid, int):
raise SaltInvocationError('uid must be an integer')
if not isinstance(gid, int):
raise SaltInvocationError('gid must be an integer')
name_path = '/Users/{0}'.format(name)
_dscl([name_path, 'UniqueID', uid])
_dscl([name_path, 'PrimaryGroupID', gid])
_dscl([name_path, 'UserShell', shell])
_dscl([name_path, 'NFSHomeDirectory', home])
_dscl([name_path, 'RealName', fullname])
# Make sure home directory exists
if createhome:
__salt__['file.mkdir'](home, user=uid, group=gid)
# dscl buffers changes, sleep before setting group membership
time.sleep(1)
if groups:
chgroups(name, groups)
return True | def function[add, parameter[name, uid, gid, groups, home, shell, fullname, createhome]]:
constant[
Add a user to the minion
CLI Example:
.. code-block:: bash
salt '*' user.add name <uid> <gid> <groups> <home> <shell>
]
if call[name[info], parameter[name[name]]] begin[:]
<ast.Raise object at 0x7da1b1f34550>
if call[name[salt].utils.stringutils.contains_whitespace, parameter[name[name]]] begin[:]
<ast.Raise object at 0x7da1b2024370>
if compare[name[uid] is constant[None]] begin[:]
variable[uid] assign[=] call[name[_first_avail_uid], parameter[]]
if compare[name[gid] is constant[None]] begin[:]
variable[gid] assign[=] constant[20]
if compare[name[home] is constant[None]] begin[:]
variable[home] assign[=] call[constant[/Users/{0}].format, parameter[name[name]]]
if compare[name[shell] is constant[None]] begin[:]
variable[shell] assign[=] constant[/bin/bash]
if compare[name[fullname] is constant[None]] begin[:]
variable[fullname] assign[=] constant[]
if <ast.UnaryOp object at 0x7da1b2025b70> begin[:]
<ast.Raise object at 0x7da1b2026440>
if <ast.UnaryOp object at 0x7da1b2025bd0> begin[:]
<ast.Raise object at 0x7da1b20255a0>
variable[name_path] assign[=] call[constant[/Users/{0}].format, parameter[name[name]]]
call[name[_dscl], parameter[list[[<ast.Name object at 0x7da1b20258a0>, <ast.Constant object at 0x7da1b20260b0>, <ast.Name object at 0x7da1b2026200>]]]]
call[name[_dscl], parameter[list[[<ast.Name object at 0x7da1b2025c30>, <ast.Constant object at 0x7da1b2025a50>, <ast.Name object at 0x7da1b20250c0>]]]]
call[name[_dscl], parameter[list[[<ast.Name object at 0x7da1b1f834f0>, <ast.Constant object at 0x7da1b1f82080>, <ast.Name object at 0x7da1b1f83250>]]]]
call[name[_dscl], parameter[list[[<ast.Name object at 0x7da1b1f83b80>, <ast.Constant object at 0x7da1b1f82b60>, <ast.Name object at 0x7da1b1f832b0>]]]]
call[name[_dscl], parameter[list[[<ast.Name object at 0x7da1b1f82590>, <ast.Constant object at 0x7da1b1f81e10>, <ast.Name object at 0x7da1b1f832e0>]]]]
if name[createhome] begin[:]
call[call[name[__salt__]][constant[file.mkdir]], parameter[name[home]]]
call[name[time].sleep, parameter[constant[1]]]
if name[groups] begin[:]
call[name[chgroups], parameter[name[name], name[groups]]]
return[constant[True]] | keyword[def] identifier[add] ( identifier[name] ,
identifier[uid] = keyword[None] ,
identifier[gid] = keyword[None] ,
identifier[groups] = keyword[None] ,
identifier[home] = keyword[None] ,
identifier[shell] = keyword[None] ,
identifier[fullname] = keyword[None] ,
identifier[createhome] = keyword[True] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[info] ( identifier[name] ):
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[if] identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[contains_whitespace] ( identifier[name] ):
keyword[raise] identifier[SaltInvocationError] ( literal[string] )
keyword[if] identifier[uid] keyword[is] keyword[None] :
identifier[uid] = identifier[_first_avail_uid] ()
keyword[if] identifier[gid] keyword[is] keyword[None] :
identifier[gid] = literal[int]
keyword[if] identifier[home] keyword[is] keyword[None] :
identifier[home] = literal[string] . identifier[format] ( identifier[name] )
keyword[if] identifier[shell] keyword[is] keyword[None] :
identifier[shell] = literal[string]
keyword[if] identifier[fullname] keyword[is] keyword[None] :
identifier[fullname] = literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[uid] , identifier[int] ):
keyword[raise] identifier[SaltInvocationError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[gid] , identifier[int] ):
keyword[raise] identifier[SaltInvocationError] ( literal[string] )
identifier[name_path] = literal[string] . identifier[format] ( identifier[name] )
identifier[_dscl] ([ identifier[name_path] , literal[string] , identifier[uid] ])
identifier[_dscl] ([ identifier[name_path] , literal[string] , identifier[gid] ])
identifier[_dscl] ([ identifier[name_path] , literal[string] , identifier[shell] ])
identifier[_dscl] ([ identifier[name_path] , literal[string] , identifier[home] ])
identifier[_dscl] ([ identifier[name_path] , literal[string] , identifier[fullname] ])
keyword[if] identifier[createhome] :
identifier[__salt__] [ literal[string] ]( identifier[home] , identifier[user] = identifier[uid] , identifier[group] = identifier[gid] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[if] identifier[groups] :
identifier[chgroups] ( identifier[name] , identifier[groups] )
keyword[return] keyword[True] | def add(name, uid=None, gid=None, groups=None, home=None, shell=None, fullname=None, createhome=True, **kwargs):
"""
Add a user to the minion
CLI Example:
.. code-block:: bash
salt '*' user.add name <uid> <gid> <groups> <home> <shell>
"""
if info(name):
raise CommandExecutionError("User '{0}' already exists".format(name)) # depends on [control=['if'], data=[]]
if salt.utils.stringutils.contains_whitespace(name):
raise SaltInvocationError('Username cannot contain whitespace') # depends on [control=['if'], data=[]]
if uid is None:
uid = _first_avail_uid() # depends on [control=['if'], data=['uid']]
if gid is None:
gid = 20 # gid 20 == 'staff', the default group # depends on [control=['if'], data=['gid']]
if home is None:
home = '/Users/{0}'.format(name) # depends on [control=['if'], data=['home']]
if shell is None:
shell = '/bin/bash' # depends on [control=['if'], data=['shell']]
if fullname is None:
fullname = '' # depends on [control=['if'], data=['fullname']]
if not isinstance(uid, int):
raise SaltInvocationError('uid must be an integer') # depends on [control=['if'], data=[]]
if not isinstance(gid, int):
raise SaltInvocationError('gid must be an integer') # depends on [control=['if'], data=[]]
name_path = '/Users/{0}'.format(name)
_dscl([name_path, 'UniqueID', uid])
_dscl([name_path, 'PrimaryGroupID', gid])
_dscl([name_path, 'UserShell', shell])
_dscl([name_path, 'NFSHomeDirectory', home])
_dscl([name_path, 'RealName', fullname])
# Make sure home directory exists
if createhome:
__salt__['file.mkdir'](home, user=uid, group=gid) # depends on [control=['if'], data=[]]
# dscl buffers changes, sleep before setting group membership
time.sleep(1)
if groups:
chgroups(name, groups) # depends on [control=['if'], data=[]]
return True |
def badge_form(model):
    '''A form factory for a given model's badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.DataRequired()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm | def function[badge_form, parameter[model]]:
    constant[A form factory for a given model's badges]
class class[BadgeForm, parameter[]] begin[:]
variable[model_class] assign[=] name[Badge]
variable[kind] assign[=] call[name[fields].RadioField, parameter[call[name[_], parameter[constant[Kind]]], list[[<ast.Call object at 0x7da18bc70e20>]]]]
return[name[BadgeForm]] | keyword[def] identifier[badge_form] ( identifier[model] ):
literal[string]
keyword[class] identifier[BadgeForm] ( identifier[ModelForm] ):
identifier[model_class] = identifier[Badge]
identifier[kind] = identifier[fields] . identifier[RadioField] (
identifier[_] ( literal[string] ),[ identifier[validators] . identifier[DataRequired] ()],
identifier[choices] = identifier[model] . identifier[__badges__] . identifier[items] (),
identifier[description] = identifier[_] ( literal[string] ))
keyword[return] identifier[BadgeForm] | def badge_form(model):
"""A form factory for a given model badges"""
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(_('Kind'), [validators.DataRequired()], choices=model.__badges__.items(), description=_('Kind of badge (certified, etc)'))
return BadgeForm |
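The closure pattern above, reduced to a dependency-free sketch: the factory captures the model and bakes its __badges__ mapping into the generated class (FakeModel is invented for illustration).

def make_badge_holder(model):
    class BadgeHolder:
        # choices is frozen at class-creation time, as in badge_form
        choices = list(model.__badges__.items())
    return BadgeHolder

class FakeModel:
    __badges__ = {'certified': 'Certified', 'pivotal': 'Pivotal'}

print(make_badge_holder(FakeModel).choices)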
def geo_filter(d):
"""Inspects the given Wikipedia article dict for geo-coordinates.
If no coordinates are found, returns None. Otherwise, returns a new dict
with the title and URL of the original article, along with coordinates."""
page = d["page"]
if not "revision" in page:
return None
title = page["title"]
if skip_article(title):
LOG.info("Skipping low-value article %s", title)
return None
text = page["revision"]["text"]
if not utils.is_str_type(text):
if "#text" in text:
text = text["#text"]
else:
return None
LOG.debug("--------------------------------------------------------------")
LOG.debug(title)
LOG.debug("--------------------------------------------------------------")
LOG.debug(text)
c = find_geo_coords(text)
u = wikip_url(title)
"""
m = hashlib.md5()
m.update(u.encode("UTF-8") if hasattr(u, 'encode') else u)
i = base64.urlsafe_b64encode(m.digest()).replace('=', '')
"""
return {
#"id": i,
"title": title,
"url": u,
"coords": c,
"updated": page["revision"].get("timestamp")
} if c else None | def function[geo_filter, parameter[d]]:
constant[Inspects the given Wikipedia article dict for geo-coordinates.
If no coordinates are found, returns None. Otherwise, returns a new dict
with the title and URL of the original article, along with coordinates.]
variable[page] assign[=] call[name[d]][constant[page]]
if <ast.UnaryOp object at 0x7da20e963a90> begin[:]
return[constant[None]]
variable[title] assign[=] call[name[page]][constant[title]]
if call[name[skip_article], parameter[name[title]]] begin[:]
call[name[LOG].info, parameter[constant[Skipping low-value article %s], name[title]]]
return[constant[None]]
variable[text] assign[=] call[call[name[page]][constant[revision]]][constant[text]]
if <ast.UnaryOp object at 0x7da204621f60> begin[:]
if compare[constant[#text] in name[text]] begin[:]
variable[text] assign[=] call[name[text]][constant[#text]]
call[name[LOG].debug, parameter[constant[--------------------------------------------------------------]]]
call[name[LOG].debug, parameter[name[title]]]
call[name[LOG].debug, parameter[constant[--------------------------------------------------------------]]]
call[name[LOG].debug, parameter[name[text]]]
variable[c] assign[=] call[name[find_geo_coords], parameter[name[text]]]
variable[u] assign[=] call[name[wikip_url], parameter[name[title]]]
constant[
m = hashlib.md5()
m.update(u.encode("UTF-8") if hasattr(u, 'encode') else u)
i = base64.urlsafe_b64encode(m.digest()).replace('=', '')
]
return[<ast.IfExp object at 0x7da204621150>] | keyword[def] identifier[geo_filter] ( identifier[d] ):
literal[string]
identifier[page] = identifier[d] [ literal[string] ]
keyword[if] keyword[not] literal[string] keyword[in] identifier[page] :
keyword[return] keyword[None]
identifier[title] = identifier[page] [ literal[string] ]
keyword[if] identifier[skip_article] ( identifier[title] ):
identifier[LOG] . identifier[info] ( literal[string] , identifier[title] )
keyword[return] keyword[None]
identifier[text] = identifier[page] [ literal[string] ][ literal[string] ]
keyword[if] keyword[not] identifier[utils] . identifier[is_str_type] ( identifier[text] ):
keyword[if] literal[string] keyword[in] identifier[text] :
identifier[text] = identifier[text] [ literal[string] ]
keyword[else] :
keyword[return] keyword[None]
identifier[LOG] . identifier[debug] ( literal[string] )
identifier[LOG] . identifier[debug] ( identifier[title] )
identifier[LOG] . identifier[debug] ( literal[string] )
identifier[LOG] . identifier[debug] ( identifier[text] )
identifier[c] = identifier[find_geo_coords] ( identifier[text] )
identifier[u] = identifier[wikip_url] ( identifier[title] )
literal[string]
keyword[return] {
literal[string] : identifier[title] ,
literal[string] : identifier[u] ,
literal[string] : identifier[c] ,
literal[string] : identifier[page] [ literal[string] ]. identifier[get] ( literal[string] )
} keyword[if] identifier[c] keyword[else] keyword[None] | def geo_filter(d):
"""Inspects the given Wikipedia article dict for geo-coordinates.
If no coordinates are found, returns None. Otherwise, returns a new dict
with the title and URL of the original article, along with coordinates."""
page = d['page']
if not 'revision' in page:
return None # depends on [control=['if'], data=[]]
title = page['title']
if skip_article(title):
LOG.info('Skipping low-value article %s', title)
return None # depends on [control=['if'], data=[]]
text = page['revision']['text']
if not utils.is_str_type(text):
if '#text' in text:
text = text['#text'] # depends on [control=['if'], data=['text']]
else:
return None # depends on [control=['if'], data=[]]
LOG.debug('--------------------------------------------------------------')
LOG.debug(title)
LOG.debug('--------------------------------------------------------------')
LOG.debug(text)
c = find_geo_coords(text)
u = wikip_url(title)
'\n m = hashlib.md5()\n m.update(u.encode("UTF-8") if hasattr(u, \'encode\') else u)\n i = base64.urlsafe_b64encode(m.digest()).replace(\'=\', \'\')\n '
#"id": i,
return {'title': title, 'url': u, 'coords': c, 'updated': page['revision'].get('timestamp')} if c else None |
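A hypothetical input shaped like one entry of a parsed Wikipedia dump; whether a dict comes back depends on find_geo_coords recognizing the coord template in the text.

article = {
    'page': {
        'title': 'Eiffel Tower',
        'revision': {
            'timestamp': '2020-01-01T00:00:00Z',
            'text': '{{coord|48.8583|N|2.2945|E}} An iron lattice tower in Paris.',
        },
    }
}
print(geo_filter(article))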
def deregister(self, pin_num=None, direction=None):
"""De-registers callback functions
:param pin_num: The pin number. If None then all functions are de-registered
:type pin_num: int
:param direction: The event direction. If None then all functions for the
given pin are de-registered
    :type direction: int
"""
to_delete = []
for i, function_map in enumerate(self.pin_function_maps):
if ( pin_num == None
or ( function_map.pin_num == pin_num
and ( direction == None
or function_map.direction == direction ) ) ):
to_delete.append(i)
for i in reversed(to_delete):
del self.pin_function_maps[i] | def function[deregister, parameter[self, pin_num, direction]]:
constant[De-registers callback functions
:param pin_num: The pin number. If None then all functions are de-registered
:type pin_num: int
:param direction: The event direction. If None then all functions for the
given pin are de-registered
:type direction:int
]
variable[to_delete] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18fe92110>, <ast.Name object at 0x7da18fe90a30>]]] in starred[call[name[enumerate], parameter[name[self].pin_function_maps]]] begin[:]
if <ast.BoolOp object at 0x7da18fe92f20> begin[:]
call[name[to_delete].append, parameter[name[i]]]
for taget[name[i]] in starred[call[name[reversed], parameter[name[to_delete]]]] begin[:]
<ast.Delete object at 0x7da18fe92b00> | keyword[def] identifier[deregister] ( identifier[self] , identifier[pin_num] = keyword[None] , identifier[direction] = keyword[None] ):
literal[string]
identifier[to_delete] =[]
keyword[for] identifier[i] , identifier[function_map] keyword[in] identifier[enumerate] ( identifier[self] . identifier[pin_function_maps] ):
keyword[if] ( identifier[pin_num] == keyword[None]
keyword[or] ( identifier[function_map] . identifier[pin_num] == identifier[pin_num]
keyword[and] ( identifier[direction] == keyword[None]
keyword[or] identifier[function_map] . identifier[direction] == identifier[direction] ))):
identifier[to_delete] . identifier[append] ( identifier[i] )
keyword[for] identifier[i] keyword[in] identifier[reversed] ( identifier[to_delete] ):
keyword[del] identifier[self] . identifier[pin_function_maps] [ identifier[i] ] | def deregister(self, pin_num=None, direction=None):
"""De-registers callback functions
:param pin_num: The pin number. If None then all functions are de-registered
:type pin_num: int
:param direction: The event direction. If None then all functions for the
given pin are de-registered
    :type direction: int
"""
to_delete = []
for (i, function_map) in enumerate(self.pin_function_maps):
if pin_num == None or (function_map.pin_num == pin_num and (direction == None or function_map.direction == direction)):
to_delete.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for i in reversed(to_delete):
del self.pin_function_maps[i] # depends on [control=['for'], data=['i']] |
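A standalone sketch of the same three-way filter using a namedtuple stand-in for the stored function maps:

from collections import namedtuple

FunctionMap = namedtuple('FunctionMap', 'pin_num direction')
maps = [FunctionMap(0, 'rise'), FunctionMap(0, 'fall'), FunctionMap(1, 'rise')]

def should_remove(m, pin_num=None, direction=None):
    # pin_num None removes everything; direction None removes all maps for the pin
    return pin_num is None or (m.pin_num == pin_num and
                               (direction is None or m.direction == direction))

maps = [m for m in maps if not should_remove(m, pin_num=0, direction='fall')]
print(maps)  # only FunctionMap(pin_num=0, direction='fall') was dropped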
def _wrap_run_cmd(jsonfile, mode='replay'):
"""Wrapper around :func:`run_cmd` for the testing using a record-replay
model
"""
logger = logging.getLogger(__name__)
records = []
counter = 0
json_opts = {'indent': 2, 'separators':(',',': '), 'sort_keys': True}
def run_cmd_record(*args, **kwargs):
response = run_cmd(*args, **kwargs)
records.append({'args': args, 'kwargs': kwargs, 'response': response})
with open(jsonfile, 'w') as out_fh:
json.dump(records, out_fh, **json_opts)
return response
def run_cmd_replay(*args, **kwargs):
record = records.pop(0)
logger.debug("cached run_cmd, args=%s, kwargs=%s"
% (str(args), str(kwargs)) )
assert list(record['args']) == list(args), \
"run_cmd call #%d: Obtained args: '%s'; Expected args: '%s'" \
% (counter+1, str(args), str(record['args']))
assert record['kwargs'] == kwargs, \
"run_cmd call #%d: Obtained kwargs: '%s'; Expected kwargs: '%s'" \
% (counter+1, str(kwargs), str(record['kwargs']))
response = record['response']
if "\n" in response:
if len(response.splitlines()) == 1:
logger.debug("cached response: %s", response)
else:
logger.debug("cached response: ---\n%s\n---", response)
else:
logger.debug("cached response: '%s'", response)
return response
if mode == 'replay':
with open(jsonfile) as in_fh:
records = json.load(in_fh)
return run_cmd_replay
elif mode == 'record':
return run_cmd_record
else:
raise ValueError("Invalid mode") | def function[_wrap_run_cmd, parameter[jsonfile, mode]]:
    constant[Wrapper around :func:`run_cmd` for testing, using a record-replay
model
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
variable[records] assign[=] list[[]]
variable[counter] assign[=] constant[0]
variable[json_opts] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ba20b0>, <ast.Constant object at 0x7da1b0ba24d0>, <ast.Constant object at 0x7da1b0ba20e0>], [<ast.Constant object at 0x7da1b0ba3370>, <ast.Tuple object at 0x7da1b0ba0d00>, <ast.Constant object at 0x7da1b0ba2b00>]]
def function[run_cmd_record, parameter[]]:
variable[response] assign[=] call[name[run_cmd], parameter[<ast.Starred object at 0x7da1b0ba1fc0>]]
call[name[records].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0ba0eb0>, <ast.Constant object at 0x7da1b0ba14b0>, <ast.Constant object at 0x7da1b0ba37c0>], [<ast.Name object at 0x7da1b0ba2b90>, <ast.Name object at 0x7da1b0ba1630>, <ast.Name object at 0x7da1b0ba17b0>]]]]
with call[name[open], parameter[name[jsonfile], constant[w]]] begin[:]
call[name[json].dump, parameter[name[records], name[out_fh]]]
return[name[response]]
def function[run_cmd_replay, parameter[]]:
variable[record] assign[=] call[name[records].pop, parameter[constant[0]]]
call[name[logger].debug, parameter[binary_operation[constant[cached run_cmd, args=%s, kwargs=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0bd8910>, <ast.Call object at 0x7da1b0bd9660>]]]]]
assert[compare[call[name[list], parameter[call[name[record]][constant[args]]]] equal[==] call[name[list], parameter[name[args]]]]]
assert[compare[call[name[record]][constant[kwargs]] equal[==] name[kwargs]]]
variable[response] assign[=] call[name[record]][constant[response]]
if compare[constant[
] in name[response]] begin[:]
if compare[call[name[len], parameter[call[name[response].splitlines, parameter[]]]] equal[==] constant[1]] begin[:]
call[name[logger].debug, parameter[constant[cached response: %s], name[response]]]
return[name[response]]
if compare[name[mode] equal[==] constant[replay]] begin[:]
with call[name[open], parameter[name[jsonfile]]] begin[:]
variable[records] assign[=] call[name[json].load, parameter[name[in_fh]]]
return[name[run_cmd_replay]] | keyword[def] identifier[_wrap_run_cmd] ( identifier[jsonfile] , identifier[mode] = literal[string] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[records] =[]
identifier[counter] = literal[int]
identifier[json_opts] ={ literal[string] : literal[int] , literal[string] :( literal[string] , literal[string] ), literal[string] : keyword[True] }
keyword[def] identifier[run_cmd_record] (* identifier[args] ,** identifier[kwargs] ):
identifier[response] = identifier[run_cmd] (* identifier[args] ,** identifier[kwargs] )
identifier[records] . identifier[append] ({ literal[string] : identifier[args] , literal[string] : identifier[kwargs] , literal[string] : identifier[response] })
keyword[with] identifier[open] ( identifier[jsonfile] , literal[string] ) keyword[as] identifier[out_fh] :
identifier[json] . identifier[dump] ( identifier[records] , identifier[out_fh] ,** identifier[json_opts] )
keyword[return] identifier[response]
keyword[def] identifier[run_cmd_replay] (* identifier[args] ,** identifier[kwargs] ):
identifier[record] = identifier[records] . identifier[pop] ( literal[int] )
identifier[logger] . identifier[debug] ( literal[string]
%( identifier[str] ( identifier[args] ), identifier[str] ( identifier[kwargs] )))
keyword[assert] identifier[list] ( identifier[record] [ literal[string] ])== identifier[list] ( identifier[args] ), literal[string] %( identifier[counter] + literal[int] , identifier[str] ( identifier[args] ), identifier[str] ( identifier[record] [ literal[string] ]))
keyword[assert] identifier[record] [ literal[string] ]== identifier[kwargs] , literal[string] %( identifier[counter] + literal[int] , identifier[str] ( identifier[kwargs] ), identifier[str] ( identifier[record] [ literal[string] ]))
identifier[response] = identifier[record] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[response] :
keyword[if] identifier[len] ( identifier[response] . identifier[splitlines] ())== literal[int] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[response] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[response] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[response] )
keyword[return] identifier[response]
keyword[if] identifier[mode] == literal[string] :
keyword[with] identifier[open] ( identifier[jsonfile] ) keyword[as] identifier[in_fh] :
identifier[records] = identifier[json] . identifier[load] ( identifier[in_fh] )
keyword[return] identifier[run_cmd_replay]
keyword[elif] identifier[mode] == literal[string] :
keyword[return] identifier[run_cmd_record]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def _wrap_run_cmd(jsonfile, mode='replay'):
"""Wrapper around :func:`run_cmd` for the testing using a record-replay
model
"""
logger = logging.getLogger(__name__)
records = []
counter = 0
json_opts = {'indent': 2, 'separators': (',', ': '), 'sort_keys': True}
def run_cmd_record(*args, **kwargs):
response = run_cmd(*args, **kwargs)
records.append({'args': args, 'kwargs': kwargs, 'response': response})
with open(jsonfile, 'w') as out_fh:
json.dump(records, out_fh, **json_opts) # depends on [control=['with'], data=['out_fh']]
return response
def run_cmd_replay(*args, **kwargs):
record = records.pop(0)
logger.debug('cached run_cmd, args=%s, kwargs=%s' % (str(args), str(kwargs)))
assert list(record['args']) == list(args), "run_cmd call #%d: Obtained args: '%s'; Expected args: '%s'" % (counter + 1, str(args), str(record['args']))
assert record['kwargs'] == kwargs, "run_cmd call #%d: Obtained kwargs: '%s'; Expected kwargs: '%s'" % (counter + 1, str(kwargs), str(record['kwargs']))
response = record['response']
if '\n' in response:
if len(response.splitlines()) == 1:
logger.debug('cached response: %s', response) # depends on [control=['if'], data=[]]
else:
logger.debug('cached response: ---\n%s\n---', response) # depends on [control=['if'], data=['response']]
else:
logger.debug("cached response: '%s'", response)
return response
if mode == 'replay':
with open(jsonfile) as in_fh:
records = json.load(in_fh) # depends on [control=['with'], data=['in_fh']]
return run_cmd_replay # depends on [control=['if'], data=[]]
elif mode == 'record':
return run_cmd_record # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid mode') |
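A hypothetical record/replay round trip; run_cmd is assumed to be importable from the module under test, and the JSON filename is made up.

run = _wrap_run_cmd('run_cmd.json', mode='record')
out = run(['echo', 'hello'])           # calls the real run_cmd and caches the response

run = _wrap_run_cmd('run_cmd.json', mode='replay')
assert run(['echo', 'hello']) == out   # served back from run_cmd.json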