code (stringlengths 75–104k) | code_sememe (stringlengths 47–309k) | token_type (stringlengths 215–214k) | code_dependency (stringlengths 75–155k) |
---|---|---|---|
def getType(self, short=False):
"""Return a string describing the type of the location, i.e. origin, on axis, off axis etc.
::
>>> l = Location()
>>> l.getType()
'origin'
>>> l = Location(pop=1)
>>> l.getType()
'on-axis, pop'
>>> l = Location(pop=1, snap=1)
>>> l.getType()
'off-axis, pop snap'
>>> l = Location(pop=(1,2))
>>> l.getType()
'on-axis, pop, split'
"""
if self.isOrigin():
return "origin"
t = []
onAxis = self.isOnAxis()
if onAxis is False:
if short:
t.append("off-axis")
else:
t.append("off-axis, "+ " ".join(self.getActiveAxes()))
else:
if short:
t.append("on-axis")
else:
t.append("on-axis, %s"%onAxis)
if self.isAmbivalent():
t.append("split")
return ', '.join(t) | def function[getType, parameter[self, short]]:
    constant[Return a string describing the type of the location, i.e. origin, on-axis, off-axis, etc.
::
>>> l = Location()
>>> l.getType()
'origin'
>>> l = Location(pop=1)
>>> l.getType()
'on-axis, pop'
>>> l = Location(pop=1, snap=1)
>>> l.getType()
'off-axis, pop snap'
>>> l = Location(pop=(1,2))
>>> l.getType()
'on-axis, pop, split'
]
if call[name[self].isOrigin, parameter[]] begin[:]
return[constant[origin]]
variable[t] assign[=] list[[]]
variable[onAxis] assign[=] call[name[self].isOnAxis, parameter[]]
if compare[name[onAxis] is constant[False]] begin[:]
if name[short] begin[:]
call[name[t].append, parameter[constant[off-axis]]]
if call[name[self].isAmbivalent, parameter[]] begin[:]
call[name[t].append, parameter[constant[split]]]
return[call[constant[, ].join, parameter[name[t]]]] | keyword[def] identifier[getType] ( identifier[self] , identifier[short] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[isOrigin] ():
keyword[return] literal[string]
identifier[t] =[]
identifier[onAxis] = identifier[self] . identifier[isOnAxis] ()
keyword[if] identifier[onAxis] keyword[is] keyword[False] :
keyword[if] identifier[short] :
identifier[t] . identifier[append] ( literal[string] )
keyword[else] :
identifier[t] . identifier[append] ( literal[string] + literal[string] . identifier[join] ( identifier[self] . identifier[getActiveAxes] ()))
keyword[else] :
keyword[if] identifier[short] :
identifier[t] . identifier[append] ( literal[string] )
keyword[else] :
identifier[t] . identifier[append] ( literal[string] % identifier[onAxis] )
keyword[if] identifier[self] . identifier[isAmbivalent] ():
identifier[t] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[t] ) | def getType(self, short=False):
"""Return a string describing the type of the location, i.e. origin, on axis, off axis etc.
::
>>> l = Location()
>>> l.getType()
'origin'
>>> l = Location(pop=1)
>>> l.getType()
'on-axis, pop'
>>> l = Location(pop=1, snap=1)
>>> l.getType()
'off-axis, pop snap'
>>> l = Location(pop=(1,2))
>>> l.getType()
'on-axis, pop, split'
"""
if self.isOrigin():
return 'origin' # depends on [control=['if'], data=[]]
t = []
onAxis = self.isOnAxis()
if onAxis is False:
if short:
t.append('off-axis') # depends on [control=['if'], data=[]]
else:
t.append('off-axis, ' + ' '.join(self.getActiveAxes())) # depends on [control=['if'], data=[]]
elif short:
t.append('on-axis') # depends on [control=['if'], data=[]]
else:
t.append('on-axis, %s' % onAxis)
if self.isAmbivalent():
t.append('split') # depends on [control=['if'], data=[]]
return ', '.join(t) |
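
getType() leans on four Location predicates that sit outside this excerpt. A minimal, hypothetical sketch of that contract (every method body below is an assumption, not the real MutatorMath implementation) is enough to reproduce the doctest outputs above:

class Location(dict):
    """Hypothetical stand-in; the real Location has much more behavior."""

    def isOrigin(self):
        # Origin: no axis carries a non-zero value.
        return not any(self.values())

    def getActiveAxes(self):
        # Names of all axes with a non-zero value.
        return sorted(k for k, v in self.items() if v)

    def isOnAxis(self):
        # Exactly one active axis: return its name; otherwise False.
        active = self.getActiveAxes()
        return active[0] if len(active) == 1 else False

    def isAmbivalent(self):
        # "Split" locations carry a (start, end) tuple on some axis.
        return any(isinstance(v, tuple) for v in self.values())

Attaching the getType() above to this class reproduces all four doctests.
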
def is_dir_or_file(dirname):
    '''Check whether a path is an existing directory or an existing file'''
    if not os.path.isdir(dirname) and not os.path.isfile(dirname):
        msg = "{0} is neither a directory nor a file".format(dirname)
raise argparse.ArgumentTypeError(msg)
else:
return dirname | def function[is_dir_or_file, parameter[dirname]]:
constant[Checks if a path is an actual directory that exists or a file]
if <ast.BoolOp object at 0x7da18f09cfd0> begin[:]
variable[msg] assign[=] call[constant[{0} is not a directory nor a file].format, parameter[name[dirname]]]
<ast.Raise object at 0x7da18f09e680> | keyword[def] identifier[is_dir_or_file] ( identifier[dirname] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dirname] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[dirname] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[dirname] )
keyword[raise] identifier[argparse] . identifier[ArgumentTypeError] ( identifier[msg] )
keyword[else] :
keyword[return] identifier[dirname] | def is_dir_or_file(dirname):
"""Checks if a path is an actual directory that exists or a file"""
if not os.path.isdir(dirname) and (not os.path.isfile(dirname)):
msg = '{0} is not a directory nor a file'.format(dirname)
raise argparse.ArgumentTypeError(msg) # depends on [control=['if'], data=[]]
else:
return dirname |
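
A hedged usage sketch: the function assumes os and argparse are already imported, and its natural home is an argparse type= callback so that invalid paths are rejected at parse time (the parser wiring below is illustrative, not from the original module):

import argparse
import os

def is_dir_or_file(dirname):
    if not os.path.isdir(dirname) and not os.path.isfile(dirname):
        raise argparse.ArgumentTypeError(
            "{0} is neither a directory nor a file".format(dirname))
    return dirname

parser = argparse.ArgumentParser()
parser.add_argument("path", type=is_dir_or_file)
print(parser.parse_args(["."]).path)  # "." always exists, so this prints "."
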
def add_behave_arguments(parser): # noqa
"""
Additional command line arguments extracted directly from behave
"""
# Option strings that conflict with Django
conflicts = [
'--no-color',
'--version',
'-c',
'-k',
'-v',
'-S',
'--simple',
]
parser.add_argument(
'paths',
action='store',
nargs='*',
help="Feature directory, file or file location (FILE:LINE)."
)
for fixed, keywords in behave_options:
keywords = keywords.copy()
# Configfile only entries are ignored
if not fixed:
continue
# Build option strings
option_strings = []
for option in fixed:
# Prefix conflicting option strings with `--behave`
if option in conflicts:
prefix = '--' if option.startswith('--') else '-'
option = option.replace(prefix, '--behave-', 1)
option_strings.append(option)
# config_help isn't a valid keyword for add_argument
if 'config_help' in keywords:
keywords['help'] = keywords['config_help']
del keywords['config_help']
parser.add_argument(*option_strings, **keywords) | def function[add_behave_arguments, parameter[parser]]:
constant[
Additional command line arguments extracted directly from behave
]
variable[conflicts] assign[=] list[[<ast.Constant object at 0x7da1b2344460>, <ast.Constant object at 0x7da1b2346da0>, <ast.Constant object at 0x7da1b2344130>, <ast.Constant object at 0x7da1b2345c60>, <ast.Constant object at 0x7da1b2346f50>, <ast.Constant object at 0x7da1b23450c0>, <ast.Constant object at 0x7da1b2345990>]]
call[name[parser].add_argument, parameter[constant[paths]]]
for taget[tuple[[<ast.Name object at 0x7da1b2347e50>, <ast.Name object at 0x7da1b2346b30>]]] in starred[name[behave_options]] begin[:]
variable[keywords] assign[=] call[name[keywords].copy, parameter[]]
if <ast.UnaryOp object at 0x7da18f58ff10> begin[:]
continue
variable[option_strings] assign[=] list[[]]
for taget[name[option]] in starred[name[fixed]] begin[:]
if compare[name[option] in name[conflicts]] begin[:]
variable[prefix] assign[=] <ast.IfExp object at 0x7da18f58da50>
variable[option] assign[=] call[name[option].replace, parameter[name[prefix], constant[--behave-], constant[1]]]
call[name[option_strings].append, parameter[name[option]]]
if compare[constant[config_help] in name[keywords]] begin[:]
call[name[keywords]][constant[help]] assign[=] call[name[keywords]][constant[config_help]]
<ast.Delete object at 0x7da1b2345e70>
call[name[parser].add_argument, parameter[<ast.Starred object at 0x7da1b23444f0>]] | keyword[def] identifier[add_behave_arguments] ( identifier[parser] ):
literal[string]
identifier[conflicts] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[action] = literal[string] ,
identifier[nargs] = literal[string] ,
identifier[help] = literal[string]
)
keyword[for] identifier[fixed] , identifier[keywords] keyword[in] identifier[behave_options] :
identifier[keywords] = identifier[keywords] . identifier[copy] ()
keyword[if] keyword[not] identifier[fixed] :
keyword[continue]
identifier[option_strings] =[]
keyword[for] identifier[option] keyword[in] identifier[fixed] :
keyword[if] identifier[option] keyword[in] identifier[conflicts] :
identifier[prefix] = literal[string] keyword[if] identifier[option] . identifier[startswith] ( literal[string] ) keyword[else] literal[string]
identifier[option] = identifier[option] . identifier[replace] ( identifier[prefix] , literal[string] , literal[int] )
identifier[option_strings] . identifier[append] ( identifier[option] )
keyword[if] literal[string] keyword[in] identifier[keywords] :
identifier[keywords] [ literal[string] ]= identifier[keywords] [ literal[string] ]
keyword[del] identifier[keywords] [ literal[string] ]
identifier[parser] . identifier[add_argument] (* identifier[option_strings] ,** identifier[keywords] ) | def add_behave_arguments(parser): # noqa
'\n Additional command line arguments extracted directly from behave\n '
# Option strings that conflict with Django
conflicts = ['--no-color', '--version', '-c', '-k', '-v', '-S', '--simple']
parser.add_argument('paths', action='store', nargs='*', help='Feature directory, file or file location (FILE:LINE).')
for (fixed, keywords) in behave_options:
keywords = keywords.copy()
# Configfile only entries are ignored
if not fixed:
continue # depends on [control=['if'], data=[]]
# Build option strings
option_strings = []
for option in fixed:
# Prefix conflicting option strings with `--behave`
if option in conflicts:
prefix = '--' if option.startswith('--') else '-'
option = option.replace(prefix, '--behave-', 1) # depends on [control=['if'], data=['option']]
option_strings.append(option) # depends on [control=['for'], data=['option']]
# config_help isn't a valid keyword for add_argument
if 'config_help' in keywords:
keywords['help'] = keywords['config_help']
del keywords['config_help'] # depends on [control=['if'], data=['keywords']]
parser.add_argument(*option_strings, **keywords) # depends on [control=['for'], data=[]] |
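
The renaming rule for conflicting option strings is the subtle part, so here it is isolated in a runnable sketch (behave_options itself comes from behave's configuration module and is not stubbed here):

conflicts = ['--no-color', '--version', '-c', '-k', '-v', '-S', '--simple']

def rename(option):
    # Prefix a conflicting option string with --behave-, as above.
    if option in conflicts:
        prefix = '--' if option.startswith('--') else '-'
        return option.replace(prefix, '--behave-', 1)
    return option

print(rename('--version'))  # --behave-version
print(rename('-k'))         # --behave-k
print(rename('--tags'))     # --tags (no conflict, unchanged)
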
def score(text, *score_functions):
"""Score ``text`` using ``score_functions``.
Examples:
>>> score("abc", function_a)
>>> score("abc", function_a, function_b)
Args:
text (str): The text to score
*score_functions (variable length argument list): functions to score with
Returns:
Arithmetic mean of scores
Raises:
ValueError: If score_functions is empty
"""
if not score_functions:
raise ValueError("score_functions must not be empty")
return statistics.mean(func(text) for func in score_functions) | def function[score, parameter[text]]:
constant[Score ``text`` using ``score_functions``.
Examples:
>>> score("abc", function_a)
>>> score("abc", function_a, function_b)
Args:
text (str): The text to score
*score_functions (variable length argument list): functions to score with
Returns:
Arithmetic mean of scores
Raises:
ValueError: If score_functions is empty
]
if <ast.UnaryOp object at 0x7da1b2525600> begin[:]
<ast.Raise object at 0x7da1b2525810>
return[call[name[statistics].mean, parameter[<ast.GeneratorExp object at 0x7da1b2527670>]]] | keyword[def] identifier[score] ( identifier[text] ,* identifier[score_functions] ):
literal[string]
keyword[if] keyword[not] identifier[score_functions] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[statistics] . identifier[mean] ( identifier[func] ( identifier[text] ) keyword[for] identifier[func] keyword[in] identifier[score_functions] ) | def score(text, *score_functions):
"""Score ``text`` using ``score_functions``.
Examples:
>>> score("abc", function_a)
>>> score("abc", function_a, function_b)
Args:
text (str): The text to score
*score_functions (variable length argument list): functions to score with
Returns:
Arithmetic mean of scores
Raises:
ValueError: If score_functions is empty
"""
if not score_functions:
raise ValueError('score_functions must not be empty') # depends on [control=['if'], data=[]]
return statistics.mean((func(text) for func in score_functions)) |
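
A usage sketch with two toy scorers; the real score functions presumably map text to a float, which is all mean() needs:

import statistics

def score(text, *score_functions):
    if not score_functions:
        raise ValueError("score_functions must not be empty")
    return statistics.mean(func(text) for func in score_functions)

def vowel_ratio(text):
    return sum(c in "aeiou" for c in text) / len(text)

def length_score(text):
    return float(len(text))

print(score("abc", vowel_ratio))                # 0.333...
print(score("abc", vowel_ratio, length_score))  # (0.333... + 3.0) / 2
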
def depth_august_average_ground_temperature(self, value=None):
"""Corresponds to IDD Field `depth_august_average_ground_temperature`
Args:
value (float): value for IDD Field `depth_august_average_ground_temperature`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
                'value {} needs to be of type float '
'for field `depth_august_average_ground_temperature`'.format(value))
self._depth_august_average_ground_temperature = value | def function[depth_august_average_ground_temperature, parameter[self, value]]:
constant[Corresponds to IDD Field `depth_august_average_ground_temperature`
Args:
value (float): value for IDD Field `depth_august_average_ground_temperature`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0ff8130>
name[self]._depth_august_average_ground_temperature assign[=] name[value] | keyword[def] identifier[depth_august_average_ground_temperature] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_depth_august_average_ground_temperature] = identifier[value] | def depth_august_average_ground_temperature(self, value=None):
"""Corresponds to IDD Field `depth_august_average_ground_temperature`
Args:
value (float): value for IDD Field `depth_august_average_ground_temperature`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
            raise ValueError('value {} needs to be of type float for field `depth_august_average_ground_temperature`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._depth_august_average_ground_temperature = value |
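
The same validating-setter pattern repeats across these EPW fields: coerce to float, raise with the field name on failure, and let None pass through as a missing value. A self-contained sketch with a hypothetical holder class:

class GroundTemps(object):
    """Hypothetical holder demonstrating the validating-setter pattern."""

    def depth_august_average_ground_temperature(self, value=None):
        if value is not None:
            try:
                value = float(value)
            except ValueError:
                raise ValueError(
                    'value {} needs to be of type float for field '
                    '`depth_august_average_ground_temperature`'.format(value))
        self._depth_august_average_ground_temperature = value

g = GroundTemps()
g.depth_august_average_ground_temperature("21.5")  # "21.5" coerces to 21.5
g.depth_august_average_ground_temperature(None)    # None marks a missing value
print(g._depth_august_average_ground_temperature)  # None
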
def get_available_images(request, project_id=None, images_cache=None):
"""Returns a list of available images
Returns a list of images that are public, shared, community or owned by
the given project_id. If project_id is not specified, only public and
community images are returned.
:param images_cache: An optional dict-like object in which to
cache public and per-project id image metadata.
"""
if images_cache is None:
images_cache = {}
public_images = images_cache.get('public_images', [])
community_images = images_cache.get('community_images', [])
images_by_project = images_cache.get('images_by_project', {})
shared_images = images_cache.get('shared_images', [])
if 'public_images' not in images_cache:
public = {"is_public": True,
"status": "active"}
try:
images, _more, _prev = glance.image_list_detailed(
request, filters=public)
public_images += images
images_cache['public_images'] = public_images
except Exception:
exceptions.handle(request,
_("Unable to retrieve public images."))
# Preempt if we don't have a project_id yet.
if project_id is None:
images_by_project[project_id] = []
if project_id not in images_by_project:
owner = {"property-owner_id": project_id,
"status": "active"}
try:
owned_images, _more, _prev = glance.image_list_detailed(
request, filters=owner)
images_by_project[project_id] = owned_images
except Exception:
owned_images = []
exceptions.handle(request,
_("Unable to retrieve images for "
"the current project."))
else:
owned_images = images_by_project[project_id]
if 'community_images' not in images_cache:
community = {"visibility": "community",
"status": "active"}
try:
images, _more, _prev = glance.image_list_detailed(
request, filters=community)
community_images += images
images_cache['community_images'] = community_images
except Exception:
exceptions.handle(request,
_("Unable to retrieve community images."))
if 'shared_images' not in images_cache:
shared = {"visibility": "shared",
"status": "active"}
try:
shared_images, _more, _prev = \
glance.image_list_detailed(request, filters=shared)
images_cache['shared_images'] = shared_images
except Exception:
exceptions.handle(request,
_("Unable to retrieve shared images."))
if 'images_by_project' not in images_cache:
images_cache['images_by_project'] = images_by_project
images = owned_images + public_images + community_images + shared_images
image_ids = []
final_images = []
for image in images:
if image.id not in image_ids and \
image.container_format not in ('aki', 'ari'):
image_ids.append(image.id)
final_images.append(image)
return final_images | def function[get_available_images, parameter[request, project_id, images_cache]]:
constant[Returns a list of available images
Returns a list of images that are public, shared, community or owned by
the given project_id. If project_id is not specified, only public and
community images are returned.
:param images_cache: An optional dict-like object in which to
cache public and per-project id image metadata.
]
if compare[name[images_cache] is constant[None]] begin[:]
variable[images_cache] assign[=] dictionary[[], []]
variable[public_images] assign[=] call[name[images_cache].get, parameter[constant[public_images], list[[]]]]
variable[community_images] assign[=] call[name[images_cache].get, parameter[constant[community_images], list[[]]]]
variable[images_by_project] assign[=] call[name[images_cache].get, parameter[constant[images_by_project], dictionary[[], []]]]
variable[shared_images] assign[=] call[name[images_cache].get, parameter[constant[shared_images], list[[]]]]
if compare[constant[public_images] <ast.NotIn object at 0x7da2590d7190> name[images_cache]] begin[:]
variable[public] assign[=] dictionary[[<ast.Constant object at 0x7da1b1950e80>, <ast.Constant object at 0x7da1b1950310>], [<ast.Constant object at 0x7da1b1950ac0>, <ast.Constant object at 0x7da1b1950280>]]
<ast.Try object at 0x7da1b19505e0>
if compare[name[project_id] is constant[None]] begin[:]
call[name[images_by_project]][name[project_id]] assign[=] list[[]]
if compare[name[project_id] <ast.NotIn object at 0x7da2590d7190> name[images_by_project]] begin[:]
variable[owner] assign[=] dictionary[[<ast.Constant object at 0x7da1b18a0ca0>, <ast.Constant object at 0x7da1b18a1cc0>], [<ast.Name object at 0x7da1b18a2dd0>, <ast.Constant object at 0x7da1b18a1f90>]]
<ast.Try object at 0x7da1b18a3d00>
if compare[constant[community_images] <ast.NotIn object at 0x7da2590d7190> name[images_cache]] begin[:]
variable[community] assign[=] dictionary[[<ast.Constant object at 0x7da1b18a0e50>, <ast.Constant object at 0x7da1b18a1db0>], [<ast.Constant object at 0x7da1b18a16f0>, <ast.Constant object at 0x7da1b18a0760>]]
<ast.Try object at 0x7da1b18a3280>
if compare[constant[shared_images] <ast.NotIn object at 0x7da2590d7190> name[images_cache]] begin[:]
variable[shared] assign[=] dictionary[[<ast.Constant object at 0x7da1b18a21a0>, <ast.Constant object at 0x7da1b18a3550>], [<ast.Constant object at 0x7da1b18a3eb0>, <ast.Constant object at 0x7da1b18a0a90>]]
<ast.Try object at 0x7da1b18a14e0>
if compare[constant[images_by_project] <ast.NotIn object at 0x7da2590d7190> name[images_cache]] begin[:]
call[name[images_cache]][constant[images_by_project]] assign[=] name[images_by_project]
variable[images] assign[=] binary_operation[binary_operation[binary_operation[name[owned_images] + name[public_images]] + name[community_images]] + name[shared_images]]
variable[image_ids] assign[=] list[[]]
variable[final_images] assign[=] list[[]]
for taget[name[image]] in starred[name[images]] begin[:]
if <ast.BoolOp object at 0x7da1b19dbc10> begin[:]
call[name[image_ids].append, parameter[name[image].id]]
call[name[final_images].append, parameter[name[image]]]
return[name[final_images]] | keyword[def] identifier[get_available_images] ( identifier[request] , identifier[project_id] = keyword[None] , identifier[images_cache] = keyword[None] ):
literal[string]
keyword[if] identifier[images_cache] keyword[is] keyword[None] :
identifier[images_cache] ={}
identifier[public_images] = identifier[images_cache] . identifier[get] ( literal[string] ,[])
identifier[community_images] = identifier[images_cache] . identifier[get] ( literal[string] ,[])
identifier[images_by_project] = identifier[images_cache] . identifier[get] ( literal[string] ,{})
identifier[shared_images] = identifier[images_cache] . identifier[get] ( literal[string] ,[])
keyword[if] literal[string] keyword[not] keyword[in] identifier[images_cache] :
identifier[public] ={ literal[string] : keyword[True] ,
literal[string] : literal[string] }
keyword[try] :
identifier[images] , identifier[_more] , identifier[_prev] = identifier[glance] . identifier[image_list_detailed] (
identifier[request] , identifier[filters] = identifier[public] )
identifier[public_images] += identifier[images]
identifier[images_cache] [ literal[string] ]= identifier[public_images]
keyword[except] identifier[Exception] :
identifier[exceptions] . identifier[handle] ( identifier[request] ,
identifier[_] ( literal[string] ))
keyword[if] identifier[project_id] keyword[is] keyword[None] :
identifier[images_by_project] [ identifier[project_id] ]=[]
keyword[if] identifier[project_id] keyword[not] keyword[in] identifier[images_by_project] :
identifier[owner] ={ literal[string] : identifier[project_id] ,
literal[string] : literal[string] }
keyword[try] :
identifier[owned_images] , identifier[_more] , identifier[_prev] = identifier[glance] . identifier[image_list_detailed] (
identifier[request] , identifier[filters] = identifier[owner] )
identifier[images_by_project] [ identifier[project_id] ]= identifier[owned_images]
keyword[except] identifier[Exception] :
identifier[owned_images] =[]
identifier[exceptions] . identifier[handle] ( identifier[request] ,
identifier[_] ( literal[string]
literal[string] ))
keyword[else] :
identifier[owned_images] = identifier[images_by_project] [ identifier[project_id] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[images_cache] :
identifier[community] ={ literal[string] : literal[string] ,
literal[string] : literal[string] }
keyword[try] :
identifier[images] , identifier[_more] , identifier[_prev] = identifier[glance] . identifier[image_list_detailed] (
identifier[request] , identifier[filters] = identifier[community] )
identifier[community_images] += identifier[images]
identifier[images_cache] [ literal[string] ]= identifier[community_images]
keyword[except] identifier[Exception] :
identifier[exceptions] . identifier[handle] ( identifier[request] ,
identifier[_] ( literal[string] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[images_cache] :
identifier[shared] ={ literal[string] : literal[string] ,
literal[string] : literal[string] }
keyword[try] :
identifier[shared_images] , identifier[_more] , identifier[_prev] = identifier[glance] . identifier[image_list_detailed] ( identifier[request] , identifier[filters] = identifier[shared] )
identifier[images_cache] [ literal[string] ]= identifier[shared_images]
keyword[except] identifier[Exception] :
identifier[exceptions] . identifier[handle] ( identifier[request] ,
identifier[_] ( literal[string] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[images_cache] :
identifier[images_cache] [ literal[string] ]= identifier[images_by_project]
identifier[images] = identifier[owned_images] + identifier[public_images] + identifier[community_images] + identifier[shared_images]
identifier[image_ids] =[]
identifier[final_images] =[]
keyword[for] identifier[image] keyword[in] identifier[images] :
keyword[if] identifier[image] . identifier[id] keyword[not] keyword[in] identifier[image_ids] keyword[and] identifier[image] . identifier[container_format] keyword[not] keyword[in] ( literal[string] , literal[string] ):
identifier[image_ids] . identifier[append] ( identifier[image] . identifier[id] )
identifier[final_images] . identifier[append] ( identifier[image] )
keyword[return] identifier[final_images] | def get_available_images(request, project_id=None, images_cache=None):
"""Returns a list of available images
Returns a list of images that are public, shared, community or owned by
the given project_id. If project_id is not specified, only public and
community images are returned.
:param images_cache: An optional dict-like object in which to
cache public and per-project id image metadata.
"""
if images_cache is None:
images_cache = {} # depends on [control=['if'], data=['images_cache']]
public_images = images_cache.get('public_images', [])
community_images = images_cache.get('community_images', [])
images_by_project = images_cache.get('images_by_project', {})
shared_images = images_cache.get('shared_images', [])
if 'public_images' not in images_cache:
public = {'is_public': True, 'status': 'active'}
try:
(images, _more, _prev) = glance.image_list_detailed(request, filters=public)
public_images += images
images_cache['public_images'] = public_images # depends on [control=['try'], data=[]]
except Exception:
exceptions.handle(request, _('Unable to retrieve public images.')) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['images_cache']]
# Preempt if we don't have a project_id yet.
if project_id is None:
images_by_project[project_id] = [] # depends on [control=['if'], data=['project_id']]
if project_id not in images_by_project:
owner = {'property-owner_id': project_id, 'status': 'active'}
try:
(owned_images, _more, _prev) = glance.image_list_detailed(request, filters=owner)
images_by_project[project_id] = owned_images # depends on [control=['try'], data=[]]
except Exception:
owned_images = []
exceptions.handle(request, _('Unable to retrieve images for the current project.')) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['project_id', 'images_by_project']]
else:
owned_images = images_by_project[project_id]
if 'community_images' not in images_cache:
community = {'visibility': 'community', 'status': 'active'}
try:
(images, _more, _prev) = glance.image_list_detailed(request, filters=community)
community_images += images
images_cache['community_images'] = community_images # depends on [control=['try'], data=[]]
except Exception:
exceptions.handle(request, _('Unable to retrieve community images.')) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['images_cache']]
if 'shared_images' not in images_cache:
shared = {'visibility': 'shared', 'status': 'active'}
try:
(shared_images, _more, _prev) = glance.image_list_detailed(request, filters=shared)
images_cache['shared_images'] = shared_images # depends on [control=['try'], data=[]]
except Exception:
exceptions.handle(request, _('Unable to retrieve shared images.')) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['images_cache']]
if 'images_by_project' not in images_cache:
images_cache['images_by_project'] = images_by_project # depends on [control=['if'], data=['images_cache']]
images = owned_images + public_images + community_images + shared_images
image_ids = []
final_images = []
for image in images:
if image.id not in image_ids and image.container_format not in ('aki', 'ari'):
image_ids.append(image.id)
final_images.append(image) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['image']]
return final_images |
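
The final dedup-and-filter pass is easy to miss; it is isolated below with a namedtuple standing in for the glance image objects (kernel 'aki' and ramdisk 'ari' images are excluded):

from collections import namedtuple

Image = namedtuple('Image', ['id', 'container_format'])

images = [
    Image('1', 'bare'),
    Image('1', 'bare'),  # duplicate id: dropped
    Image('2', 'aki'),   # kernel image: dropped
    Image('3', 'ovf'),
]

image_ids, final_images = [], []
for image in images:
    if image.id not in image_ids and \
            image.container_format not in ('aki', 'ari'):
        image_ids.append(image.id)
        final_images.append(image)

print([i.id for i in final_images])  # ['1', '3']
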
def get_version(version):
"""
Returns a PEP 440-compliant version number from VERSION.
Created by modifying django.utils.version.get_version
"""
# Now build the two parts of the version number:
# major = X.Y[.Z]
# sub = .devN - for development releases
# | {a|b|rc}N - for alpha, beta and rc releases
# | .postN - for post-release releases
assert len(version) == 5
version_parts = version[:2] if version[2] == 0 else version[:3]
# Build the first part of the version
major = '.'.join(str(x) for x in version_parts)
# Just return it if this is a final release version
if version[3] == 'final':
return major
# Add the rest
sub = ''.join(str(x) for x in version[3:5])
if version[3] == 'dev':
# Override the sub part. Add in a timestamp
timestamp = get_git_changeset()
sub = 'dev%s' % (timestamp if timestamp else version[4])
return '%s.%s' % (major, sub)
if version[3] == 'post':
# We need a dot for post
return '%s.%s' % (major, sub)
elif version[3] in ('a', 'b', 'rc'):
# No dot for these
return '%s%s' % (major, sub)
else:
raise ValueError('Invalid version: %s' % str(version)) | def function[get_version, parameter[version]]:
constant[
Returns a PEP 440-compliant version number from VERSION.
Created by modifying django.utils.version.get_version
]
assert[compare[call[name[len], parameter[name[version]]] equal[==] constant[5]]]
variable[version_parts] assign[=] <ast.IfExp object at 0x7da20e9575b0>
variable[major] assign[=] call[constant[.].join, parameter[<ast.GeneratorExp object at 0x7da20e955330>]]
if compare[call[name[version]][constant[3]] equal[==] constant[final]] begin[:]
return[name[major]]
variable[sub] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20e956aa0>]]
if compare[call[name[version]][constant[3]] equal[==] constant[dev]] begin[:]
variable[timestamp] assign[=] call[name[get_git_changeset], parameter[]]
variable[sub] assign[=] binary_operation[constant[dev%s] <ast.Mod object at 0x7da2590d6920> <ast.IfExp object at 0x7da20e957b80>]
return[binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e954c40>, <ast.Name object at 0x7da20e9566b0>]]]]
if compare[call[name[version]][constant[3]] equal[==] constant[post]] begin[:]
return[binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e954430>, <ast.Name object at 0x7da20e955510>]]]] | keyword[def] identifier[get_version] ( identifier[version] ):
literal[string]
keyword[assert] identifier[len] ( identifier[version] )== literal[int]
identifier[version_parts] = identifier[version] [: literal[int] ] keyword[if] identifier[version] [ literal[int] ]== literal[int] keyword[else] identifier[version] [: literal[int] ]
identifier[major] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version_parts] )
keyword[if] identifier[version] [ literal[int] ]== literal[string] :
keyword[return] identifier[major]
identifier[sub] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version] [ literal[int] : literal[int] ])
keyword[if] identifier[version] [ literal[int] ]== literal[string] :
identifier[timestamp] = identifier[get_git_changeset] ()
identifier[sub] = literal[string] %( identifier[timestamp] keyword[if] identifier[timestamp] keyword[else] identifier[version] [ literal[int] ])
keyword[return] literal[string] %( identifier[major] , identifier[sub] )
keyword[if] identifier[version] [ literal[int] ]== literal[string] :
keyword[return] literal[string] %( identifier[major] , identifier[sub] )
keyword[elif] identifier[version] [ literal[int] ] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[return] literal[string] %( identifier[major] , identifier[sub] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[str] ( identifier[version] )) | def get_version(version):
"""
Returns a PEP 440-compliant version number from VERSION.
Created by modifying django.utils.version.get_version
"""
# Now build the two parts of the version number:
# major = X.Y[.Z]
# sub = .devN - for development releases
# | {a|b|rc}N - for alpha, beta and rc releases
# | .postN - for post-release releases
assert len(version) == 5
version_parts = version[:2] if version[2] == 0 else version[:3]
# Build the first part of the version
major = '.'.join((str(x) for x in version_parts))
# Just return it if this is a final release version
if version[3] == 'final':
return major # depends on [control=['if'], data=[]]
# Add the rest
sub = ''.join((str(x) for x in version[3:5]))
if version[3] == 'dev':
# Override the sub part. Add in a timestamp
timestamp = get_git_changeset()
sub = 'dev%s' % (timestamp if timestamp else version[4])
return '%s.%s' % (major, sub) # depends on [control=['if'], data=[]]
if version[3] == 'post':
# We need a dot for post
return '%s.%s' % (major, sub) # depends on [control=['if'], data=[]]
elif version[3] in ('a', 'b', 'rc'):
# No dot for these
return '%s%s' % (major, sub) # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid version: %s' % str(version)) |
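
Usage sketch, reusing get_version() as defined above with get_git_changeset() stubbed out (only 'dev' versions consult it); the expected PEP 440 strings are in the comments:

def get_git_changeset():
    # Stand-in for the real helper; only consulted for 'dev' releases.
    return '20240101120000'

print(get_version((1, 2, 0, 'final', 0)))  # 1.2
print(get_version((1, 2, 3, 'rc', 1)))     # 1.2.3rc1
print(get_version((1, 2, 0, 'post', 2)))   # 1.2.post2
print(get_version((1, 2, 0, 'dev', 0)))    # 1.2.dev20240101120000
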
def mass_mailing_recipients():
"""
Returns iterable of all mass email recipients.
    Default behavior is to return a list of all active users' emails.
    This can be changed by providing a callback in settings that returns some
    other list of users, e.g. when user emails are stored in multiple,
    non-default models.
    To accomplish that, add the constant MASS_EMAIL_RECIPIENTS to settings; it
    should contain a dotted path to a function, e.g.
    >>> MASS_EMAIL_RECIPIENTS = 'emailtemplates.helpers.mass_mailing_recipients'
    :rtype: iterable
"""
if hasattr(settings, 'MASS_EMAIL_RECIPIENTS'):
callback_name = settings.MASS_EMAIL_RECIPIENTS.split('.')
module_name = '.'.join(callback_name[:-1])
func_name = callback_name[-1]
module = import_module(module_name)
func = getattr(module, func_name, lambda: [])
return func()
User = get_user_model()
if hasattr(User, 'is_active') and hasattr(User, 'email'):
filtered_users = User.objects.filter(is_active=True).exclude(email__isnull=True).exclude(email__exact='')
return filtered_users.values_list('email', flat=True).distinct()
return [] | def function[mass_mailing_recipients, parameter[]]:
constant[
Returns iterable of all mass email recipients.
    Default behavior is to return a list of all active users' emails.
    This can be changed by providing a callback in settings that returns some
    other list of users, e.g. when user emails are stored in multiple,
    non-default models.
    To accomplish that, add the constant MASS_EMAIL_RECIPIENTS to settings; it
    should contain a dotted path to a function, e.g.
    >>> MASS_EMAIL_RECIPIENTS = 'emailtemplates.helpers.mass_mailing_recipients'
    :rtype: iterable
]
if call[name[hasattr], parameter[name[settings], constant[MASS_EMAIL_RECIPIENTS]]] begin[:]
variable[callback_name] assign[=] call[name[settings].MASS_EMAIL_RECIPIENTS.split, parameter[constant[.]]]
variable[module_name] assign[=] call[constant[.].join, parameter[call[name[callback_name]][<ast.Slice object at 0x7da1b1970790>]]]
variable[func_name] assign[=] call[name[callback_name]][<ast.UnaryOp object at 0x7da1b1970bb0>]
variable[module] assign[=] call[name[import_module], parameter[name[module_name]]]
variable[func] assign[=] call[name[getattr], parameter[name[module], name[func_name], <ast.Lambda object at 0x7da1b1972440>]]
return[call[name[func], parameter[]]]
variable[User] assign[=] call[name[get_user_model], parameter[]]
if <ast.BoolOp object at 0x7da1b19405e0> begin[:]
variable[filtered_users] assign[=] call[call[call[name[User].objects.filter, parameter[]].exclude, parameter[]].exclude, parameter[]]
return[call[call[name[filtered_users].values_list, parameter[constant[email]]].distinct, parameter[]]]
return[list[[]]] | keyword[def] identifier[mass_mailing_recipients] ():
literal[string]
keyword[if] identifier[hasattr] ( identifier[settings] , literal[string] ):
identifier[callback_name] = identifier[settings] . identifier[MASS_EMAIL_RECIPIENTS] . identifier[split] ( literal[string] )
identifier[module_name] = literal[string] . identifier[join] ( identifier[callback_name] [:- literal[int] ])
identifier[func_name] = identifier[callback_name] [- literal[int] ]
identifier[module] = identifier[import_module] ( identifier[module_name] )
identifier[func] = identifier[getattr] ( identifier[module] , identifier[func_name] , keyword[lambda] :[])
keyword[return] identifier[func] ()
identifier[User] = identifier[get_user_model] ()
keyword[if] identifier[hasattr] ( identifier[User] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[User] , literal[string] ):
identifier[filtered_users] = identifier[User] . identifier[objects] . identifier[filter] ( identifier[is_active] = keyword[True] ). identifier[exclude] ( identifier[email__isnull] = keyword[True] ). identifier[exclude] ( identifier[email__exact] = literal[string] )
keyword[return] identifier[filtered_users] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] ). identifier[distinct] ()
keyword[return] [] | def mass_mailing_recipients():
"""
Returns iterable of all mass email recipients.
    Default behavior is to return a list of all active users' emails.
    This can be changed by providing a callback in settings that returns some
    other list of users, e.g. when user emails are stored in multiple,
    non-default models.
    To accomplish that, add the constant MASS_EMAIL_RECIPIENTS to settings; it
    should contain a dotted path to a function, e.g.
    >>> MASS_EMAIL_RECIPIENTS = 'emailtemplates.helpers.mass_mailing_recipients'
    :rtype: iterable
"""
if hasattr(settings, 'MASS_EMAIL_RECIPIENTS'):
callback_name = settings.MASS_EMAIL_RECIPIENTS.split('.')
module_name = '.'.join(callback_name[:-1])
func_name = callback_name[-1]
module = import_module(module_name)
func = getattr(module, func_name, lambda : [])
return func() # depends on [control=['if'], data=[]]
User = get_user_model()
if hasattr(User, 'is_active') and hasattr(User, 'email'):
filtered_users = User.objects.filter(is_active=True).exclude(email__isnull=True).exclude(email__exact='')
return filtered_users.values_list('email', flat=True).distinct() # depends on [control=['if'], data=[]]
return [] |
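
The dotted-path resolution is the part worth demonstrating; below is a runnable sketch using rpartition, which is equivalent to the split/join dance above, pointed at a stdlib function so the demo runs anywhere:

from importlib import import_module

def resolve(dotted_path):
    # rpartition('.') splits off the final attribute name.
    module_name, _, func_name = dotted_path.rpartition('.')
    module = import_module(module_name)
    return getattr(module, func_name, lambda: [])

func = resolve('os.path.abspath')  # stdlib target, purely for illustration
print(func('.'))                   # absolute path of the current directory
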
def process_raw_data(cls, raw_data):
"""Create a new model using raw API response."""
properties = raw_data.get("properties", {})
raw_content = properties.get("ipSecConfiguration", None)
if raw_content is not None:
ip_sec = IPSecConfiguration.from_raw_data(raw_content)
properties["ipSecConfiguration"] = ip_sec
ip_addresses = []
for raw_content in properties.get("ipAddresses", []):
ip_addresses.append(IPAddress.from_raw_data(raw_content))
properties["ipAddresses"] = ip_addresses
routes = []
for raw_content in properties.get("routes", []):
routes.append(NetworkInterfaceRoute.from_raw_data(raw_content))
properties["routes"] = routes
raw_content = properties.get("statistics", None)
if raw_content is not None:
statistics = NetworkInterfaceStatistics.from_raw_data(
raw_content)
properties["statistics"] = statistics
raw_content = properties.get("greConfiguration", None)
if raw_content is not None:
gre_configuration = GREConfiguration.from_raw_data(raw_content)
properties["greConfiguration"] = gre_configuration
raw_content = properties.get("l3Configuration", None)
if raw_content is not None:
l3_configuration = L3Configuration.from_raw_data(raw_content)
properties["l3Configuration"] = l3_configuration
raw_content = properties.get("gateway", None)
if raw_content is not None:
gateway = Resource.from_raw_data(raw_content)
properties["gateway"] = gateway
return super(NetworkConnections, cls).process_raw_data(raw_data) | def function[process_raw_data, parameter[cls, raw_data]]:
constant[Create a new model using raw API response.]
variable[properties] assign[=] call[name[raw_data].get, parameter[constant[properties], dictionary[[], []]]]
variable[raw_content] assign[=] call[name[properties].get, parameter[constant[ipSecConfiguration], constant[None]]]
if compare[name[raw_content] is_not constant[None]] begin[:]
variable[ip_sec] assign[=] call[name[IPSecConfiguration].from_raw_data, parameter[name[raw_content]]]
call[name[properties]][constant[ipSecConfiguration]] assign[=] name[ip_sec]
variable[ip_addresses] assign[=] list[[]]
for taget[name[raw_content]] in starred[call[name[properties].get, parameter[constant[ipAddresses], list[[]]]]] begin[:]
call[name[ip_addresses].append, parameter[call[name[IPAddress].from_raw_data, parameter[name[raw_content]]]]]
call[name[properties]][constant[ipAddresses]] assign[=] name[ip_addresses]
variable[routes] assign[=] list[[]]
for taget[name[raw_content]] in starred[call[name[properties].get, parameter[constant[routes], list[[]]]]] begin[:]
call[name[routes].append, parameter[call[name[NetworkInterfaceRoute].from_raw_data, parameter[name[raw_content]]]]]
call[name[properties]][constant[routes]] assign[=] name[routes]
variable[raw_content] assign[=] call[name[properties].get, parameter[constant[statistics], constant[None]]]
if compare[name[raw_content] is_not constant[None]] begin[:]
variable[statistics] assign[=] call[name[NetworkInterfaceStatistics].from_raw_data, parameter[name[raw_content]]]
call[name[properties]][constant[statistics]] assign[=] name[statistics]
variable[raw_content] assign[=] call[name[properties].get, parameter[constant[greConfiguration], constant[None]]]
if compare[name[raw_content] is_not constant[None]] begin[:]
variable[gre_configuration] assign[=] call[name[GREConfiguration].from_raw_data, parameter[name[raw_content]]]
call[name[properties]][constant[greConfiguration]] assign[=] name[gre_configuration]
variable[raw_content] assign[=] call[name[properties].get, parameter[constant[l3Configuration], constant[None]]]
if compare[name[raw_content] is_not constant[None]] begin[:]
variable[l3_configuration] assign[=] call[name[L3Configuration].from_raw_data, parameter[name[raw_content]]]
call[name[properties]][constant[l3Configuration]] assign[=] name[l3_configuration]
variable[raw_content] assign[=] call[name[properties].get, parameter[constant[gateway], constant[None]]]
if compare[name[raw_content] is_not constant[None]] begin[:]
variable[gateway] assign[=] call[name[Resource].from_raw_data, parameter[name[raw_content]]]
call[name[properties]][constant[gateway]] assign[=] name[gateway]
return[call[call[name[super], parameter[name[NetworkConnections], name[cls]]].process_raw_data, parameter[name[raw_data]]]] | keyword[def] identifier[process_raw_data] ( identifier[cls] , identifier[raw_data] ):
literal[string]
identifier[properties] = identifier[raw_data] . identifier[get] ( literal[string] ,{})
identifier[raw_content] = identifier[properties] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[raw_content] keyword[is] keyword[not] keyword[None] :
identifier[ip_sec] = identifier[IPSecConfiguration] . identifier[from_raw_data] ( identifier[raw_content] )
identifier[properties] [ literal[string] ]= identifier[ip_sec]
identifier[ip_addresses] =[]
keyword[for] identifier[raw_content] keyword[in] identifier[properties] . identifier[get] ( literal[string] ,[]):
identifier[ip_addresses] . identifier[append] ( identifier[IPAddress] . identifier[from_raw_data] ( identifier[raw_content] ))
identifier[properties] [ literal[string] ]= identifier[ip_addresses]
identifier[routes] =[]
keyword[for] identifier[raw_content] keyword[in] identifier[properties] . identifier[get] ( literal[string] ,[]):
identifier[routes] . identifier[append] ( identifier[NetworkInterfaceRoute] . identifier[from_raw_data] ( identifier[raw_content] ))
identifier[properties] [ literal[string] ]= identifier[routes]
identifier[raw_content] = identifier[properties] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[raw_content] keyword[is] keyword[not] keyword[None] :
identifier[statistics] = identifier[NetworkInterfaceStatistics] . identifier[from_raw_data] (
identifier[raw_content] )
identifier[properties] [ literal[string] ]= identifier[statistics]
identifier[raw_content] = identifier[properties] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[raw_content] keyword[is] keyword[not] keyword[None] :
identifier[gre_configuration] = identifier[GREConfiguration] . identifier[from_raw_data] ( identifier[raw_content] )
identifier[properties] [ literal[string] ]= identifier[gre_configuration]
identifier[raw_content] = identifier[properties] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[raw_content] keyword[is] keyword[not] keyword[None] :
identifier[l3_configuration] = identifier[L3Configuration] . identifier[from_raw_data] ( identifier[raw_content] )
identifier[properties] [ literal[string] ]= identifier[l3_configuration]
identifier[raw_content] = identifier[properties] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[raw_content] keyword[is] keyword[not] keyword[None] :
identifier[gateway] = identifier[Resource] . identifier[from_raw_data] ( identifier[raw_content] )
identifier[properties] [ literal[string] ]= identifier[gateway]
keyword[return] identifier[super] ( identifier[NetworkConnections] , identifier[cls] ). identifier[process_raw_data] ( identifier[raw_data] ) | def process_raw_data(cls, raw_data):
"""Create a new model using raw API response."""
properties = raw_data.get('properties', {})
raw_content = properties.get('ipSecConfiguration', None)
if raw_content is not None:
ip_sec = IPSecConfiguration.from_raw_data(raw_content)
properties['ipSecConfiguration'] = ip_sec # depends on [control=['if'], data=['raw_content']]
ip_addresses = []
for raw_content in properties.get('ipAddresses', []):
ip_addresses.append(IPAddress.from_raw_data(raw_content)) # depends on [control=['for'], data=['raw_content']]
properties['ipAddresses'] = ip_addresses
routes = []
for raw_content in properties.get('routes', []):
routes.append(NetworkInterfaceRoute.from_raw_data(raw_content)) # depends on [control=['for'], data=['raw_content']]
properties['routes'] = routes
raw_content = properties.get('statistics', None)
if raw_content is not None:
statistics = NetworkInterfaceStatistics.from_raw_data(raw_content)
properties['statistics'] = statistics # depends on [control=['if'], data=['raw_content']]
raw_content = properties.get('greConfiguration', None)
if raw_content is not None:
gre_configuration = GREConfiguration.from_raw_data(raw_content)
properties['greConfiguration'] = gre_configuration # depends on [control=['if'], data=['raw_content']]
raw_content = properties.get('l3Configuration', None)
if raw_content is not None:
l3_configuration = L3Configuration.from_raw_data(raw_content)
properties['l3Configuration'] = l3_configuration # depends on [control=['if'], data=['raw_content']]
raw_content = properties.get('gateway', None)
if raw_content is not None:
gateway = Resource.from_raw_data(raw_content)
properties['gateway'] = gateway # depends on [control=['if'], data=['raw_content']]
return super(NetworkConnections, cls).process_raw_data(raw_data) |
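
A self-contained sketch of the promotion pattern this method applies: optional nested dicts become typed objects via from_raw_data, and lists are mapped element-wise (IPAddress below is a stand-in, not the real HNV model):

class IPAddress(object):
    def __init__(self, address):
        self.address = address

    @classmethod
    def from_raw_data(cls, raw):
        return cls(raw.get("ipAddress"))

def promote(raw_data):
    # Optional nested dicts become typed objects; lists map element-wise.
    properties = raw_data.get("properties", {})
    properties["ipAddresses"] = [
        IPAddress.from_raw_data(item)
        for item in properties.get("ipAddresses", [])
    ]
    return properties

props = promote({"properties": {"ipAddresses": [{"ipAddress": "10.0.0.4"}]}})
print(props["ipAddresses"][0].address)  # 10.0.0.4
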
def _printTaxonomy(self, hrlinetop=True):
"""
print(a local taxonomy for the object)
"""
if not self.currentEntity: # ==> ontology level
return
if hrlinetop:
self._print("----------------")
self._print("TAXONOMY:", "IMPORTANT")
x = self.currentEntity['object']
parents = x.parents()
if not parents:
if self.currentEntity['type'] == 'class':
self._print("owl:Thing")
elif self.currentEntity['type'] == 'property':
self._print("RDF:Property")
elif self.currentEntity['type'] == 'concept':
self._print("SKOS:Concept")
else:
pass
else:
for p in parents:
self._print(p.qname)
self._print("..." + x.qname, "TEXT")
for c in x.children():
self._print("......" + c.qname)
self._print("----------------") | def function[_printTaxonomy, parameter[self, hrlinetop]]:
constant[
print(a local taxonomy for the object)
]
if <ast.UnaryOp object at 0x7da1b11aaaa0> begin[:]
return[None]
if name[hrlinetop] begin[:]
call[name[self]._print, parameter[constant[----------------]]]
call[name[self]._print, parameter[constant[TAXONOMY:], constant[IMPORTANT]]]
variable[x] assign[=] call[name[self].currentEntity][constant[object]]
variable[parents] assign[=] call[name[x].parents, parameter[]]
if <ast.UnaryOp object at 0x7da1b11aa830> begin[:]
if compare[call[name[self].currentEntity][constant[type]] equal[==] constant[class]] begin[:]
call[name[self]._print, parameter[constant[owl:Thing]]]
call[name[self]._print, parameter[binary_operation[constant[...] + name[x].qname], constant[TEXT]]]
for taget[name[c]] in starred[call[name[x].children, parameter[]]] begin[:]
call[name[self]._print, parameter[binary_operation[constant[......] + name[c].qname]]]
call[name[self]._print, parameter[constant[----------------]]] | keyword[def] identifier[_printTaxonomy] ( identifier[self] , identifier[hrlinetop] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[currentEntity] :
keyword[return]
keyword[if] identifier[hrlinetop] :
identifier[self] . identifier[_print] ( literal[string] )
identifier[self] . identifier[_print] ( literal[string] , literal[string] )
identifier[x] = identifier[self] . identifier[currentEntity] [ literal[string] ]
identifier[parents] = identifier[x] . identifier[parents] ()
keyword[if] keyword[not] identifier[parents] :
keyword[if] identifier[self] . identifier[currentEntity] [ literal[string] ]== literal[string] :
identifier[self] . identifier[_print] ( literal[string] )
keyword[elif] identifier[self] . identifier[currentEntity] [ literal[string] ]== literal[string] :
identifier[self] . identifier[_print] ( literal[string] )
keyword[elif] identifier[self] . identifier[currentEntity] [ literal[string] ]== literal[string] :
identifier[self] . identifier[_print] ( literal[string] )
keyword[else] :
keyword[pass]
keyword[else] :
keyword[for] identifier[p] keyword[in] identifier[parents] :
identifier[self] . identifier[_print] ( identifier[p] . identifier[qname] )
identifier[self] . identifier[_print] ( literal[string] + identifier[x] . identifier[qname] , literal[string] )
keyword[for] identifier[c] keyword[in] identifier[x] . identifier[children] ():
identifier[self] . identifier[_print] ( literal[string] + identifier[c] . identifier[qname] )
identifier[self] . identifier[_print] ( literal[string] ) | def _printTaxonomy(self, hrlinetop=True):
"""
print(a local taxonomy for the object)
"""
if not self.currentEntity: # ==> ontology level
return # depends on [control=['if'], data=[]]
if hrlinetop:
self._print('----------------') # depends on [control=['if'], data=[]]
self._print('TAXONOMY:', 'IMPORTANT')
x = self.currentEntity['object']
parents = x.parents()
if not parents:
if self.currentEntity['type'] == 'class':
self._print('owl:Thing') # depends on [control=['if'], data=[]]
elif self.currentEntity['type'] == 'property':
self._print('RDF:Property') # depends on [control=['if'], data=[]]
elif self.currentEntity['type'] == 'concept':
self._print('SKOS:Concept') # depends on [control=['if'], data=[]]
else:
pass # depends on [control=['if'], data=[]]
else:
for p in parents:
self._print(p.qname) # depends on [control=['for'], data=['p']]
self._print('...' + x.qname, 'TEXT')
for c in x.children():
self._print('......' + c.qname) # depends on [control=['for'], data=['c']]
self._print('----------------') |
def get_readout_time(self, child, duration):
"""Calculate the readout time of the detector from the EPICS driver:
- Set exposure and acquire period to same value
- Acquire period will be set to lowest acceptable value
- Difference will be readout time (this value is affected by
detector settings)
"""
child.exposure.put_value(duration)
child.acquirePeriod.put_value(duration)
readout_time = child.acquirePeriod.value - child.exposure.value
# It seems that the difference between acquirePeriod and exposure
# doesn't tell the whole story, we seem to need an additional bit
# of readout (or something) time on top
fudge_factor = duration * 0.004 + 0.001
return readout_time + fudge_factor | def function[get_readout_time, parameter[self, child, duration]]:
constant[Calculate the readout time of the detector from the EPICS driver:
- Set exposure and acquire period to same value
- Acquire period will be set to lowest acceptable value
- Difference will be readout time (this value is affected by
detector settings)
]
call[name[child].exposure.put_value, parameter[name[duration]]]
call[name[child].acquirePeriod.put_value, parameter[name[duration]]]
variable[readout_time] assign[=] binary_operation[name[child].acquirePeriod.value - name[child].exposure.value]
variable[fudge_factor] assign[=] binary_operation[binary_operation[name[duration] * constant[0.004]] + constant[0.001]]
return[binary_operation[name[readout_time] + name[fudge_factor]]] | keyword[def] identifier[get_readout_time] ( identifier[self] , identifier[child] , identifier[duration] ):
literal[string]
identifier[child] . identifier[exposure] . identifier[put_value] ( identifier[duration] )
identifier[child] . identifier[acquirePeriod] . identifier[put_value] ( identifier[duration] )
identifier[readout_time] = identifier[child] . identifier[acquirePeriod] . identifier[value] - identifier[child] . identifier[exposure] . identifier[value]
identifier[fudge_factor] = identifier[duration] * literal[int] + literal[int]
keyword[return] identifier[readout_time] + identifier[fudge_factor] | def get_readout_time(self, child, duration):
"""Calculate the readout time of the detector from the EPICS driver:
- Set exposure and acquire period to same value
- Acquire period will be set to lowest acceptable value
- Difference will be readout time (this value is affected by
detector settings)
"""
child.exposure.put_value(duration)
child.acquirePeriod.put_value(duration)
readout_time = child.acquirePeriod.value - child.exposure.value
# It seems that the difference between acquirePeriod and exposure
# doesn't tell the whole story, we seem to need an additional bit
# of readout (or something) time on top
fudge_factor = duration * 0.004 + 0.001
return readout_time + fudge_factor |
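
A worked example with fake EPICS attributes, assuming a driver that clamps the acquire period up by 2 ms; get_readout_time is restated as a plain function since the self argument is unused:

class Attr(object):
    def __init__(self):
        self.value = 0.0

    def put_value(self, value):
        self.value = value

class MinPeriodAttr(Attr):
    def put_value(self, value):
        # Pretend the driver rounds the acquire period up by 2 ms.
        self.value = value + 0.002

class FakeChild(object):
    def __init__(self):
        self.exposure = Attr()
        self.acquirePeriod = MinPeriodAttr()

def get_readout_time(child, duration):
    # Same logic as above, minus the (unused) self argument.
    child.exposure.put_value(duration)
    child.acquirePeriod.put_value(duration)
    readout_time = child.acquirePeriod.value - child.exposure.value
    fudge_factor = duration * 0.004 + 0.001
    return readout_time + fudge_factor

print(round(get_readout_time(FakeChild(), 0.1), 6))  # 0.0034 = 0.002 + 0.0014
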
def write_profile(name, repo, token):
"""Save a profile to the CONFIG_FILE.
After you use this method to save a profile, you can load it anytime
later with the ``read_profile()`` function defined above.
Args:
name
The name of the profile to save.
repo
The Github repo you want to connect to. For instance,
this repo is ``jtpaasch/simplygithub``.
token
A personal access token to connect to the repo. It is
a hash that looks something like ``ff20ae42dc...``
Returns:
A dictionary with the profile's ``repo`` and ``token`` values.
"""
make_sure_folder_exists(CONFIG_FOLDER)
config = configparser.ConfigParser()
config.read(CONFIG_FILE)
profile = {"repo": repo, "token": token}
config[name] = profile
with open(CONFIG_FILE, "w") as configfile:
config.write(configfile)
return profile | def function[write_profile, parameter[name, repo, token]]:
constant[Save a profile to the CONFIG_FILE.
After you use this method to save a profile, you can load it anytime
later with the ``read_profile()`` function defined above.
Args:
name
The name of the profile to save.
repo
The Github repo you want to connect to. For instance,
this repo is ``jtpaasch/simplygithub``.
token
A personal access token to connect to the repo. It is
a hash that looks something like ``ff20ae42dc...``
Returns:
A dictionary with the profile's ``repo`` and ``token`` values.
]
call[name[make_sure_folder_exists], parameter[name[CONFIG_FOLDER]]]
variable[config] assign[=] call[name[configparser].ConfigParser, parameter[]]
call[name[config].read, parameter[name[CONFIG_FILE]]]
variable[profile] assign[=] dictionary[[<ast.Constant object at 0x7da1b15b3610>, <ast.Constant object at 0x7da1b15b2110>], [<ast.Name object at 0x7da1b15b3280>, <ast.Name object at 0x7da1b15b3be0>]]
call[name[config]][name[name]] assign[=] name[profile]
with call[name[open], parameter[name[CONFIG_FILE], constant[w]]] begin[:]
call[name[config].write, parameter[name[configfile]]]
return[name[profile]] | keyword[def] identifier[write_profile] ( identifier[name] , identifier[repo] , identifier[token] ):
literal[string]
identifier[make_sure_folder_exists] ( identifier[CONFIG_FOLDER] )
identifier[config] = identifier[configparser] . identifier[ConfigParser] ()
identifier[config] . identifier[read] ( identifier[CONFIG_FILE] )
identifier[profile] ={ literal[string] : identifier[repo] , literal[string] : identifier[token] }
identifier[config] [ identifier[name] ]= identifier[profile]
keyword[with] identifier[open] ( identifier[CONFIG_FILE] , literal[string] ) keyword[as] identifier[configfile] :
identifier[config] . identifier[write] ( identifier[configfile] )
keyword[return] identifier[profile] | def write_profile(name, repo, token):
"""Save a profile to the CONFIG_FILE.
After you use this method to save a profile, you can load it anytime
later with the ``read_profile()`` function defined above.
Args:
name
The name of the profile to save.
repo
The Github repo you want to connect to. For instance,
this repo is ``jtpaasch/simplygithub``.
token
A personal access token to connect to the repo. It is
a hash that looks something like ``ff20ae42dc...``
Returns:
A dictionary with the profile's ``repo`` and ``token`` values.
"""
make_sure_folder_exists(CONFIG_FOLDER)
config = configparser.ConfigParser()
config.read(CONFIG_FILE)
profile = {'repo': repo, 'token': token}
config[name] = profile
with open(CONFIG_FILE, 'w') as configfile:
config.write(configfile) # depends on [control=['with'], data=['configfile']]
return profile |
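# Usage sketch for write_profile (hypothetical profile name and token; assumes
# the module-level CONFIG_FOLDER/CONFIG_FILE constants and the companion
# read_profile() mentioned in the docstring are available):
#
#     >>> write_profile("demo", "jtpaasch/simplygithub", "ff20ae42dc")
#     {'repo': 'jtpaasch/simplygithub', 'token': 'ff20ae42dc'}
#     >>> read_profile("demo")["repo"]
#     'jtpaasch/simplygithub'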
def write(self, path, wrap_ttl=None, **kwargs):
"""POST /<path>
        :param path: the Vault path to write to
        :type path: str
        :param wrap_ttl: response-wrapping TTL to request, if any
        :type wrap_ttl: int | str
        :param kwargs: key/value pairs sent as the JSON body
        :type kwargs: dict
        :return: the JSON response of the request (only returned on HTTP 200)
        :rtype: dict
"""
response = self._adapter.post('/v1/{0}'.format(path), json=kwargs, wrap_ttl=wrap_ttl)
if response.status_code == 200:
return response.json() | def function[write, parameter[self, path, wrap_ttl]]:
constant[POST /<path>
:param path:
:type path:
:param wrap_ttl:
:type wrap_ttl:
:param kwargs:
:type kwargs:
:return:
:rtype:
]
variable[response] assign[=] call[name[self]._adapter.post, parameter[call[constant[/v1/{0}].format, parameter[name[path]]]]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[write] ( identifier[self] , identifier[path] , identifier[wrap_ttl] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[response] = identifier[self] . identifier[_adapter] . identifier[post] ( literal[string] . identifier[format] ( identifier[path] ), identifier[json] = identifier[kwargs] , identifier[wrap_ttl] = identifier[wrap_ttl] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[response] . identifier[json] () | def write(self, path, wrap_ttl=None, **kwargs):
"""POST /<path>
        :param path: the Vault path to write to
        :type path: str
        :param wrap_ttl: response-wrapping TTL to request, if any
        :type wrap_ttl: int | str
        :param kwargs: key/value pairs sent as the JSON body
        :type kwargs: dict
        :return: the JSON response of the request (only returned on HTTP 200)
        :rtype: dict
"""
response = self._adapter.post('/v1/{0}'.format(path), json=kwargs, wrap_ttl=wrap_ttl)
if response.status_code == 200:
return response.json() # depends on [control=['if'], data=[]] |
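# Usage sketch for write() above (hedged: assumes a client object whose
# _adapter posts to a Vault-style HTTP API; the path and values are
# hypothetical):
#
#     >>> client.write("secret/myapp", password="s3cr3t", wrap_ttl=60)
#     {'request_id': '...', 'wrap_info': {...}, ...}
#
# Note that any non-200 status (e.g. 204 No Content) falls off the end of
# the method and returns None implicitly.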
def rtruncated_pareto(alpha, m, b, size=None):
"""
Random bounded Pareto variates.
"""
u = random_number(size)
return (-(u * b ** alpha - u * m ** alpha - b ** alpha) /
(b ** alpha * m ** alpha)) ** (-1. / alpha) | def function[rtruncated_pareto, parameter[alpha, m, b, size]]:
constant[
Random bounded Pareto variates.
]
variable[u] assign[=] call[name[random_number], parameter[name[size]]]
return[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20e957700> / binary_operation[binary_operation[name[b] ** name[alpha]] * binary_operation[name[m] ** name[alpha]]]] ** binary_operation[<ast.UnaryOp object at 0x7da18f58e950> / name[alpha]]]] | keyword[def] identifier[rtruncated_pareto] ( identifier[alpha] , identifier[m] , identifier[b] , identifier[size] = keyword[None] ):
literal[string]
identifier[u] = identifier[random_number] ( identifier[size] )
keyword[return] (-( identifier[u] * identifier[b] ** identifier[alpha] - identifier[u] * identifier[m] ** identifier[alpha] - identifier[b] ** identifier[alpha] )/
( identifier[b] ** identifier[alpha] * identifier[m] ** identifier[alpha] ))**(- literal[int] / identifier[alpha] ) | def rtruncated_pareto(alpha, m, b, size=None):
"""
Random bounded Pareto variates.
"""
u = random_number(size)
return (-(u * b ** alpha - u * m ** alpha - b ** alpha) / (b ** alpha * m ** alpha)) ** (-1.0 / alpha) |
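# A quick sanity check for rtruncated_pareto (a sketch: random_number() is
# assumed to return uniform(0, 1) variates, so numpy stands in for it here).
# The returned expression is the inverse CDF of a Pareto(alpha) distribution
# truncated to [m, b]: u = 0 maps to m, u = 1 maps to b, and the expression
# is monotone in u, so every draw must land in that interval.
import numpy as np

def _check_rtruncated_pareto(alpha=1.5, m=1.0, b=10.0, n=10000):
    u = np.random.uniform(size=n)
    x = (-(u * b ** alpha - u * m ** alpha - b ** alpha) /
         (b ** alpha * m ** alpha)) ** (-1.0 / alpha)
    # all samples fall in [m, b], up to floating-point rounding
    assert np.all(x >= m * (1 - 1e-12)) and np.all(x <= b * (1 + 1e-12))
    return x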
def contains_field_list(self, path, name):
"""
Returns True if a multi-valued field exists at the specified path, otherwise False.
:param path: str or Path instance
:param name:
:type name: str
:return:
:raises ValueError: A component of path is a field name.
:raises TypeError: The field name is a component of a path.
"""
try:
self.get_field_list(path, name)
return True
except KeyError:
return False | def function[contains_field_list, parameter[self, path, name]]:
constant[
Returns True if a multi-valued field exists at the specified path, otherwise False.
:param path: str or Path instance
:param name:
:type name: str
:return:
:raises ValueError: A component of path is a field name.
:raises TypeError: The field name is a component of a path.
]
<ast.Try object at 0x7da18dc9a1a0> | keyword[def] identifier[contains_field_list] ( identifier[self] , identifier[path] , identifier[name] ):
literal[string]
keyword[try] :
identifier[self] . identifier[get_field_list] ( identifier[path] , identifier[name] )
keyword[return] keyword[True]
keyword[except] identifier[KeyError] :
keyword[return] keyword[False] | def contains_field_list(self, path, name):
"""
Returns True if a multi-valued field exists at the specified path, otherwise False.
:param path: str or Path instance
:param name:
:type name: str
:return:
:raises ValueError: A component of path is a field name.
:raises TypeError: The field name is a component of a path.
"""
try:
self.get_field_list(path, name)
return True # depends on [control=['try'], data=[]]
except KeyError:
return False # depends on [control=['except'], data=[]] |
def circular(cls, shape, pixel_scale, radius_arcsec, centre=(0., 0.), invert=False):
"""Setup a mask where unmasked pixels are within a circle of an input arc second radius and centre.
Parameters
----------
shape: (int, int)
The (y,x) shape of the mask in units of pixels.
pixel_scale: float
The arc-second to pixel conversion factor of each pixel.
radius_arcsec : float
            The radius (in arc seconds) of the circle within which pixels are unmasked.
centre: (float, float)
The centre of the circle used to mask pixels.
"""
mask = mask_util.mask_circular_from_shape_pixel_scale_and_radius(shape, pixel_scale, radius_arcsec,
centre)
if invert: mask = np.invert(mask)
return cls(array=mask.astype('bool'), pixel_scale=pixel_scale) | def function[circular, parameter[cls, shape, pixel_scale, radius_arcsec, centre, invert]]:
constant[Setup a mask where unmasked pixels are within a circle of an input arc second radius and centre.
Parameters
----------
shape: (int, int)
The (y,x) shape of the mask in units of pixels.
pixel_scale: float
The arc-second to pixel conversion factor of each pixel.
radius_arcsec : float
The radius (in arc seconds) of the circle within which pixels unmasked.
centre: (float, float)
The centre of the circle used to mask pixels.
]
variable[mask] assign[=] call[name[mask_util].mask_circular_from_shape_pixel_scale_and_radius, parameter[name[shape], name[pixel_scale], name[radius_arcsec], name[centre]]]
if name[invert] begin[:]
variable[mask] assign[=] call[name[np].invert, parameter[name[mask]]]
return[call[name[cls], parameter[]]] | keyword[def] identifier[circular] ( identifier[cls] , identifier[shape] , identifier[pixel_scale] , identifier[radius_arcsec] , identifier[centre] =( literal[int] , literal[int] ), identifier[invert] = keyword[False] ):
literal[string]
identifier[mask] = identifier[mask_util] . identifier[mask_circular_from_shape_pixel_scale_and_radius] ( identifier[shape] , identifier[pixel_scale] , identifier[radius_arcsec] ,
identifier[centre] )
keyword[if] identifier[invert] : identifier[mask] = identifier[np] . identifier[invert] ( identifier[mask] )
keyword[return] identifier[cls] ( identifier[array] = identifier[mask] . identifier[astype] ( literal[string] ), identifier[pixel_scale] = identifier[pixel_scale] ) | def circular(cls, shape, pixel_scale, radius_arcsec, centre=(0.0, 0.0), invert=False):
"""Setup a mask where unmasked pixels are within a circle of an input arc second radius and centre.
Parameters
----------
shape: (int, int)
The (y,x) shape of the mask in units of pixels.
pixel_scale: float
The arc-second to pixel conversion factor of each pixel.
radius_arcsec : float
The radius (in arc seconds) of the circle within which pixels unmasked.
centre: (float, float)
The centre of the circle used to mask pixels.
"""
mask = mask_util.mask_circular_from_shape_pixel_scale_and_radius(shape, pixel_scale, radius_arcsec, centre)
if invert:
mask = np.invert(mask) # depends on [control=['if'], data=[]]
return cls(array=mask.astype('bool'), pixel_scale=pixel_scale) |
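# A minimal standalone sketch of what
# mask_util.mask_circular_from_shape_pixel_scale_and_radius is presumed to
# compute (an assumption, not the library's actual implementation): pixels
# whose arc-second distance from `centre` exceeds radius_arcsec are masked
# (True), on a grid centred on the middle of the array.
import numpy as np

def circular_mask_sketch(shape, pixel_scale, radius_arcsec, centre=(0.0, 0.0)):
    ys, xs = np.indices(shape).astype(float)
    # convert pixel indices to arc seconds, measured from the array centre
    ys = (ys - (shape[0] - 1) / 2.0) * pixel_scale - centre[0]
    xs = (xs - (shape[1] - 1) / 2.0) * pixel_scale - centre[1]
    return np.sqrt(ys ** 2 + xs ** 2) > radius_arcsec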
def process_iq(self, stanza):
"""Process IQ stanza received.
:Parameters:
- `stanza`: the stanza received
:Types:
- `stanza`: `Iq`
If a matching handler is available pass the stanza to it. Otherwise
ignore it if it is "error" or "result" stanza or return
"feature-not-implemented" error if it is "get" or "set"."""
typ = stanza.stanza_type
if typ in ("result", "error"):
return self._process_iq_response(stanza)
if typ not in ("get", "set"):
raise BadRequestProtocolError("Bad <iq/> type")
logger.debug("Handling <iq type='{0}'> stanza: {1!r}".format(
stanza, typ))
payload = stanza.get_payload(None)
logger.debug(" payload: {0!r}".format(payload))
if not payload:
raise BadRequestProtocolError("<iq/> stanza with no child element")
handler = self._get_iq_handler(typ, payload)
if not handler:
payload = stanza.get_payload(None, specialize = True)
logger.debug(" specialized payload: {0!r}".format(payload))
if not isinstance(payload, XMLPayload):
handler = self._get_iq_handler(typ, payload)
if handler:
response = handler(stanza)
self._process_handler_result(response)
return True
else:
raise ServiceUnavailableProtocolError("Not implemented") | def function[process_iq, parameter[self, stanza]]:
constant[Process IQ stanza received.
:Parameters:
- `stanza`: the stanza received
:Types:
- `stanza`: `Iq`
If a matching handler is available pass the stanza to it. Otherwise
ignore it if it is "error" or "result" stanza or return
"feature-not-implemented" error if it is "get" or "set".]
variable[typ] assign[=] name[stanza].stanza_type
if compare[name[typ] in tuple[[<ast.Constant object at 0x7da18ede7460>, <ast.Constant object at 0x7da18ede5ae0>]]] begin[:]
return[call[name[self]._process_iq_response, parameter[name[stanza]]]]
if compare[name[typ] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18ede4ee0>, <ast.Constant object at 0x7da18ede4e20>]]] begin[:]
<ast.Raise object at 0x7da18ede7340>
call[name[logger].debug, parameter[call[constant[Handling <iq type='{0}'> stanza: {1!r}].format, parameter[name[stanza], name[typ]]]]]
variable[payload] assign[=] call[name[stanza].get_payload, parameter[constant[None]]]
call[name[logger].debug, parameter[call[constant[ payload: {0!r}].format, parameter[name[payload]]]]]
if <ast.UnaryOp object at 0x7da18ede76d0> begin[:]
<ast.Raise object at 0x7da18ede5690>
variable[handler] assign[=] call[name[self]._get_iq_handler, parameter[name[typ], name[payload]]]
if <ast.UnaryOp object at 0x7da18ede4730> begin[:]
variable[payload] assign[=] call[name[stanza].get_payload, parameter[constant[None]]]
call[name[logger].debug, parameter[call[constant[ specialized payload: {0!r}].format, parameter[name[payload]]]]]
if <ast.UnaryOp object at 0x7da204620d60> begin[:]
variable[handler] assign[=] call[name[self]._get_iq_handler, parameter[name[typ], name[payload]]]
if name[handler] begin[:]
variable[response] assign[=] call[name[handler], parameter[name[stanza]]]
call[name[self]._process_handler_result, parameter[name[response]]]
return[constant[True]] | keyword[def] identifier[process_iq] ( identifier[self] , identifier[stanza] ):
literal[string]
identifier[typ] = identifier[stanza] . identifier[stanza_type]
keyword[if] identifier[typ] keyword[in] ( literal[string] , literal[string] ):
keyword[return] identifier[self] . identifier[_process_iq_response] ( identifier[stanza] )
keyword[if] identifier[typ] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[BadRequestProtocolError] ( literal[string] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[stanza] , identifier[typ] ))
identifier[payload] = identifier[stanza] . identifier[get_payload] ( keyword[None] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[payload] ))
keyword[if] keyword[not] identifier[payload] :
keyword[raise] identifier[BadRequestProtocolError] ( literal[string] )
identifier[handler] = identifier[self] . identifier[_get_iq_handler] ( identifier[typ] , identifier[payload] )
keyword[if] keyword[not] identifier[handler] :
identifier[payload] = identifier[stanza] . identifier[get_payload] ( keyword[None] , identifier[specialize] = keyword[True] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[payload] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[payload] , identifier[XMLPayload] ):
identifier[handler] = identifier[self] . identifier[_get_iq_handler] ( identifier[typ] , identifier[payload] )
keyword[if] identifier[handler] :
identifier[response] = identifier[handler] ( identifier[stanza] )
identifier[self] . identifier[_process_handler_result] ( identifier[response] )
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[ServiceUnavailableProtocolError] ( literal[string] ) | def process_iq(self, stanza):
"""Process IQ stanza received.
:Parameters:
- `stanza`: the stanza received
:Types:
- `stanza`: `Iq`
If a matching handler is available pass the stanza to it. Otherwise
ignore it if it is "error" or "result" stanza or return
"feature-not-implemented" error if it is "get" or "set"."""
typ = stanza.stanza_type
if typ in ('result', 'error'):
return self._process_iq_response(stanza) # depends on [control=['if'], data=[]]
if typ not in ('get', 'set'):
raise BadRequestProtocolError('Bad <iq/> type') # depends on [control=['if'], data=[]]
logger.debug("Handling <iq type='{0}'> stanza: {1!r}".format(stanza, typ))
payload = stanza.get_payload(None)
logger.debug(' payload: {0!r}'.format(payload))
if not payload:
raise BadRequestProtocolError('<iq/> stanza with no child element') # depends on [control=['if'], data=[]]
handler = self._get_iq_handler(typ, payload)
if not handler:
payload = stanza.get_payload(None, specialize=True)
logger.debug(' specialized payload: {0!r}'.format(payload))
if not isinstance(payload, XMLPayload):
handler = self._get_iq_handler(typ, payload) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if handler:
response = handler(stanza)
self._process_handler_result(response)
return True # depends on [control=['if'], data=[]]
else:
raise ServiceUnavailableProtocolError('Not implemented') |
def subscribe(self, callback_url, timeout=None):
"""
Set up a subscription to the events offered by this service.
"""
url = urljoin(self._url_base, self._event_sub_url)
headers = dict(
HOST=urlparse(url).netloc,
CALLBACK='<%s>' % callback_url,
NT='upnp:event'
)
if timeout is not None:
headers['TIMEOUT'] = 'Second-%s' % timeout
resp = requests.request('SUBSCRIBE', url, headers=headers, auth=self.device.http_auth)
resp.raise_for_status()
return Service.validate_subscription_response(resp) | def function[subscribe, parameter[self, callback_url, timeout]]:
constant[
Set up a subscription to the events offered by this service.
]
variable[url] assign[=] call[name[urljoin], parameter[name[self]._url_base, name[self]._event_sub_url]]
variable[headers] assign[=] call[name[dict], parameter[]]
if compare[name[timeout] is_not constant[None]] begin[:]
call[name[headers]][constant[TIMEOUT]] assign[=] binary_operation[constant[Second-%s] <ast.Mod object at 0x7da2590d6920> name[timeout]]
variable[resp] assign[=] call[name[requests].request, parameter[constant[SUBSCRIBE], name[url]]]
call[name[resp].raise_for_status, parameter[]]
return[call[name[Service].validate_subscription_response, parameter[name[resp]]]] | keyword[def] identifier[subscribe] ( identifier[self] , identifier[callback_url] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[url] = identifier[urljoin] ( identifier[self] . identifier[_url_base] , identifier[self] . identifier[_event_sub_url] )
identifier[headers] = identifier[dict] (
identifier[HOST] = identifier[urlparse] ( identifier[url] ). identifier[netloc] ,
identifier[CALLBACK] = literal[string] % identifier[callback_url] ,
identifier[NT] = literal[string]
)
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[headers] [ literal[string] ]= literal[string] % identifier[timeout]
identifier[resp] = identifier[requests] . identifier[request] ( literal[string] , identifier[url] , identifier[headers] = identifier[headers] , identifier[auth] = identifier[self] . identifier[device] . identifier[http_auth] )
identifier[resp] . identifier[raise_for_status] ()
keyword[return] identifier[Service] . identifier[validate_subscription_response] ( identifier[resp] ) | def subscribe(self, callback_url, timeout=None):
"""
Set up a subscription to the events offered by this service.
"""
url = urljoin(self._url_base, self._event_sub_url)
headers = dict(HOST=urlparse(url).netloc, CALLBACK='<%s>' % callback_url, NT='upnp:event')
if timeout is not None:
headers['TIMEOUT'] = 'Second-%s' % timeout # depends on [control=['if'], data=['timeout']]
resp = requests.request('SUBSCRIBE', url, headers=headers, auth=self.device.http_auth)
resp.raise_for_status()
return Service.validate_subscription_response(resp) |
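# For reference, the requests.request('SUBSCRIBE', ...) call above emits a
# raw UPnP/GENA request along these lines (host, path and callback are
# hypothetical):
#
#     SUBSCRIBE /eventSub HTTP/1.1
#     HOST: 192.168.1.10:8080
#     CALLBACK: <http://192.168.1.2:9000/events>
#     NT: upnp:event
#     TIMEOUT: Second-300
#
# The device is expected to answer with SID and TIMEOUT headers, which
# validate_subscription_response() presumably extracts.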
def fill_subparser(subparser):
"""Sets up a subparser to download the MNIST dataset files.
The following MNIST dataset files are downloaded from Yann LeCun's
website [LECUN]:
`train-images-idx3-ubyte.gz`, `train-labels-idx1-ubyte.gz`,
`t10k-images-idx3-ubyte.gz`, `t10k-labels-idx1-ubyte.gz`.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `mnist` command.
"""
filenames = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz',
't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz']
urls = ['http://yann.lecun.com/exdb/mnist/' + f for f in filenames]
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader | def function[fill_subparser, parameter[subparser]]:
constant[Sets up a subparser to download the MNIST dataset files.
The following MNIST dataset files are downloaded from Yann LeCun's
website [LECUN]:
`train-images-idx3-ubyte.gz`, `train-labels-idx1-ubyte.gz`,
`t10k-images-idx3-ubyte.gz`, `t10k-labels-idx1-ubyte.gz`.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `mnist` command.
]
variable[filenames] assign[=] list[[<ast.Constant object at 0x7da18f813dc0>, <ast.Constant object at 0x7da18f812650>, <ast.Constant object at 0x7da18f812080>, <ast.Constant object at 0x7da18f810160>]]
variable[urls] assign[=] <ast.ListComp object at 0x7da18f8131c0>
call[name[subparser].set_defaults, parameter[]]
return[name[default_downloader]] | keyword[def] identifier[fill_subparser] ( identifier[subparser] ):
literal[string]
identifier[filenames] =[ literal[string] , literal[string] ,
literal[string] , literal[string] ]
identifier[urls] =[ literal[string] + identifier[f] keyword[for] identifier[f] keyword[in] identifier[filenames] ]
identifier[subparser] . identifier[set_defaults] ( identifier[urls] = identifier[urls] , identifier[filenames] = identifier[filenames] )
keyword[return] identifier[default_downloader] | def fill_subparser(subparser):
"""Sets up a subparser to download the MNIST dataset files.
The following MNIST dataset files are downloaded from Yann LeCun's
website [LECUN]:
`train-images-idx3-ubyte.gz`, `train-labels-idx1-ubyte.gz`,
`t10k-images-idx3-ubyte.gz`, `t10k-labels-idx1-ubyte.gz`.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `mnist` command.
"""
filenames = ['train-images-idx3-ubyte.gz', 'train-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz', 't10k-labels-idx1-ubyte.gz']
urls = ['http://yann.lecun.com/exdb/mnist/' + f for f in filenames]
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader |
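# Wiring sketch for fill_subparser (the subcommand name is illustrative;
# default_downloader is assumed to accept the urls/filenames defaults set
# above as keyword arguments):
import argparse

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
mnist_parser = subparsers.add_parser('mnist')
downloader = fill_subparser(mnist_parser)  # returns default_downloader
args = parser.parse_args(['mnist'])
# downloader(**vars(args)) would then fetch the four MNIST files.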
def handle_packets(pk):
"""handle_packets
:param pk: data packet that kamene sends in
"""
log.info(("processing with pub={}")
.format(pub))
# get the lowest layer
eth = pk.getlayer(kamene.Ether)
should_forward = False
send_msg = {"data": {},
"created": rnow(),
"source": SOURCE}
if eth:
# parse all layer frames under ethernet
send_msg["data"] = convert_pkt_to_json(eth)
should_forward = True
else:
log.error(("unsupported pk={}")
.format(pk))
# end of if supported
if should_forward:
log.info("forwarding")
# Publish the message:
msg_sent = pub.publish(body=send_msg,
exchange=FORWARD_EXCHANGE,
routing_key=FORWARD_ROUTING_KEY,
queue=FORWARD_QUEUE,
serializer="json",
retry=True)
log.info("done forwarding={}".format(msg_sent)) | def function[handle_packets, parameter[pk]]:
constant[handle_packets
:param pk: data packet that kamene sends in
]
call[name[log].info, parameter[call[constant[processing with pub={}].format, parameter[name[pub]]]]]
variable[eth] assign[=] call[name[pk].getlayer, parameter[name[kamene].Ether]]
variable[should_forward] assign[=] constant[False]
variable[send_msg] assign[=] dictionary[[<ast.Constant object at 0x7da18ede4460>, <ast.Constant object at 0x7da18ede59c0>, <ast.Constant object at 0x7da18ede5a80>], [<ast.Dict object at 0x7da18ede4fa0>, <ast.Call object at 0x7da18ede41c0>, <ast.Name object at 0x7da18ede57e0>]]
if name[eth] begin[:]
call[name[send_msg]][constant[data]] assign[=] call[name[convert_pkt_to_json], parameter[name[eth]]]
variable[should_forward] assign[=] constant[True]
if name[should_forward] begin[:]
call[name[log].info, parameter[constant[forwarding]]]
variable[msg_sent] assign[=] call[name[pub].publish, parameter[]]
call[name[log].info, parameter[call[constant[done forwarding={}].format, parameter[name[msg_sent]]]]] | keyword[def] identifier[handle_packets] ( identifier[pk] ):
literal[string]
identifier[log] . identifier[info] (( literal[string] )
. identifier[format] ( identifier[pub] ))
identifier[eth] = identifier[pk] . identifier[getlayer] ( identifier[kamene] . identifier[Ether] )
identifier[should_forward] = keyword[False]
identifier[send_msg] ={ literal[string] :{},
literal[string] : identifier[rnow] (),
literal[string] : identifier[SOURCE] }
keyword[if] identifier[eth] :
identifier[send_msg] [ literal[string] ]= identifier[convert_pkt_to_json] ( identifier[eth] )
identifier[should_forward] = keyword[True]
keyword[else] :
identifier[log] . identifier[error] (( literal[string] )
. identifier[format] ( identifier[pk] ))
keyword[if] identifier[should_forward] :
identifier[log] . identifier[info] ( literal[string] )
identifier[msg_sent] = identifier[pub] . identifier[publish] ( identifier[body] = identifier[send_msg] ,
identifier[exchange] = identifier[FORWARD_EXCHANGE] ,
identifier[routing_key] = identifier[FORWARD_ROUTING_KEY] ,
identifier[queue] = identifier[FORWARD_QUEUE] ,
identifier[serializer] = literal[string] ,
identifier[retry] = keyword[True] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[msg_sent] )) | def handle_packets(pk):
"""handle_packets
:param pk: data packet that kamene sends in
"""
log.info('processing with pub={}'.format(pub))
# get the lowest layer
eth = pk.getlayer(kamene.Ether)
should_forward = False
send_msg = {'data': {}, 'created': rnow(), 'source': SOURCE}
if eth:
# parse all layer frames under ethernet
send_msg['data'] = convert_pkt_to_json(eth)
should_forward = True # depends on [control=['if'], data=[]]
else:
log.error('unsupported pk={}'.format(pk))
# end of if supported
if should_forward:
log.info('forwarding')
# Publish the message:
msg_sent = pub.publish(body=send_msg, exchange=FORWARD_EXCHANGE, routing_key=FORWARD_ROUTING_KEY, queue=FORWARD_QUEUE, serializer='json', retry=True)
log.info('done forwarding={}'.format(msg_sent)) # depends on [control=['if'], data=[]] |
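# Wiring sketch (hedged): kamene, the Python 3 scapy fork, exposes sniff(),
# whose prn callback receives each captured packet, so the handler above can
# be attached like this (requires capture privileges; constants such as
# FORWARD_EXCHANGE come from the surrounding module):
#
#     from kamene.all import sniff
#     sniff(prn=handle_packets, store=0)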
def authenticate(remote_addr, password, cert, key, verify_cert=True):
'''
Authenticate with a remote LXDaemon.
remote_addr :
        A URL to a remote server; you also have to give cert and key if
        you provide remote_addr and it's a TCP address!
Examples:
https://myserver.lan:8443
password :
The password of the remote.
cert :
PEM Formatted SSL Certificate.
Examples:
~/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
~/.config/lxc/client.key
verify_cert : True
        Whether to verify the cert; this is True by default, but in most
        cases you will want to turn it off, as LXD normally uses
        self-signed certificates.
CLI Example:
.. code-block:: bash
$ salt '*' lxd.authenticate https://srv01:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false
See the `requests-docs`_ for the SSL stuff.
.. _requests-docs: http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification
'''
client = pylxd_client_get(remote_addr, cert, key, verify_cert)
if client.trusted:
return True
try:
client.authenticate(password)
except pylxd.exceptions.LXDAPIException as e:
# Wrong password
raise CommandExecutionError(six.text_type(e))
return client.trusted | def function[authenticate, parameter[remote_addr, password, cert, key, verify_cert]]:
constant[
Authenticate with a remote LXDaemon.
remote_addr :
An URL to a remote Server, you also have to give cert and key if you
provide remote_addr and its a TCP Address!
Examples:
https://myserver.lan:8443
password :
The password of the remote.
cert :
PEM Formatted SSL Certificate.
Examples:
~/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
~/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
CLI Example:
.. code-block:: bash
$ salt '*' lxd.authenticate https://srv01:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false
See the `requests-docs`_ for the SSL stuff.
.. _requests-docs: http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification
]
variable[client] assign[=] call[name[pylxd_client_get], parameter[name[remote_addr], name[cert], name[key], name[verify_cert]]]
if name[client].trusted begin[:]
return[constant[True]]
<ast.Try object at 0x7da18f58f7f0>
return[name[client].trusted] | keyword[def] identifier[authenticate] ( identifier[remote_addr] , identifier[password] , identifier[cert] , identifier[key] , identifier[verify_cert] = keyword[True] ):
literal[string]
identifier[client] = identifier[pylxd_client_get] ( identifier[remote_addr] , identifier[cert] , identifier[key] , identifier[verify_cert] )
keyword[if] identifier[client] . identifier[trusted] :
keyword[return] keyword[True]
keyword[try] :
identifier[client] . identifier[authenticate] ( identifier[password] )
keyword[except] identifier[pylxd] . identifier[exceptions] . identifier[LXDAPIException] keyword[as] identifier[e] :
keyword[raise] identifier[CommandExecutionError] ( identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[return] identifier[client] . identifier[trusted] | def authenticate(remote_addr, password, cert, key, verify_cert=True):
"""
Authenticate with a remote LXDaemon.
remote_addr :
        A URL to a remote server; you also have to give cert and key if
        you provide remote_addr and it's a TCP address!
Examples:
https://myserver.lan:8443
password :
The password of the remote.
cert :
PEM Formatted SSL Certificate.
Examples:
~/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
~/.config/lxc/client.key
verify_cert : True
        Whether to verify the cert; this is True by default, but in most
        cases you will want to turn it off, as LXD normally uses
        self-signed certificates.
CLI Example:
.. code-block:: bash
$ salt '*' lxd.authenticate https://srv01:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false
See the `requests-docs`_ for the SSL stuff.
.. _requests-docs: http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification
"""
client = pylxd_client_get(remote_addr, cert, key, verify_cert)
if client.trusted:
return True # depends on [control=['if'], data=[]]
try:
client.authenticate(password) # depends on [control=['try'], data=[]]
except pylxd.exceptions.LXDAPIException as e:
# Wrong password
raise CommandExecutionError(six.text_type(e)) # depends on [control=['except'], data=['e']]
return client.trusted |
def zipWithIndex(self):
"""
Zips this RDD with its element indices.
The ordering is first based on the partition index and then the
ordering of items within each partition. So the first item in
the first partition gets index 0, and the last item in the last
partition receives the largest index.
This method needs to trigger a spark job when this RDD contains
        more than one partition.
>>> sc.parallelize(["a", "b", "c", "d"], 3).zipWithIndex().collect()
[('a', 0), ('b', 1), ('c', 2), ('d', 3)]
"""
starts = [0]
if self.getNumPartitions() > 1:
nums = self.mapPartitions(lambda it: [sum(1 for i in it)]).collect()
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return self.mapPartitionsWithIndex(func) | def function[zipWithIndex, parameter[self]]:
constant[
Zips this RDD with its element indices.
The ordering is first based on the partition index and then the
ordering of items within each partition. So the first item in
the first partition gets index 0, and the last item in the last
partition receives the largest index.
This method needs to trigger a spark job when this RDD contains
more than one partitions.
>>> sc.parallelize(["a", "b", "c", "d"], 3).zipWithIndex().collect()
[('a', 0), ('b', 1), ('c', 2), ('d', 3)]
]
variable[starts] assign[=] list[[<ast.Constant object at 0x7da20e954a00>]]
if compare[call[name[self].getNumPartitions, parameter[]] greater[>] constant[1]] begin[:]
variable[nums] assign[=] call[call[name[self].mapPartitions, parameter[<ast.Lambda object at 0x7da20e956050>]].collect, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[nums]]] - constant[1]]]]] begin[:]
call[name[starts].append, parameter[binary_operation[call[name[starts]][<ast.UnaryOp object at 0x7da20e9553f0>] + call[name[nums]][name[i]]]]]
def function[func, parameter[k, it]]:
for taget[tuple[[<ast.Name object at 0x7da20e955ab0>, <ast.Name object at 0x7da20e955360>]]] in starred[call[name[enumerate], parameter[name[it], call[name[starts]][name[k]]]]] begin[:]
<ast.Yield object at 0x7da20e954130>
return[call[name[self].mapPartitionsWithIndex, parameter[name[func]]]] | keyword[def] identifier[zipWithIndex] ( identifier[self] ):
literal[string]
identifier[starts] =[ literal[int] ]
keyword[if] identifier[self] . identifier[getNumPartitions] ()> literal[int] :
identifier[nums] = identifier[self] . identifier[mapPartitions] ( keyword[lambda] identifier[it] :[ identifier[sum] ( literal[int] keyword[for] identifier[i] keyword[in] identifier[it] )]). identifier[collect] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[nums] )- literal[int] ):
identifier[starts] . identifier[append] ( identifier[starts] [- literal[int] ]+ identifier[nums] [ identifier[i] ])
keyword[def] identifier[func] ( identifier[k] , identifier[it] ):
keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[it] , identifier[starts] [ identifier[k] ]):
keyword[yield] identifier[v] , identifier[i]
keyword[return] identifier[self] . identifier[mapPartitionsWithIndex] ( identifier[func] ) | def zipWithIndex(self):
"""
Zips this RDD with its element indices.
The ordering is first based on the partition index and then the
ordering of items within each partition. So the first item in
the first partition gets index 0, and the last item in the last
partition receives the largest index.
This method needs to trigger a spark job when this RDD contains
        more than one partition.
>>> sc.parallelize(["a", "b", "c", "d"], 3).zipWithIndex().collect()
[('a', 0), ('b', 1), ('c', 2), ('d', 3)]
"""
starts = [0]
if self.getNumPartitions() > 1:
nums = self.mapPartitions(lambda it: [sum((1 for i in it))]).collect()
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
def func(k, it):
for (i, v) in enumerate(it, starts[k]):
yield (v, i) # depends on [control=['for'], data=[]]
return self.mapPartitionsWithIndex(func) |
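# The interesting part of zipWithIndex is the per-partition offset table:
# starts[k] holds the number of elements in all partitions before k, so each
# partition can number its items without further coordination. The same
# bookkeeping in plain Python:
parts = [["a"], ["b", "c"], ["d"]]            # three "partitions"
starts = [0]
for n in [len(p) for p in parts][:-1]:
    starts.append(starts[-1] + n)             # starts == [0, 1, 3]
indexed = [(v, starts[k] + i)
           for k, part in enumerate(parts)
           for i, v in enumerate(part)]
assert indexed == [("a", 0), ("b", 1), ("c", 2), ("d", 3)]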
def _get_max_sigma(self, R):
"""Calculate maximum sigma of scanner RAS coordinates
Parameters
----------
R : 2D array, with shape [n_voxel, n_dim]
The coordinate matrix of fMRI data from one subject
Returns
-------
max_sigma : float
The maximum sigma of scanner coordinates.
"""
max_sigma = 2.0 * math.pow(np.nanmax(np.std(R, axis=0)), 2)
return max_sigma | def function[_get_max_sigma, parameter[self, R]]:
constant[Calculate maximum sigma of scanner RAS coordinates
Parameters
----------
R : 2D array, with shape [n_voxel, n_dim]
The coordinate matrix of fMRI data from one subject
Returns
-------
max_sigma : float
The maximum sigma of scanner coordinates.
]
variable[max_sigma] assign[=] binary_operation[constant[2.0] * call[name[math].pow, parameter[call[name[np].nanmax, parameter[call[name[np].std, parameter[name[R]]]]], constant[2]]]]
return[name[max_sigma]] | keyword[def] identifier[_get_max_sigma] ( identifier[self] , identifier[R] ):
literal[string]
identifier[max_sigma] = literal[int] * identifier[math] . identifier[pow] ( identifier[np] . identifier[nanmax] ( identifier[np] . identifier[std] ( identifier[R] , identifier[axis] = literal[int] )), literal[int] )
keyword[return] identifier[max_sigma] | def _get_max_sigma(self, R):
"""Calculate maximum sigma of scanner RAS coordinates
Parameters
----------
R : 2D array, with shape [n_voxel, n_dim]
The coordinate matrix of fMRI data from one subject
Returns
-------
max_sigma : float
The maximum sigma of scanner coordinates.
"""
max_sigma = 2.0 * math.pow(np.nanmax(np.std(R, axis=0)), 2)
return max_sigma |
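# In math terms, with R the (n_voxel, n_dim) coordinate matrix, the code
# above computes (LaTeX):
#
#     \sigma_{\max} = 2 \Big( \max_{d} \operatorname{std}(R_{:, d}) \Big)^{2}
#
# i.e. twice the square of the largest per-dimension standard deviation of
# the scanner coordinates; dimensions whose std comes out NaN are ignored
# by nanmax.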
def create_security_group(self, name, description, vpc_id=None):
"""
Create a new security group for your account.
This will create the security group within the region you
are currently connected to.
:type name: string
:param name: The name of the new security group
:type description: string
:param description: The description of the new security group
:type vpc_id: string
:param vpc_id: The ID of the VPC to create the security group in,
if any.
:rtype: :class:`boto.ec2.securitygroup.SecurityGroup`
        :return: The newly created :class:`boto.ec2.securitygroup.SecurityGroup`.
"""
params = {
'GroupName': name,
'GroupDescription': description
}
if vpc_id is not None:
params['VpcId'] = vpc_id
group = self.get_object('CreateSecurityGroup', params,
SecurityGroup, verb='POST')
group.name = name
group.description = description
return group | def function[create_security_group, parameter[self, name, description, vpc_id]]:
constant[
Create a new security group for your account.
This will create the security group within the region you
are currently connected to.
:type name: string
:param name: The name of the new security group
:type description: string
:param description: The description of the new security group
:type vpc_id: string
:param vpc_id: The ID of the VPC to create the security group in,
if any.
:rtype: :class:`boto.ec2.securitygroup.SecurityGroup`
:return: The newly created :class:`boto.ec2.keypair.KeyPair`.
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6ab790>, <ast.Constant object at 0x7da20c6a9030>], [<ast.Name object at 0x7da20c6ab550>, <ast.Name object at 0x7da20c6a89d0>]]
if compare[name[vpc_id] is_not constant[None]] begin[:]
call[name[params]][constant[VpcId]] assign[=] name[vpc_id]
variable[group] assign[=] call[name[self].get_object, parameter[constant[CreateSecurityGroup], name[params], name[SecurityGroup]]]
name[group].name assign[=] name[name]
name[group].description assign[=] name[description]
return[name[group]] | keyword[def] identifier[create_security_group] ( identifier[self] , identifier[name] , identifier[description] , identifier[vpc_id] = keyword[None] ):
literal[string]
identifier[params] ={
literal[string] : identifier[name] ,
literal[string] : identifier[description]
}
keyword[if] identifier[vpc_id] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[vpc_id]
identifier[group] = identifier[self] . identifier[get_object] ( literal[string] , identifier[params] ,
identifier[SecurityGroup] , identifier[verb] = literal[string] )
identifier[group] . identifier[name] = identifier[name]
identifier[group] . identifier[description] = identifier[description]
keyword[return] identifier[group] | def create_security_group(self, name, description, vpc_id=None):
"""
Create a new security group for your account.
This will create the security group within the region you
are currently connected to.
:type name: string
:param name: The name of the new security group
:type description: string
:param description: The description of the new security group
:type vpc_id: string
:param vpc_id: The ID of the VPC to create the security group in,
if any.
:rtype: :class:`boto.ec2.securitygroup.SecurityGroup`
        :return: The newly created :class:`boto.ec2.securitygroup.SecurityGroup`.
"""
params = {'GroupName': name, 'GroupDescription': description}
if vpc_id is not None:
params['VpcId'] = vpc_id # depends on [control=['if'], data=['vpc_id']]
group = self.get_object('CreateSecurityGroup', params, SecurityGroup, verb='POST')
group.name = name
group.description = description
return group |
def context(self, name, ctx):
"""
Execute the block with the given context applied. This manager
ensures that the context is removed even if an exception is raised
within the context.
"""
self.enter_context(name, ctx)
try:
yield
finally:
self.exit_context(name) | def function[context, parameter[self, name, ctx]]:
constant[
Execute the block with the given context applied. This manager
ensures that the context is removed even if an exception is raised
within the context.
]
call[name[self].enter_context, parameter[name[name], name[ctx]]]
<ast.Try object at 0x7da207f025f0> | keyword[def] identifier[context] ( identifier[self] , identifier[name] , identifier[ctx] ):
literal[string]
identifier[self] . identifier[enter_context] ( identifier[name] , identifier[ctx] )
keyword[try] :
keyword[yield]
keyword[finally] :
identifier[self] . identifier[exit_context] ( identifier[name] ) | def context(self, name, ctx):
"""
Execute the block with the given context applied. This manager
ensures that the context is removed even if an exception is raised
within the context.
"""
self.enter_context(name, ctx)
try:
yield # depends on [control=['try'], data=[]]
finally:
self.exit_context(name) |
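# Usage sketch for the generator method above (hedged: in the real class it
# is presumably decorated with contextlib.contextmanager, which is what
# makes the `with` form work):
#
#     with obj.context("db", transaction_ctx):
#         do_work()          # runs with the context applied
#     # exit_context("db") has run here, even if do_work() raised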
def _tokens_from_patsy(node):
"""
Yields all the individual tokens from within a patsy formula
as parsed by patsy.parse_formula.parse_formula.
Parameters
----------
node : patsy.parse_formula.ParseNode
"""
for n in node.args:
for t in _tokens_from_patsy(n):
yield t
if node.token:
yield node.token | def function[_tokens_from_patsy, parameter[node]]:
constant[
Yields all the individual tokens from within a patsy formula
as parsed by patsy.parse_formula.parse_formula.
Parameters
----------
node : patsy.parse_formula.ParseNode
]
for taget[name[n]] in starred[name[node].args] begin[:]
for taget[name[t]] in starred[call[name[_tokens_from_patsy], parameter[name[n]]]] begin[:]
<ast.Yield object at 0x7da18f09f550>
if name[node].token begin[:]
<ast.Yield object at 0x7da18f09dde0> | keyword[def] identifier[_tokens_from_patsy] ( identifier[node] ):
literal[string]
keyword[for] identifier[n] keyword[in] identifier[node] . identifier[args] :
keyword[for] identifier[t] keyword[in] identifier[_tokens_from_patsy] ( identifier[n] ):
keyword[yield] identifier[t]
keyword[if] identifier[node] . identifier[token] :
keyword[yield] identifier[node] . identifier[token] | def _tokens_from_patsy(node):
"""
Yields all the individual tokens from within a patsy formula
as parsed by patsy.parse_formula.parse_formula.
Parameters
----------
node : patsy.parse_formula.ParseNode
"""
for n in node.args:
for t in _tokens_from_patsy(n):
yield t # depends on [control=['for'], data=['t']] # depends on [control=['for'], data=['n']]
if node.token:
yield node.token # depends on [control=['if'], data=[]] |
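# Usage sketch (hedged: relies on patsy's parse_formula, which the docstring
# references; the token types shown are illustrative, not verified output):
#
#     >>> from patsy.parse_formula import parse_formula
#     >>> tree = parse_formula("y ~ x1 + x2")
#     >>> [t.type for t in _tokens_from_patsy(tree)]  # doctest: +SKIP
#     ['PYTHON_EXPR', 'PYTHON_EXPR', '+', 'PYTHON_EXPR', '~']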
def subscribeContext(self, objectID, domain, dist, varIDs=(
tc.VAR_ROAD_ID, tc.VAR_LANEPOSITION), begin=0, end=2**31 - 1):
"""subscribe(string, int, double, list(integer), int, int) -> None
Subscribe to one or more object values of the given domain around the
given objectID in a given radius
"""
Domain.subscribeContext(
self, objectID, domain, dist, varIDs, begin, end) | def function[subscribeContext, parameter[self, objectID, domain, dist, varIDs, begin, end]]:
constant[subscribe(string, int, double, list(integer), int, int) -> None
Subscribe to one or more object values of the given domain around the
given objectID in a given radius
]
call[name[Domain].subscribeContext, parameter[name[self], name[objectID], name[domain], name[dist], name[varIDs], name[begin], name[end]]] | keyword[def] identifier[subscribeContext] ( identifier[self] , identifier[objectID] , identifier[domain] , identifier[dist] , identifier[varIDs] =(
identifier[tc] . identifier[VAR_ROAD_ID] , identifier[tc] . identifier[VAR_LANEPOSITION] ), identifier[begin] = literal[int] , identifier[end] = literal[int] ** literal[int] - literal[int] ):
literal[string]
identifier[Domain] . identifier[subscribeContext] (
identifier[self] , identifier[objectID] , identifier[domain] , identifier[dist] , identifier[varIDs] , identifier[begin] , identifier[end] ) | def subscribeContext(self, objectID, domain, dist, varIDs=(tc.VAR_ROAD_ID, tc.VAR_LANEPOSITION), begin=0, end=2 ** 31 - 1):
"""subscribe(string, int, double, list(integer), int, int) -> None
Subscribe to one or more object values of the given domain around the
given objectID in a given radius
"""
Domain.subscribeContext(self, objectID, domain, dist, varIDs, begin, end) |
def getDPI(filepath):
"""
    Return (xDPI, yDPI) for a given image file's content
    no requirements beyond the standard library
"""
xDPI = -1
yDPI = -1
with open(filepath, 'rb') as fhandle:
head = fhandle.read(24)
size = len(head)
# handle GIFs
        # GIFs don't carry density information
if size >= 10 and head[:6] in (b'GIF87a', b'GIF89a'):
pass
        # handle PNGs: per the spec each chunk is a 4-byte length, a 4-byte type, then data; walk the chunks looking for pHYs
elif size >= 24 and head.startswith(b'\211PNG\r\n\032\n'):
chunkOffset = 8
chunk = head[8:]
while True:
chunkType = chunk[4:8]
if chunkType == b'pHYs':
try:
xDensity, yDensity, unit = struct.unpack(">LLB", chunk[8:])
except struct.error:
raise ValueError("Invalid PNG file")
if unit:
xDPI = _convertToDPI(xDensity, _UNIT_1M)
yDPI = _convertToDPI(yDensity, _UNIT_1M)
else: # no unit
xDPI = xDensity
yDPI = yDensity
break
elif chunkType == b'IDAT':
break
else:
try:
dataSize, = struct.unpack(">L", chunk[0:4])
except struct.error:
raise ValueError("Invalid PNG file")
chunkOffset += dataSize + 12
fhandle.seek(chunkOffset)
chunk = fhandle.read(17)
# handle JPEGs
elif size >= 2 and head.startswith(b'\377\330'):
try:
fhandle.seek(0) # Read 0xff next
size = 2
ftype = 0
while not 0xc0 <= ftype <= 0xcf:
if ftype == 0xe0: # APP0 marker
fhandle.seek(7, 1)
unit, xDensity, yDensity = struct.unpack(">BHH", fhandle.read(5))
if unit == 1 or unit == 0:
xDPI = xDensity
yDPI = yDensity
elif unit == 2:
xDPI = _convertToDPI(xDensity, _UNIT_CM)
yDPI = _convertToDPI(yDensity, _UNIT_CM)
break
fhandle.seek(size, 1)
byte = fhandle.read(1)
while ord(byte) == 0xff:
byte = fhandle.read(1)
ftype = ord(byte)
size = struct.unpack('>H', fhandle.read(2))[0] - 2
except struct.error:
raise ValueError("Invalid JPEG file")
# handle JPEG2000s
elif size >= 12 and head.startswith(b'\x00\x00\x00\x0cjP \r\n\x87\n'):
fhandle.seek(32)
# skip JP2 image header box
headerSize = struct.unpack('>L', fhandle.read(4))[0] - 8
fhandle.seek(4, 1)
foundResBox = False
try:
while headerSize > 0:
print("headerSize", headerSize)
boxHeader = fhandle.read(8)
boxType = boxHeader[4:]
print(boxType)
                    if boxType == b'res ':  # find resolution super box (boxHeader is bytes)
foundResBox = True
headerSize -= 8
print("found res super box")
break
print("@1", boxHeader)
boxSize, = struct.unpack('>L', boxHeader[:4])
print("boxSize", boxSize)
fhandle.seek(boxSize - 8, 1)
headerSize -= boxSize
if foundResBox:
while headerSize > 0:
boxHeader = fhandle.read(8)
boxType = boxHeader[4:]
print(boxType)
                            if boxType == b'resd':  # Display resolution box (bytes comparison)
print("@2")
yDensity, xDensity, yUnit, xUnit = struct.unpack(">HHBB", fhandle.read(10))
xDPI = _convertToDPI(xDensity, xUnit)
yDPI = _convertToDPI(yDensity, yUnit)
break
boxSize, = struct.unpack('>L', boxHeader[:4])
print("boxSize", boxSize)
fhandle.seek(boxSize - 8, 1)
headerSize -= boxSize
except struct.error as e:
print(e)
raise ValueError("Invalid JPEG2000 file")
return xDPI, yDPI | def function[getDPI, parameter[filepath]]:
constant[
Return (width, height) for a given img file content
no requirements
]
variable[xDPI] assign[=] <ast.UnaryOp object at 0x7da1b0ebf2b0>
variable[yDPI] assign[=] <ast.UnaryOp object at 0x7da1b0ebdf30>
with call[name[open], parameter[name[filepath], constant[rb]]] begin[:]
variable[head] assign[=] call[name[fhandle].read, parameter[constant[24]]]
variable[size] assign[=] call[name[len], parameter[name[head]]]
if <ast.BoolOp object at 0x7da1b0ebc7c0> begin[:]
pass
return[tuple[[<ast.Name object at 0x7da1b0e66d70>, <ast.Name object at 0x7da1b0e66da0>]]] | keyword[def] identifier[getDPI] ( identifier[filepath] ):
literal[string]
identifier[xDPI] =- literal[int]
identifier[yDPI] =- literal[int]
keyword[with] identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[fhandle] :
identifier[head] = identifier[fhandle] . identifier[read] ( literal[int] )
identifier[size] = identifier[len] ( identifier[head] )
keyword[if] identifier[size] >= literal[int] keyword[and] identifier[head] [: literal[int] ] keyword[in] ( literal[string] , literal[string] ):
keyword[pass]
keyword[elif] identifier[size] >= literal[int] keyword[and] identifier[head] . identifier[startswith] ( literal[string] ):
identifier[chunkOffset] = literal[int]
identifier[chunk] = identifier[head] [ literal[int] :]
keyword[while] keyword[True] :
identifier[chunkType] = identifier[chunk] [ literal[int] : literal[int] ]
keyword[if] identifier[chunkType] == literal[string] :
keyword[try] :
identifier[xDensity] , identifier[yDensity] , identifier[unit] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[chunk] [ literal[int] :])
keyword[except] identifier[struct] . identifier[error] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[unit] :
identifier[xDPI] = identifier[_convertToDPI] ( identifier[xDensity] , identifier[_UNIT_1M] )
identifier[yDPI] = identifier[_convertToDPI] ( identifier[yDensity] , identifier[_UNIT_1M] )
keyword[else] :
identifier[xDPI] = identifier[xDensity]
identifier[yDPI] = identifier[yDensity]
keyword[break]
keyword[elif] identifier[chunkType] == literal[string] :
keyword[break]
keyword[else] :
keyword[try] :
identifier[dataSize] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[chunk] [ literal[int] : literal[int] ])
keyword[except] identifier[struct] . identifier[error] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[chunkOffset] += identifier[dataSize] + literal[int]
identifier[fhandle] . identifier[seek] ( identifier[chunkOffset] )
identifier[chunk] = identifier[fhandle] . identifier[read] ( literal[int] )
keyword[elif] identifier[size] >= literal[int] keyword[and] identifier[head] . identifier[startswith] ( literal[string] ):
keyword[try] :
identifier[fhandle] . identifier[seek] ( literal[int] )
identifier[size] = literal[int]
identifier[ftype] = literal[int]
keyword[while] keyword[not] literal[int] <= identifier[ftype] <= literal[int] :
keyword[if] identifier[ftype] == literal[int] :
identifier[fhandle] . identifier[seek] ( literal[int] , literal[int] )
identifier[unit] , identifier[xDensity] , identifier[yDensity] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[fhandle] . identifier[read] ( literal[int] ))
keyword[if] identifier[unit] == literal[int] keyword[or] identifier[unit] == literal[int] :
identifier[xDPI] = identifier[xDensity]
identifier[yDPI] = identifier[yDensity]
keyword[elif] identifier[unit] == literal[int] :
identifier[xDPI] = identifier[_convertToDPI] ( identifier[xDensity] , identifier[_UNIT_CM] )
identifier[yDPI] = identifier[_convertToDPI] ( identifier[yDensity] , identifier[_UNIT_CM] )
keyword[break]
identifier[fhandle] . identifier[seek] ( identifier[size] , literal[int] )
identifier[byte] = identifier[fhandle] . identifier[read] ( literal[int] )
keyword[while] identifier[ord] ( identifier[byte] )== literal[int] :
identifier[byte] = identifier[fhandle] . identifier[read] ( literal[int] )
identifier[ftype] = identifier[ord] ( identifier[byte] )
identifier[size] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[fhandle] . identifier[read] ( literal[int] ))[ literal[int] ]- literal[int]
keyword[except] identifier[struct] . identifier[error] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[size] >= literal[int] keyword[and] identifier[head] . identifier[startswith] ( literal[string] ):
identifier[fhandle] . identifier[seek] ( literal[int] )
identifier[headerSize] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[fhandle] . identifier[read] ( literal[int] ))[ literal[int] ]- literal[int]
identifier[fhandle] . identifier[seek] ( literal[int] , literal[int] )
identifier[foundResBox] = keyword[False]
keyword[try] :
keyword[while] identifier[headerSize] > literal[int] :
identifier[print] ( literal[string] , identifier[headerSize] )
identifier[boxHeader] = identifier[fhandle] . identifier[read] ( literal[int] )
identifier[boxType] = identifier[boxHeader] [ literal[int] :]
identifier[print] ( identifier[boxType] )
keyword[if] identifier[boxType] == literal[string] :
identifier[foundResBox] = keyword[True]
identifier[headerSize] -= literal[int]
identifier[print] ( literal[string] )
keyword[break]
identifier[print] ( literal[string] , identifier[boxHeader] )
identifier[boxSize] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[boxHeader] [: literal[int] ])
identifier[print] ( literal[string] , identifier[boxSize] )
identifier[fhandle] . identifier[seek] ( identifier[boxSize] - literal[int] , literal[int] )
identifier[headerSize] -= identifier[boxSize]
keyword[if] identifier[foundResBox] :
keyword[while] identifier[headerSize] > literal[int] :
identifier[boxHeader] = identifier[fhandle] . identifier[read] ( literal[int] )
identifier[boxType] = identifier[boxHeader] [ literal[int] :]
identifier[print] ( identifier[boxType] )
keyword[if] identifier[boxType] == literal[string] :
identifier[print] ( literal[string] )
identifier[yDensity] , identifier[xDensity] , identifier[yUnit] , identifier[xUnit] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[fhandle] . identifier[read] ( literal[int] ))
identifier[xDPI] = identifier[_convertToDPI] ( identifier[xDensity] , identifier[xUnit] )
identifier[yDPI] = identifier[_convertToDPI] ( identifier[yDensity] , identifier[yUnit] )
keyword[break]
identifier[boxSize] ,= identifier[struct] . identifier[unpack] ( literal[string] , identifier[boxHeader] [: literal[int] ])
identifier[print] ( literal[string] , identifier[boxSize] )
identifier[fhandle] . identifier[seek] ( identifier[boxSize] - literal[int] , literal[int] )
identifier[headerSize] -= identifier[boxSize]
keyword[except] identifier[struct] . identifier[error] keyword[as] identifier[e] :
identifier[print] ( identifier[e] )
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[xDPI] , identifier[yDPI] | def getDPI(filepath):
"""
    Return (xDPI, yDPI) for a given image file's content
    no requirements beyond the standard library
"""
xDPI = -1
yDPI = -1
with open(filepath, 'rb') as fhandle:
head = fhandle.read(24)
size = len(head)
# handle GIFs
        # GIFs don't carry density information
if size >= 10 and head[:6] in (b'GIF87a', b'GIF89a'):
pass # depends on [control=['if'], data=[]]
        # handle PNGs: per the spec each chunk is a 4-byte length, a 4-byte type, then data; walk the chunks looking for pHYs
elif size >= 24 and head.startswith(b'\x89PNG\r\n\x1a\n'):
chunkOffset = 8
chunk = head[8:]
while True:
chunkType = chunk[4:8]
if chunkType == b'pHYs':
try:
(xDensity, yDensity, unit) = struct.unpack('>LLB', chunk[8:]) # depends on [control=['try'], data=[]]
except struct.error:
raise ValueError('Invalid PNG file') # depends on [control=['except'], data=[]]
if unit:
xDPI = _convertToDPI(xDensity, _UNIT_1M)
yDPI = _convertToDPI(yDensity, _UNIT_1M) # depends on [control=['if'], data=[]]
else: # no unit
xDPI = xDensity
yDPI = yDensity
break # depends on [control=['if'], data=[]]
elif chunkType == b'IDAT':
break # depends on [control=['if'], data=[]]
else:
try:
(dataSize,) = struct.unpack('>L', chunk[0:4]) # depends on [control=['try'], data=[]]
except struct.error:
raise ValueError('Invalid PNG file') # depends on [control=['except'], data=[]]
chunkOffset += dataSize + 12
fhandle.seek(chunkOffset)
chunk = fhandle.read(17) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
# handle JPEGs
elif size >= 2 and head.startswith(b'\xff\xd8'):
try:
fhandle.seek(0) # Read 0xff next
size = 2
ftype = 0
while not 192 <= ftype <= 207:
if ftype == 224: # APP0 marker
fhandle.seek(7, 1)
(unit, xDensity, yDensity) = struct.unpack('>BHH', fhandle.read(5))
if unit == 1 or unit == 0:
xDPI = xDensity
yDPI = yDensity # depends on [control=['if'], data=[]]
elif unit == 2:
xDPI = _convertToDPI(xDensity, _UNIT_CM)
yDPI = _convertToDPI(yDensity, _UNIT_CM) # depends on [control=['if'], data=[]]
break # depends on [control=['if'], data=[]]
fhandle.seek(size, 1)
byte = fhandle.read(1)
while ord(byte) == 255:
byte = fhandle.read(1) # depends on [control=['while'], data=[]]
ftype = ord(byte)
size = struct.unpack('>H', fhandle.read(2))[0] - 2 # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except struct.error:
raise ValueError('Invalid JPEG file') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# handle JPEG2000s
elif size >= 12 and head.startswith(b'\x00\x00\x00\x0cjP \r\n\x87\n'):
fhandle.seek(32)
# skip JP2 image header box
headerSize = struct.unpack('>L', fhandle.read(4))[0] - 8
fhandle.seek(4, 1)
foundResBox = False
try:
while headerSize > 0:
print('headerSize', headerSize)
boxHeader = fhandle.read(8)
boxType = boxHeader[4:]
print(boxType)
                    if boxType == b'res ':  # find resolution super box (boxHeader is bytes)
foundResBox = True
headerSize -= 8
print('found res super box')
break # depends on [control=['if'], data=[]]
print('@1', boxHeader)
(boxSize,) = struct.unpack('>L', boxHeader[:4])
print('boxSize', boxSize)
fhandle.seek(boxSize - 8, 1)
headerSize -= boxSize # depends on [control=['while'], data=['headerSize']]
if foundResBox:
while headerSize > 0:
boxHeader = fhandle.read(8)
boxType = boxHeader[4:]
print(boxType)
                            if boxType == b'resd':  # Display resolution box (bytes comparison)
print('@2')
(yDensity, xDensity, yUnit, xUnit) = struct.unpack('>HHBB', fhandle.read(10))
xDPI = _convertToDPI(xDensity, xUnit)
yDPI = _convertToDPI(yDensity, yUnit)
break # depends on [control=['if'], data=[]]
(boxSize,) = struct.unpack('>L', boxHeader[:4])
print('boxSize', boxSize)
fhandle.seek(boxSize - 8, 1)
headerSize -= boxSize # depends on [control=['while'], data=['headerSize']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except struct.error as e:
print(e)
raise ValueError('Invalid JPEG2000 file') # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fhandle']]
return (xDPI, yDPI) |
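# _convertToDPI and the _UNIT_* constants are referenced above but not shown.
# A plausible sketch of the helper (an assumption, not the module's actual
# code): each metric unit differs by a power of ten, and everything is
# normalised to dots per inch (1 inch = 2.54 cm).
_UNIT_KM = -3
_UNIT_100M = -2
_UNIT_10M = -1
_UNIT_1M = 0    # dots per metre, the PNG pHYs unit
_UNIT_10CM = 1
_UNIT_CM = 2    # dots per centimetre, JPEG APP0 unit 2
_UNIT_MM = 3
_UNIT_0_1MM = 4
_UNIT_INCH = 5  # already dots per inch

def _convertToDPI(density, unit):
    if unit == _UNIT_INCH:
        return int(density)
    # e.g. dots-per-metre * 0.0254 m/inch = DPI; 254 / 10**(4 - unit)
    # yields the right factor for every metric unit above
    return int(round(density * 254 / 10 ** (4 - unit)))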
def rollforward(self, date):
"""Roll date forward to nearest start of quarter"""
if self.onOffset(date):
return date
else:
return date + QuarterBegin(month=self.month) | def function[rollforward, parameter[self, date]]:
constant[Roll date forward to nearest start of quarter]
if call[name[self].onOffset, parameter[name[date]]] begin[:]
return[name[date]] | keyword[def] identifier[rollforward] ( identifier[self] , identifier[date] ):
literal[string]
keyword[if] identifier[self] . identifier[onOffset] ( identifier[date] ):
keyword[return] identifier[date]
keyword[else] :
keyword[return] identifier[date] + identifier[QuarterBegin] ( identifier[month] = identifier[self] . identifier[month] ) | def rollforward(self, date):
"""Roll date forward to nearest start of quarter"""
if self.onOffset(date):
return date # depends on [control=['if'], data=[]]
else:
return date + QuarterBegin(month=self.month) |
def do_gc(self, args):
"""gc - print out garbage collection information"""
### humm...
instance_type = getattr(types, 'InstanceType', object)
# snapshot of counts
type2count = {}
type2all = {}
for o in gc.get_objects():
if type(o) == instance_type:
type2count[o.__class__] = type2count.get(o.__class__,0) + 1
type2all[o.__class__] = type2all.get(o.__class__,0) + sys.getrefcount(o)
# count the things that have changed
ct = [ ( t.__module__
, t.__name__
, type2count[t]
, type2count[t] - self.type2count.get(t,0)
, type2all[t] - self.type2all.get(t,0)
) for t in type2count.iterkeys()
]
# ready for the next time
self.type2count = type2count
self.type2all = type2all
fmt = "%-30s %-30s %6s %6s %6s\n"
self.stdout.write(fmt % ("Module", "Type", "Count", "dCount", "dRef"))
# sorted by count
ct.sort(lambda x, y: cmp(y[2], x[2]))
for i in range(min(10,len(ct))):
m, n, c, delta1, delta2 = ct[i]
self.stdout.write(fmt % (m, n, c, delta1, delta2))
self.stdout.write("\n")
self.stdout.write(fmt % ("Module", "Type", "Count", "dCount", "dRef"))
# sorted by module and class
ct.sort()
for m, n, c, delta1, delta2 in ct:
if delta1 or delta2:
self.stdout.write(fmt % (m, n, c, delta1, delta2))
self.stdout.write("\n") | def function[do_gc, parameter[self, args]]:
constant[gc - print out garbage collection information]
variable[instance_type] assign[=] call[name[getattr], parameter[name[types], constant[InstanceType], name[object]]]
variable[type2count] assign[=] dictionary[[], []]
variable[type2all] assign[=] dictionary[[], []]
for taget[name[o]] in starred[call[name[gc].get_objects, parameter[]]] begin[:]
if compare[call[name[type], parameter[name[o]]] equal[==] name[instance_type]] begin[:]
call[name[type2count]][name[o].__class__] assign[=] binary_operation[call[name[type2count].get, parameter[name[o].__class__, constant[0]]] + constant[1]]
call[name[type2all]][name[o].__class__] assign[=] binary_operation[call[name[type2all].get, parameter[name[o].__class__, constant[0]]] + call[name[sys].getrefcount, parameter[name[o]]]]
variable[ct] assign[=] <ast.ListComp object at 0x7da1b084edd0>
name[self].type2count assign[=] name[type2count]
name[self].type2all assign[=] name[type2all]
variable[fmt] assign[=] constant[%-30s %-30s %6s %6s %6s
]
call[name[self].stdout.write, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b084d840>, <ast.Constant object at 0x7da1b084f3a0>, <ast.Constant object at 0x7da1b084fd60>, <ast.Constant object at 0x7da1b084e980>, <ast.Constant object at 0x7da1b084dc60>]]]]]
call[name[ct].sort, parameter[<ast.Lambda object at 0x7da1b084dcc0>]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[min], parameter[constant[10], call[name[len], parameter[name[ct]]]]]]]] begin[:]
<ast.Tuple object at 0x7da1b084ddb0> assign[=] call[name[ct]][name[i]]
call[name[self].stdout.write, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b084d180>, <ast.Name object at 0x7da1b084d420>, <ast.Name object at 0x7da1b084f070>, <ast.Name object at 0x7da1b084ead0>, <ast.Name object at 0x7da1b084ff40>]]]]]
call[name[self].stdout.write, parameter[constant[
]]]
call[name[self].stdout.write, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b084fbe0>, <ast.Constant object at 0x7da1b084d210>, <ast.Constant object at 0x7da1b084f4c0>, <ast.Constant object at 0x7da1b084d240>, <ast.Constant object at 0x7da1b084d780>]]]]]
call[name[ct].sort, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b084df90>, <ast.Name object at 0x7da1b084ea70>, <ast.Name object at 0x7da1b084e4a0>, <ast.Name object at 0x7da1b084ed70>, <ast.Name object at 0x7da1b084d0c0>]]] in starred[name[ct]] begin[:]
if <ast.BoolOp object at 0x7da1b084e2f0> begin[:]
call[name[self].stdout.write, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b084e0e0>, <ast.Name object at 0x7da1b084ce80>, <ast.Name object at 0x7da1b084dcf0>, <ast.Name object at 0x7da1b084f7f0>, <ast.Name object at 0x7da1b084f9a0>]]]]]
call[name[self].stdout.write, parameter[constant[
]]] | keyword[def] identifier[do_gc] ( identifier[self] , identifier[args] ):
literal[string]
identifier[instance_type] = identifier[getattr] ( identifier[types] , literal[string] , identifier[object] )
identifier[type2count] ={}
identifier[type2all] ={}
keyword[for] identifier[o] keyword[in] identifier[gc] . identifier[get_objects] ():
keyword[if] identifier[type] ( identifier[o] )== identifier[instance_type] :
identifier[type2count] [ identifier[o] . identifier[__class__] ]= identifier[type2count] . identifier[get] ( identifier[o] . identifier[__class__] , literal[int] )+ literal[int]
identifier[type2all] [ identifier[o] . identifier[__class__] ]= identifier[type2all] . identifier[get] ( identifier[o] . identifier[__class__] , literal[int] )+ identifier[sys] . identifier[getrefcount] ( identifier[o] )
identifier[ct] =[( identifier[t] . identifier[__module__]
, identifier[t] . identifier[__name__]
, identifier[type2count] [ identifier[t] ]
, identifier[type2count] [ identifier[t] ]- identifier[self] . identifier[type2count] . identifier[get] ( identifier[t] , literal[int] )
, identifier[type2all] [ identifier[t] ]- identifier[self] . identifier[type2all] . identifier[get] ( identifier[t] , literal[int] )
) keyword[for] identifier[t] keyword[in] identifier[type2count] . identifier[iterkeys] ()
]
identifier[self] . identifier[type2count] = identifier[type2count]
identifier[self] . identifier[type2all] = identifier[type2all]
identifier[fmt] = literal[string]
identifier[self] . identifier[stdout] . identifier[write] ( identifier[fmt] %( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ))
identifier[ct] . identifier[sort] ( keyword[lambda] identifier[x] , identifier[y] : identifier[cmp] ( identifier[y] [ literal[int] ], identifier[x] [ literal[int] ]))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[min] ( literal[int] , identifier[len] ( identifier[ct] ))):
identifier[m] , identifier[n] , identifier[c] , identifier[delta1] , identifier[delta2] = identifier[ct] [ identifier[i] ]
identifier[self] . identifier[stdout] . identifier[write] ( identifier[fmt] %( identifier[m] , identifier[n] , identifier[c] , identifier[delta1] , identifier[delta2] ))
identifier[self] . identifier[stdout] . identifier[write] ( literal[string] )
identifier[self] . identifier[stdout] . identifier[write] ( identifier[fmt] %( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ))
identifier[ct] . identifier[sort] ()
keyword[for] identifier[m] , identifier[n] , identifier[c] , identifier[delta1] , identifier[delta2] keyword[in] identifier[ct] :
keyword[if] identifier[delta1] keyword[or] identifier[delta2] :
identifier[self] . identifier[stdout] . identifier[write] ( identifier[fmt] %( identifier[m] , identifier[n] , identifier[c] , identifier[delta1] , identifier[delta2] ))
identifier[self] . identifier[stdout] . identifier[write] ( literal[string] ) | def do_gc(self, args):
"""gc - print out garbage collection information"""
### humm...
instance_type = getattr(types, 'InstanceType', object)
# snapshot of counts
type2count = {}
type2all = {}
for o in gc.get_objects():
if type(o) == instance_type:
type2count[o.__class__] = type2count.get(o.__class__, 0) + 1
type2all[o.__class__] = type2all.get(o.__class__, 0) + sys.getrefcount(o) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['o']]
# count the things that have changed
ct = [(t.__module__, t.__name__, type2count[t], type2count[t] - self.type2count.get(t, 0), type2all[t] - self.type2all.get(t, 0)) for t in type2count.iterkeys()]
# ready for the next time
self.type2count = type2count
self.type2all = type2all
fmt = '%-30s %-30s %6s %6s %6s\n'
self.stdout.write(fmt % ('Module', 'Type', 'Count', 'dCount', 'dRef'))
# sorted by count
ct.sort(lambda x, y: cmp(y[2], x[2]))
for i in range(min(10, len(ct))):
(m, n, c, delta1, delta2) = ct[i]
self.stdout.write(fmt % (m, n, c, delta1, delta2)) # depends on [control=['for'], data=['i']]
self.stdout.write('\n')
self.stdout.write(fmt % ('Module', 'Type', 'Count', 'dCount', 'dRef'))
# sorted by module and class
ct.sort()
for (m, n, c, delta1, delta2) in ct:
if delta1 or delta2:
self.stdout.write(fmt % (m, n, c, delta1, delta2)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.stdout.write('\n') |
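
The snapshot-and-delta idea behind do_gc, restated as standalone Python 3 (the sample above is Python 2: dict.iterkeys and the cmp-style sort(lambda x, y: ...) only exist there):

import gc
import sys

def snapshot_counts():
    # Count live gc-tracked objects and their total refcounts, keyed by class.
    counts, refs = {}, {}
    for obj in gc.get_objects():
        cls = type(obj)
        counts[cls] = counts.get(cls, 0) + 1
        refs[cls] = refs.get(cls, 0) + sys.getrefcount(obj)
    return counts, refs

before, _ = snapshot_counts()
junk = [bytearray(16) for _ in range(1000)]  # allocate something visible
after, _ = snapshot_counts()
print('new list objects since snapshot:', after.get(list, 0) - before.get(list, 0))
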
def _generate_create_dict(self,
size=None,
hostname=None,
domain=None,
location=None,
os=None,
port_speed=None,
ssh_keys=None,
post_uri=None,
hourly=True,
no_public=False,
extras=None):
"""Translates arguments into a dictionary for creating a server."""
extras = extras or []
package = self._get_package()
location = _get_location(package, location)
prices = []
for category in ['pri_ip_addresses',
'vpn_management',
'remote_management']:
prices.append(_get_default_price_id(package['items'],
option=category,
hourly=hourly,
location=location))
prices.append(_get_os_price_id(package['items'], os,
location=location))
prices.append(_get_bandwidth_price_id(package['items'],
hourly=hourly,
no_public=no_public,
location=location))
prices.append(_get_port_speed_price_id(package['items'],
port_speed,
no_public,
location=location))
for extra in extras:
prices.append(_get_extra_price_id(package['items'],
extra, hourly,
location=location))
hardware = {
'hostname': hostname,
'domain': domain,
}
order = {
'hardware': [hardware],
'location': location['keyname'],
'prices': [{'id': price} for price in prices],
'packageId': package['id'],
'presetId': _get_preset_id(package, size),
'useHourlyPricing': hourly,
}
if post_uri:
order['provisionScripts'] = [post_uri]
if ssh_keys:
order['sshKeys'] = [{'sshKeyIds': ssh_keys}]
return order | def function[_generate_create_dict, parameter[self, size, hostname, domain, location, os, port_speed, ssh_keys, post_uri, hourly, no_public, extras]]:
constant[Translates arguments into a dictionary for creating a server.]
variable[extras] assign[=] <ast.BoolOp object at 0x7da18bcca3e0>
variable[package] assign[=] call[name[self]._get_package, parameter[]]
variable[location] assign[=] call[name[_get_location], parameter[name[package], name[location]]]
variable[prices] assign[=] list[[]]
for taget[name[category]] in starred[list[[<ast.Constant object at 0x7da18bcc92d0>, <ast.Constant object at 0x7da18bcca9b0>, <ast.Constant object at 0x7da18bcca500>]]] begin[:]
call[name[prices].append, parameter[call[name[_get_default_price_id], parameter[call[name[package]][constant[items]]]]]]
call[name[prices].append, parameter[call[name[_get_os_price_id], parameter[call[name[package]][constant[items]], name[os]]]]]
call[name[prices].append, parameter[call[name[_get_bandwidth_price_id], parameter[call[name[package]][constant[items]]]]]]
call[name[prices].append, parameter[call[name[_get_port_speed_price_id], parameter[call[name[package]][constant[items]], name[port_speed], name[no_public]]]]]
for taget[name[extra]] in starred[name[extras]] begin[:]
call[name[prices].append, parameter[call[name[_get_extra_price_id], parameter[call[name[package]][constant[items]], name[extra], name[hourly]]]]]
variable[hardware] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8310>, <ast.Constant object at 0x7da18bccbc10>], [<ast.Name object at 0x7da18bcc98a0>, <ast.Name object at 0x7da18bcc8820>]]
variable[order] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8700>, <ast.Constant object at 0x7da18bccbbb0>, <ast.Constant object at 0x7da18bccae00>, <ast.Constant object at 0x7da18bcca860>, <ast.Constant object at 0x7da18bcc95a0>, <ast.Constant object at 0x7da18bcc81c0>], [<ast.List object at 0x7da18bccab90>, <ast.Subscript object at 0x7da18bcc9a80>, <ast.ListComp object at 0x7da18bcc92a0>, <ast.Subscript object at 0x7da18bccb670>, <ast.Call object at 0x7da18bcc8670>, <ast.Name object at 0x7da207f995a0>]]
if name[post_uri] begin[:]
call[name[order]][constant[provisionScripts]] assign[=] list[[<ast.Name object at 0x7da207f99ff0>]]
if name[ssh_keys] begin[:]
call[name[order]][constant[sshKeys]] assign[=] list[[<ast.Dict object at 0x7da207f9a7d0>]]
return[name[order]] | keyword[def] identifier[_generate_create_dict] ( identifier[self] ,
identifier[size] = keyword[None] ,
identifier[hostname] = keyword[None] ,
identifier[domain] = keyword[None] ,
identifier[location] = keyword[None] ,
identifier[os] = keyword[None] ,
identifier[port_speed] = keyword[None] ,
identifier[ssh_keys] = keyword[None] ,
identifier[post_uri] = keyword[None] ,
identifier[hourly] = keyword[True] ,
identifier[no_public] = keyword[False] ,
identifier[extras] = keyword[None] ):
literal[string]
identifier[extras] = identifier[extras] keyword[or] []
identifier[package] = identifier[self] . identifier[_get_package] ()
identifier[location] = identifier[_get_location] ( identifier[package] , identifier[location] )
identifier[prices] =[]
keyword[for] identifier[category] keyword[in] [ literal[string] ,
literal[string] ,
literal[string] ]:
identifier[prices] . identifier[append] ( identifier[_get_default_price_id] ( identifier[package] [ literal[string] ],
identifier[option] = identifier[category] ,
identifier[hourly] = identifier[hourly] ,
identifier[location] = identifier[location] ))
identifier[prices] . identifier[append] ( identifier[_get_os_price_id] ( identifier[package] [ literal[string] ], identifier[os] ,
identifier[location] = identifier[location] ))
identifier[prices] . identifier[append] ( identifier[_get_bandwidth_price_id] ( identifier[package] [ literal[string] ],
identifier[hourly] = identifier[hourly] ,
identifier[no_public] = identifier[no_public] ,
identifier[location] = identifier[location] ))
identifier[prices] . identifier[append] ( identifier[_get_port_speed_price_id] ( identifier[package] [ literal[string] ],
identifier[port_speed] ,
identifier[no_public] ,
identifier[location] = identifier[location] ))
keyword[for] identifier[extra] keyword[in] identifier[extras] :
identifier[prices] . identifier[append] ( identifier[_get_extra_price_id] ( identifier[package] [ literal[string] ],
identifier[extra] , identifier[hourly] ,
identifier[location] = identifier[location] ))
identifier[hardware] ={
literal[string] : identifier[hostname] ,
literal[string] : identifier[domain] ,
}
identifier[order] ={
literal[string] :[ identifier[hardware] ],
literal[string] : identifier[location] [ literal[string] ],
literal[string] :[{ literal[string] : identifier[price] } keyword[for] identifier[price] keyword[in] identifier[prices] ],
literal[string] : identifier[package] [ literal[string] ],
literal[string] : identifier[_get_preset_id] ( identifier[package] , identifier[size] ),
literal[string] : identifier[hourly] ,
}
keyword[if] identifier[post_uri] :
identifier[order] [ literal[string] ]=[ identifier[post_uri] ]
keyword[if] identifier[ssh_keys] :
identifier[order] [ literal[string] ]=[{ literal[string] : identifier[ssh_keys] }]
keyword[return] identifier[order] | def _generate_create_dict(self, size=None, hostname=None, domain=None, location=None, os=None, port_speed=None, ssh_keys=None, post_uri=None, hourly=True, no_public=False, extras=None):
"""Translates arguments into a dictionary for creating a server."""
extras = extras or []
package = self._get_package()
location = _get_location(package, location)
prices = []
for category in ['pri_ip_addresses', 'vpn_management', 'remote_management']:
prices.append(_get_default_price_id(package['items'], option=category, hourly=hourly, location=location)) # depends on [control=['for'], data=['category']]
prices.append(_get_os_price_id(package['items'], os, location=location))
prices.append(_get_bandwidth_price_id(package['items'], hourly=hourly, no_public=no_public, location=location))
prices.append(_get_port_speed_price_id(package['items'], port_speed, no_public, location=location))
for extra in extras:
prices.append(_get_extra_price_id(package['items'], extra, hourly, location=location)) # depends on [control=['for'], data=['extra']]
hardware = {'hostname': hostname, 'domain': domain}
order = {'hardware': [hardware], 'location': location['keyname'], 'prices': [{'id': price} for price in prices], 'packageId': package['id'], 'presetId': _get_preset_id(package, size), 'useHourlyPricing': hourly}
if post_uri:
order['provisionScripts'] = [post_uri] # depends on [control=['if'], data=[]]
if ssh_keys:
order['sshKeys'] = [{'sshKeyIds': ssh_keys}] # depends on [control=['if'], data=[]]
return order |
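
Illustrative only: the shape of the order dictionary this method assembles. All ids and keynames below are made up; real values come from the package and the _get_*_price_id helpers:

order = {
    'hardware': [{'hostname': 'web1', 'domain': 'example.com'}],
    'location': 'AMSTERDAM',              # location['keyname']
    'prices': [{'id': 21}, {'id': 420}],  # one entry per resolved price id
    'packageId': 200,
    'presetId': 64,                       # from _get_preset_id(package, size)
    'useHourlyPricing': True,
    # added only when the corresponding arguments are supplied:
    # 'provisionScripts': ['https://example.com/post-install.sh'],
    # 'sshKeys': [{'sshKeyIds': [1234]}],
}
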
def distinfo_dirname(cls, name, version):
"""
The *name* and *version* parameters are converted into their
filename-escaped form, i.e. any ``'-'`` characters are replaced
with ``'_'`` other than the one in ``'dist-info'`` and the one
separating the name from the version number.
:parameter name: is converted to a standard distribution name by replacing
                         any runs of non-alphanumeric characters with a single
``'-'``.
:type name: string
:parameter version: is converted to a standard version string. Spaces
become dots, and all other non-alphanumeric characters
(except dots) become dashes, with runs of multiple
dashes condensed to a single dash.
:type version: string
:returns: directory name
:rtype: string"""
name = name.replace('-', '_')
return '-'.join([name, version]) + DISTINFO_EXT | def function[distinfo_dirname, parameter[cls, name, version]]:
constant[
The *name* and *version* parameters are converted into their
filename-escaped form, i.e. any ``'-'`` characters are replaced
with ``'_'`` other than the one in ``'dist-info'`` and the one
separating the name from the version number.
:parameter name: is converted to a standard distribution name by replacing
                         any runs of non-alphanumeric characters with a single
``'-'``.
:type name: string
:parameter version: is converted to a standard version string. Spaces
become dots, and all other non-alphanumeric characters
(except dots) become dashes, with runs of multiple
dashes condensed to a single dash.
:type version: string
:returns: directory name
:rtype: string]
variable[name] assign[=] call[name[name].replace, parameter[constant[-], constant[_]]]
return[binary_operation[call[constant[-].join, parameter[list[[<ast.Name object at 0x7da1b1e8ec80>, <ast.Name object at 0x7da1b1e8c790>]]]] + name[DISTINFO_EXT]]] | keyword[def] identifier[distinfo_dirname] ( identifier[cls] , identifier[name] , identifier[version] ):
literal[string]
identifier[name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] literal[string] . identifier[join] ([ identifier[name] , identifier[version] ])+ identifier[DISTINFO_EXT] | def distinfo_dirname(cls, name, version):
"""
The *name* and *version* parameters are converted into their
filename-escaped form, i.e. any ``'-'`` characters are replaced
with ``'_'`` other than the one in ``'dist-info'`` and the one
separating the name from the version number.
:parameter name: is converted to a standard distribution name by replacing
any runs of non- alphanumeric characters with a single
``'-'``.
:type name: string
:parameter version: is converted to a standard version string. Spaces
become dots, and all other non-alphanumeric characters
(except dots) become dashes, with runs of multiple
dashes condensed to a single dash.
:type version: string
:returns: directory name
:rtype: string"""
name = name.replace('-', '_')
return '-'.join([name, version]) + DISTINFO_EXT |
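
A quick usage sketch, assuming DISTINFO_EXT is '.dist-info' (the PEP 376 suffix); cls is unused by the body, so a placeholder is passed:

DISTINFO_EXT = '.dist-info'  # assumed value of the module-level constant

print(distinfo_dirname(None, 'my-package', '1.0.2'))
# -> 'my_package-1.0.2.dist-info'
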
def return_params(islitlet, csu_bar_slit_center, params, parmodel):
"""Return individual model parameters from object of type Parameters.
Parameters
----------
islitlet : int
        Slitlet number.
csu_bar_slit_center : float
CSU bar slit center, in mm.
params : :class:`~lmfit.parameter.Parameters`
Parameters to be employed in the prediction of the distorted
boundaries.
parmodel : str
Model to be assumed. Allowed values are 'longslit' and
'multislit'.
Returns
-------
c2 : float
Coefficient corresponding to the term r**2 in distortion
equation.
c4 : float
Coefficient corresponding to the term r**4 in distortion
equation.
ff : float
Scaling factor to be applied to the Y axis.
slit_gap : float
Slit gap.
slit_height : float
Slit height.
theta0 : float
Additional rotation angle (radians).
x0 : float
X coordinate of reference pixel.
y0 : float
Y coordinate of reference pixel.
y_baseline : float
Y coordinate employed as baseline.
"""
if parmodel == "longslit":
# set each variable in EXPECTED_PARAMETER_LIST to the value
# transferred through 'params'
c2 = params['c2'].value
c4 = params['c4'].value
ff = params['ff'].value
slit_gap = params['slit_gap'].value
slit_height = params['slit_height'].value
theta0_origin = params['theta0_origin'].value
theta0_slope = params['theta0_slope'].value
x0 = params['x0'].value
y0 = params['y0'].value
y_baseline = params['y_baseline'].value
else:
# set each variable in EXPECTED_PARAMETER_LIST_EXTENDED to the value
# transferred through 'params'
c2_a0s = params['c2_a0s'].value
c2_a1s = params['c2_a1s'].value / 1E3
c2_a2s = params['c2_a2s'].value / 1E6
c2 = c2_a0s + \
c2_a1s * csu_bar_slit_center + \
c2_a2s * csu_bar_slit_center ** 2
# ---
c4_a0s = params['c4_a0s'].value
c4_a1s = params['c4_a1s'].value / 1E3
c4_a2s = params['c4_a2s'].value / 1E6
c4 = c4_a0s + \
c4_a1s * csu_bar_slit_center + \
c4_a2s * csu_bar_slit_center ** 2
# ---
ff_a0s = params['ff_a0s'].value
ff_a1s = params['ff_a1s'].value / 1E3
ff_a2s = params['ff_a2s'].value / 1E6
ff = ff_a0s + \
ff_a1s * csu_bar_slit_center + \
ff_a2s * csu_bar_slit_center ** 2
# ---
slit_gap_a0s = params['slit_gap_a0s'].value
slit_gap_a1s = params['slit_gap_a1s'].value / 1E3
slit_gap_a2s = params['slit_gap_a2s'].value / 1E6
slit_gap = slit_gap_a0s + \
slit_gap_a1s * csu_bar_slit_center + \
slit_gap_a2s * csu_bar_slit_center ** 2
# ---
slit_height_a0s = params['slit_height_a0s'].value
slit_height_a1s = params['slit_height_a1s'].value / 1E3
slit_height_a2s = params['slit_height_a2s'].value / 1E6
slit_height = slit_height_a0s + \
slit_height_a1s * csu_bar_slit_center + \
slit_height_a2s * csu_bar_slit_center ** 2
# ---
theta0_origin_a0s = params['theta0_origin_a0s'].value
theta0_origin_a1s = params['theta0_origin_a1s'].value / 1E3
theta0_origin_a2s = params['theta0_origin_a2s'].value / 1E6
theta0_origin = theta0_origin_a0s + \
theta0_origin_a1s * csu_bar_slit_center + \
theta0_origin_a2s * csu_bar_slit_center ** 2
# ---
theta0_slope_a0s = params['theta0_slope_a0s'].value
theta0_slope_a1s = params['theta0_slope_a1s'].value / 1E3
theta0_slope_a2s = params['theta0_slope_a2s'].value / 1E6
theta0_slope = theta0_slope_a0s + \
theta0_slope_a1s * csu_bar_slit_center + \
theta0_slope_a2s * csu_bar_slit_center ** 2
# ---
x0_a0s = params['x0_a0s'].value
x0_a1s = params['x0_a1s'].value / 1E3
x0_a2s = params['x0_a2s'].value / 1E6
x0 = x0_a0s + \
x0_a1s * csu_bar_slit_center + \
x0_a2s * csu_bar_slit_center ** 2
# ---
y0_a0s = params['y0_a0s'].value
y0_a1s = params['y0_a1s'].value / 1E3
y0_a2s = params['y0_a2s'].value / 1E6
y0 = y0_a0s + \
y0_a1s * csu_bar_slit_center + \
y0_a2s * csu_bar_slit_center ** 2
# ---
y_baseline_a0s = params['y_baseline_a0s'].value
y_baseline_a1s = params['y_baseline_a1s'].value / 1E3
y_baseline_a2s = params['y_baseline_a2s'].value / 1E6
y_baseline = y_baseline_a0s + \
y_baseline_a1s * csu_bar_slit_center + \
y_baseline_a2s * csu_bar_slit_center ** 2
theta0 = theta0_origin / 1E3 + theta0_slope / 1E4 * islitlet
return c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline | def function[return_params, parameter[islitlet, csu_bar_slit_center, params, parmodel]]:
constant[Return individual model parameters from object of type Parameters.
Parameters
----------
islitlet : int
        Slitlet number.
csu_bar_slit_center : float
CSU bar slit center, in mm.
params : :class:`~lmfit.parameter.Parameters`
Parameters to be employed in the prediction of the distorted
boundaries.
parmodel : str
Model to be assumed. Allowed values are 'longslit' and
'multislit'.
Returns
-------
c2 : float
Coefficient corresponding to the term r**2 in distortion
equation.
c4 : float
Coefficient corresponding to the term r**4 in distortion
equation.
ff : float
Scaling factor to be applied to the Y axis.
slit_gap : float
Slit gap.
slit_height : float
Slit height.
theta0 : float
Additional rotation angle (radians).
x0 : float
X coordinate of reference pixel.
y0 : float
Y coordinate of reference pixel.
y_baseline : float
Y coordinate employed as baseline.
]
if compare[name[parmodel] equal[==] constant[longslit]] begin[:]
variable[c2] assign[=] call[name[params]][constant[c2]].value
variable[c4] assign[=] call[name[params]][constant[c4]].value
variable[ff] assign[=] call[name[params]][constant[ff]].value
variable[slit_gap] assign[=] call[name[params]][constant[slit_gap]].value
variable[slit_height] assign[=] call[name[params]][constant[slit_height]].value
variable[theta0_origin] assign[=] call[name[params]][constant[theta0_origin]].value
variable[theta0_slope] assign[=] call[name[params]][constant[theta0_slope]].value
variable[x0] assign[=] call[name[params]][constant[x0]].value
variable[y0] assign[=] call[name[params]][constant[y0]].value
variable[y_baseline] assign[=] call[name[params]][constant[y_baseline]].value
variable[theta0] assign[=] binary_operation[binary_operation[name[theta0_origin] / constant[1000.0]] + binary_operation[binary_operation[name[theta0_slope] / constant[10000.0]] * name[islitlet]]]
return[tuple[[<ast.Name object at 0x7da18ede4be0>, <ast.Name object at 0x7da18ede4a30>, <ast.Name object at 0x7da18ede6170>, <ast.Name object at 0x7da18ede5ea0>, <ast.Name object at 0x7da18ede6530>, <ast.Name object at 0x7da18ede6140>, <ast.Name object at 0x7da18ede5300>, <ast.Name object at 0x7da18ede7cd0>, <ast.Name object at 0x7da18ede5510>]]] | keyword[def] identifier[return_params] ( identifier[islitlet] , identifier[csu_bar_slit_center] , identifier[params] , identifier[parmodel] ):
literal[string]
keyword[if] identifier[parmodel] == literal[string] :
identifier[c2] = identifier[params] [ literal[string] ]. identifier[value]
identifier[c4] = identifier[params] [ literal[string] ]. identifier[value]
identifier[ff] = identifier[params] [ literal[string] ]. identifier[value]
identifier[slit_gap] = identifier[params] [ literal[string] ]. identifier[value]
identifier[slit_height] = identifier[params] [ literal[string] ]. identifier[value]
identifier[theta0_origin] = identifier[params] [ literal[string] ]. identifier[value]
identifier[theta0_slope] = identifier[params] [ literal[string] ]. identifier[value]
identifier[x0] = identifier[params] [ literal[string] ]. identifier[value]
identifier[y0] = identifier[params] [ literal[string] ]. identifier[value]
identifier[y_baseline] = identifier[params] [ literal[string] ]. identifier[value]
keyword[else] :
identifier[c2_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[c2_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[c2_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[c2] = identifier[c2_a0s] + identifier[c2_a1s] * identifier[csu_bar_slit_center] + identifier[c2_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[c4_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[c4_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[c4_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[c4] = identifier[c4_a0s] + identifier[c4_a1s] * identifier[csu_bar_slit_center] + identifier[c4_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[ff_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[ff_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[ff_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[ff] = identifier[ff_a0s] + identifier[ff_a1s] * identifier[csu_bar_slit_center] + identifier[ff_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[slit_gap_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[slit_gap_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[slit_gap_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[slit_gap] = identifier[slit_gap_a0s] + identifier[slit_gap_a1s] * identifier[csu_bar_slit_center] + identifier[slit_gap_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[slit_height_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[slit_height_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[slit_height_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[slit_height] = identifier[slit_height_a0s] + identifier[slit_height_a1s] * identifier[csu_bar_slit_center] + identifier[slit_height_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[theta0_origin_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[theta0_origin_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[theta0_origin_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[theta0_origin] = identifier[theta0_origin_a0s] + identifier[theta0_origin_a1s] * identifier[csu_bar_slit_center] + identifier[theta0_origin_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[theta0_slope_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[theta0_slope_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[theta0_slope_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[theta0_slope] = identifier[theta0_slope_a0s] + identifier[theta0_slope_a1s] * identifier[csu_bar_slit_center] + identifier[theta0_slope_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[x0_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[x0_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[x0_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[x0] = identifier[x0_a0s] + identifier[x0_a1s] * identifier[csu_bar_slit_center] + identifier[x0_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[y0_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[y0_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[y0_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[y0] = identifier[y0_a0s] + identifier[y0_a1s] * identifier[csu_bar_slit_center] + identifier[y0_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[y_baseline_a0s] = identifier[params] [ literal[string] ]. identifier[value]
identifier[y_baseline_a1s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[y_baseline_a2s] = identifier[params] [ literal[string] ]. identifier[value] / literal[int]
identifier[y_baseline] = identifier[y_baseline_a0s] + identifier[y_baseline_a1s] * identifier[csu_bar_slit_center] + identifier[y_baseline_a2s] * identifier[csu_bar_slit_center] ** literal[int]
identifier[theta0] = identifier[theta0_origin] / literal[int] + identifier[theta0_slope] / literal[int] * identifier[islitlet]
keyword[return] identifier[c2] , identifier[c4] , identifier[ff] , identifier[slit_gap] , identifier[slit_height] , identifier[theta0] , identifier[x0] , identifier[y0] , identifier[y_baseline] | def return_params(islitlet, csu_bar_slit_center, params, parmodel):
"""Return individual model parameters from object of type Parameters.
Parameters
----------
islitlet : int
        Slitlet number.
csu_bar_slit_center : float
CSU bar slit center, in mm.
params : :class:`~lmfit.parameter.Parameters`
Parameters to be employed in the prediction of the distorted
boundaries.
parmodel : str
Model to be assumed. Allowed values are 'longslit' and
'multislit'.
Returns
-------
c2 : float
Coefficient corresponding to the term r**2 in distortion
equation.
c4 : float
Coefficient corresponding to the term r**4 in distortion
equation.
ff : float
Scaling factor to be applied to the Y axis.
slit_gap : float
Slit gap.
slit_height : float
Slit height.
theta0 : float
Additional rotation angle (radians).
x0 : float
X coordinate of reference pixel.
y0 : float
Y coordinate of reference pixel.
y_baseline : float
Y coordinate employed as baseline.
"""
if parmodel == 'longslit':
# set each variable in EXPECTED_PARAMETER_LIST to the value
# transferred through 'params'
c2 = params['c2'].value
c4 = params['c4'].value
ff = params['ff'].value
slit_gap = params['slit_gap'].value
slit_height = params['slit_height'].value
theta0_origin = params['theta0_origin'].value
theta0_slope = params['theta0_slope'].value
x0 = params['x0'].value
y0 = params['y0'].value
y_baseline = params['y_baseline'].value # depends on [control=['if'], data=[]]
else:
# set each variable in EXPECTED_PARAMETER_LIST_EXTENDED to the value
# transferred through 'params'
c2_a0s = params['c2_a0s'].value
c2_a1s = params['c2_a1s'].value / 1000.0
c2_a2s = params['c2_a2s'].value / 1000000.0
c2 = c2_a0s + c2_a1s * csu_bar_slit_center + c2_a2s * csu_bar_slit_center ** 2
# ---
c4_a0s = params['c4_a0s'].value
c4_a1s = params['c4_a1s'].value / 1000.0
c4_a2s = params['c4_a2s'].value / 1000000.0
c4 = c4_a0s + c4_a1s * csu_bar_slit_center + c4_a2s * csu_bar_slit_center ** 2
# ---
ff_a0s = params['ff_a0s'].value
ff_a1s = params['ff_a1s'].value / 1000.0
ff_a2s = params['ff_a2s'].value / 1000000.0
ff = ff_a0s + ff_a1s * csu_bar_slit_center + ff_a2s * csu_bar_slit_center ** 2
# ---
slit_gap_a0s = params['slit_gap_a0s'].value
slit_gap_a1s = params['slit_gap_a1s'].value / 1000.0
slit_gap_a2s = params['slit_gap_a2s'].value / 1000000.0
slit_gap = slit_gap_a0s + slit_gap_a1s * csu_bar_slit_center + slit_gap_a2s * csu_bar_slit_center ** 2
# ---
slit_height_a0s = params['slit_height_a0s'].value
slit_height_a1s = params['slit_height_a1s'].value / 1000.0
slit_height_a2s = params['slit_height_a2s'].value / 1000000.0
slit_height = slit_height_a0s + slit_height_a1s * csu_bar_slit_center + slit_height_a2s * csu_bar_slit_center ** 2
# ---
theta0_origin_a0s = params['theta0_origin_a0s'].value
theta0_origin_a1s = params['theta0_origin_a1s'].value / 1000.0
theta0_origin_a2s = params['theta0_origin_a2s'].value / 1000000.0
theta0_origin = theta0_origin_a0s + theta0_origin_a1s * csu_bar_slit_center + theta0_origin_a2s * csu_bar_slit_center ** 2
# ---
theta0_slope_a0s = params['theta0_slope_a0s'].value
theta0_slope_a1s = params['theta0_slope_a1s'].value / 1000.0
theta0_slope_a2s = params['theta0_slope_a2s'].value / 1000000.0
theta0_slope = theta0_slope_a0s + theta0_slope_a1s * csu_bar_slit_center + theta0_slope_a2s * csu_bar_slit_center ** 2
# ---
x0_a0s = params['x0_a0s'].value
x0_a1s = params['x0_a1s'].value / 1000.0
x0_a2s = params['x0_a2s'].value / 1000000.0
x0 = x0_a0s + x0_a1s * csu_bar_slit_center + x0_a2s * csu_bar_slit_center ** 2
# ---
y0_a0s = params['y0_a0s'].value
y0_a1s = params['y0_a1s'].value / 1000.0
y0_a2s = params['y0_a2s'].value / 1000000.0
y0 = y0_a0s + y0_a1s * csu_bar_slit_center + y0_a2s * csu_bar_slit_center ** 2
# ---
y_baseline_a0s = params['y_baseline_a0s'].value
y_baseline_a1s = params['y_baseline_a1s'].value / 1000.0
y_baseline_a2s = params['y_baseline_a2s'].value / 1000000.0
y_baseline = y_baseline_a0s + y_baseline_a1s * csu_bar_slit_center + y_baseline_a2s * csu_bar_slit_center ** 2
theta0 = theta0_origin / 1000.0 + theta0_slope / 10000.0 * islitlet
return (c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline) |
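
A minimal sketch of driving the 'longslit' branch. The values are made up and only the ten parameter names that branch reads are populated; Parameters.add is standard lmfit API:

from lmfit import Parameters

params = Parameters()
for pname, value in [('c2', 1.0e-3), ('c4', -2.0e-6), ('ff', 1.0),
                     ('slit_gap', 1.0), ('slit_height', 33.5),
                     ('theta0_origin', 0.1), ('theta0_slope', 0.01),
                     ('x0', 1024.0), ('y0', 1024.0), ('y_baseline', 2.0)]:
    params.add(pname, value=value)

c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline = return_params(
    islitlet=10, csu_bar_slit_center=170.0, params=params, parmodel='longslit')
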
def process_parsed_args(opts: Namespace, error_fun: Optional[Callable], connect: bool=True) -> Namespace:
"""
Set the defaults for the crc and ontology schemas
:param opts: parsed arguments
:param error_fun: Function to call if error
:param connect: actually connect. (For debugging)
:return: namespace with additional elements added
"""
def setdefault(vn: str, default: object) -> None:
assert vn in opts, "Unknown option"
if not getattr(opts, vn):
setattr(opts, vn, default)
if error_fun and \
(getattr(opts, 'dburl') is None or getattr(opts, 'user') is None or getattr(opts, 'password') is None):
error_fun("db url, user id and password must be supplied")
setdefault('crcdb', opts.dburl)
setdefault('crcuser', opts.user)
setdefault('crcpassword', opts.password)
setdefault('ontodb', opts.dburl)
setdefault('ontouser', opts.user)
setdefault('ontopassword', opts.password)
if connect:
opts.tables = I2B2Tables(opts)
# TODO: This approach needs to be re-thought. As i2b2tablenames is a singleton, any changes here
# impact the entire testing harness
if opts.onttable:
i2b2tablenames.ontology_table = opts.onttable
return opts | def function[process_parsed_args, parameter[opts, error_fun, connect]]:
constant[
Set the defaults for the crc and ontology schemas
:param opts: parsed arguments
:param error_fun: Function to call if error
:param connect: actually connect. (For debugging)
:return: namespace with additional elements added
]
def function[setdefault, parameter[vn, default]]:
assert[compare[name[vn] in name[opts]]]
if <ast.UnaryOp object at 0x7da20c6e58d0> begin[:]
call[name[setattr], parameter[name[opts], name[vn], name[default]]]
if <ast.BoolOp object at 0x7da20c6e7730> begin[:]
call[name[error_fun], parameter[constant[db url, user id and password must be supplied]]]
call[name[setdefault], parameter[constant[crcdb], name[opts].dburl]]
call[name[setdefault], parameter[constant[crcuser], name[opts].user]]
call[name[setdefault], parameter[constant[crcpassword], name[opts].password]]
call[name[setdefault], parameter[constant[ontodb], name[opts].dburl]]
call[name[setdefault], parameter[constant[ontouser], name[opts].user]]
call[name[setdefault], parameter[constant[ontopassword], name[opts].password]]
if name[connect] begin[:]
name[opts].tables assign[=] call[name[I2B2Tables], parameter[name[opts]]]
if name[opts].onttable begin[:]
name[i2b2tablenames].ontology_table assign[=] name[opts].onttable
return[name[opts]] | keyword[def] identifier[process_parsed_args] ( identifier[opts] : identifier[Namespace] , identifier[error_fun] : identifier[Optional] [ identifier[Callable] ], identifier[connect] : identifier[bool] = keyword[True] )-> identifier[Namespace] :
literal[string]
keyword[def] identifier[setdefault] ( identifier[vn] : identifier[str] , identifier[default] : identifier[object] )-> keyword[None] :
keyword[assert] identifier[vn] keyword[in] identifier[opts] , literal[string]
keyword[if] keyword[not] identifier[getattr] ( identifier[opts] , identifier[vn] ):
identifier[setattr] ( identifier[opts] , identifier[vn] , identifier[default] )
keyword[if] identifier[error_fun] keyword[and] ( identifier[getattr] ( identifier[opts] , literal[string] ) keyword[is] keyword[None] keyword[or] identifier[getattr] ( identifier[opts] , literal[string] ) keyword[is] keyword[None] keyword[or] identifier[getattr] ( identifier[opts] , literal[string] ) keyword[is] keyword[None] ):
identifier[error_fun] ( literal[string] )
identifier[setdefault] ( literal[string] , identifier[opts] . identifier[dburl] )
identifier[setdefault] ( literal[string] , identifier[opts] . identifier[user] )
identifier[setdefault] ( literal[string] , identifier[opts] . identifier[password] )
identifier[setdefault] ( literal[string] , identifier[opts] . identifier[dburl] )
identifier[setdefault] ( literal[string] , identifier[opts] . identifier[user] )
identifier[setdefault] ( literal[string] , identifier[opts] . identifier[password] )
keyword[if] identifier[connect] :
identifier[opts] . identifier[tables] = identifier[I2B2Tables] ( identifier[opts] )
keyword[if] identifier[opts] . identifier[onttable] :
identifier[i2b2tablenames] . identifier[ontology_table] = identifier[opts] . identifier[onttable]
keyword[return] identifier[opts] | def process_parsed_args(opts: Namespace, error_fun: Optional[Callable], connect: bool=True) -> Namespace:
"""
Set the defaults for the crc and ontology schemas
:param opts: parsed arguments
:param error_fun: Function to call if error
:param connect: actually connect. (For debugging)
:return: namespace with additional elements added
"""
def setdefault(vn: str, default: object) -> None:
assert vn in opts, 'Unknown option'
if not getattr(opts, vn):
setattr(opts, vn, default) # depends on [control=['if'], data=[]]
if error_fun and (getattr(opts, 'dburl') is None or getattr(opts, 'user') is None or getattr(opts, 'password') is None):
error_fun('db url, user id and password must be supplied') # depends on [control=['if'], data=[]]
setdefault('crcdb', opts.dburl)
setdefault('crcuser', opts.user)
setdefault('crcpassword', opts.password)
setdefault('ontodb', opts.dburl)
setdefault('ontouser', opts.user)
setdefault('ontopassword', opts.password)
if connect:
opts.tables = I2B2Tables(opts) # depends on [control=['if'], data=[]]
# TODO: This approach needs to be re-thought. As i2b2tablenames is a singleton, any changes here
# impact the entire testing harness
if opts.onttable:
i2b2tablenames.ontology_table = opts.onttable # depends on [control=['if'], data=[]]
return opts |
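
A hypothetical invocation with connect=False so the sketch stays self-contained (I2B2Tables would otherwise open a database connection); the URL and credentials are placeholders:

from argparse import Namespace

opts = Namespace(dburl='postgresql://localhost/i2b2', user='demo', password='demo',
                 crcdb=None, crcuser=None, crcpassword=None,
                 ontodb=None, ontouser=None, ontopassword=None, onttable=None)
opts = process_parsed_args(opts, error_fun=None, connect=False)
assert opts.crcdb == 'postgresql://localhost/i2b2'  # defaulted from dburl
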
def register(name=''):
"For backwards compatibility, we support @register(name) syntax."
def reg(widget):
"""A decorator registering a widget class in the widget registry."""
w = widget.class_traits()
Widget.widget_types.register(w['_model_module'].default_value,
w['_model_module_version'].default_value,
w['_model_name'].default_value,
w['_view_module'].default_value,
w['_view_module_version'].default_value,
w['_view_name'].default_value,
widget)
return widget
if isinstance(name, string_types):
import warnings
warnings.warn("Widget registration using a string name has been deprecated. Widget registration now uses a plain `@register` decorator.", DeprecationWarning)
return reg
else:
return reg(name) | def function[register, parameter[name]]:
constant[For backwards compatibility, we support @register(name) syntax.]
def function[reg, parameter[widget]]:
constant[A decorator registering a widget class in the widget registry.]
variable[w] assign[=] call[name[widget].class_traits, parameter[]]
call[name[Widget].widget_types.register, parameter[call[name[w]][constant[_model_module]].default_value, call[name[w]][constant[_model_module_version]].default_value, call[name[w]][constant[_model_name]].default_value, call[name[w]][constant[_view_module]].default_value, call[name[w]][constant[_view_module_version]].default_value, call[name[w]][constant[_view_name]].default_value, name[widget]]]
return[name[widget]]
if call[name[isinstance], parameter[name[name], name[string_types]]] begin[:]
import module[warnings]
call[name[warnings].warn, parameter[constant[Widget registration using a string name has been deprecated. Widget registration now uses a plain `@register` decorator.], name[DeprecationWarning]]]
return[name[reg]] | keyword[def] identifier[register] ( identifier[name] = literal[string] ):
literal[string]
keyword[def] identifier[reg] ( identifier[widget] ):
literal[string]
identifier[w] = identifier[widget] . identifier[class_traits] ()
identifier[Widget] . identifier[widget_types] . identifier[register] ( identifier[w] [ literal[string] ]. identifier[default_value] ,
identifier[w] [ literal[string] ]. identifier[default_value] ,
identifier[w] [ literal[string] ]. identifier[default_value] ,
identifier[w] [ literal[string] ]. identifier[default_value] ,
identifier[w] [ literal[string] ]. identifier[default_value] ,
identifier[w] [ literal[string] ]. identifier[default_value] ,
identifier[widget] )
keyword[return] identifier[widget]
keyword[if] identifier[isinstance] ( identifier[name] , identifier[string_types] ):
keyword[import] identifier[warnings]
identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] )
keyword[return] identifier[reg]
keyword[else] :
keyword[return] identifier[reg] ( identifier[name] ) | def register(name=''):
"""For backwards compatibility, we support @register(name) syntax."""
def reg(widget):
"""A decorator registering a widget class in the widget registry."""
w = widget.class_traits()
Widget.widget_types.register(w['_model_module'].default_value, w['_model_module_version'].default_value, w['_model_name'].default_value, w['_view_module'].default_value, w['_view_module_version'].default_value, w['_view_name'].default_value, widget)
return widget
if isinstance(name, string_types):
import warnings
warnings.warn('Widget registration using a string name has been deprecated. Widget registration now uses a plain `@register` decorator.', DeprecationWarning)
return reg # depends on [control=['if'], data=[]]
else:
return reg(name) |
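
The dual dispatch in isolation: a toy registry mirroring the control flow above (the plain decorator registers directly; the string form warns and returns the decorator). toy_register and _REGISTRY are made-up names so as not to shadow the real register:

import warnings

_REGISTRY = {}

def toy_register(name=''):
    def reg(cls):
        _REGISTRY[cls.__name__] = cls
        return cls
    if isinstance(name, str):
        warnings.warn('string names are deprecated', DeprecationWarning)
        return reg
    return reg(name)  # plain @toy_register: 'name' is actually the class

@toy_register
class A: pass

@toy_register('legacy')
class B: pass

assert set(_REGISTRY) == {'A', 'B'}
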
def close(self):
'''close the graph'''
self.close_graph.set()
if self.is_alive():
self.child.join(2) | def function[close, parameter[self]]:
constant[close the graph]
call[name[self].close_graph.set, parameter[]]
if call[name[self].is_alive, parameter[]] begin[:]
call[name[self].child.join, parameter[constant[2]]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[close_graph] . identifier[set] ()
keyword[if] identifier[self] . identifier[is_alive] ():
identifier[self] . identifier[child] . identifier[join] ( literal[int] ) | def close(self):
"""close the graph"""
self.close_graph.set()
if self.is_alive():
self.child.join(2) # depends on [control=['if'], data=[]] |
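
The same shutdown pattern in a generic, runnable form (not the graph code itself): signal a worker through an Event, then give it a bounded join as close() does:

import threading
import time

stop = threading.Event()

def worker():
    while not stop.is_set():
        time.sleep(0.05)  # stand-in for the real redraw loop

t = threading.Thread(target=worker)
t.start()
stop.set()   # counterpart of self.close_graph.set()
t.join(2)    # counterpart of self.child.join(2)
assert not t.is_alive()
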
def modularity_und_sign(W, ci, qtype='sta'):
'''
This function simply calculates the signed modularity for a given
partition. It does not do automatic partition generation right now.
Parameters
----------
W : NxN np.ndarray
undirected weighted/binary connection matrix with positive and
negative weights
ci : Nx1 np.ndarray
community partition
qtype : str
modularity type. Can be 'sta' (default), 'pos', 'smp', 'gja', 'neg'.
See Rubinov and Sporns (2011) for a description.
Returns
-------
ci : Nx1 np.ndarray
the partition which was input (for consistency of the API)
Q : float
maximized modularity metric
Notes
-----
uses a deterministic algorithm
'''
n = len(W)
_, ci = np.unique(ci, return_inverse=True)
ci += 1
W0 = W * (W > 0) # positive weights matrix
W1 = -W * (W < 0) # negative weights matrix
s0 = np.sum(W0) # positive sum of weights
s1 = np.sum(W1) # negative sum of weights
Knm0 = np.zeros((n, n)) # positive node-to-module degree
Knm1 = np.zeros((n, n)) # negative node-to-module degree
for m in range(int(np.max(ci))): # loop over initial modules
Knm0[:, m] = np.sum(W0[:, ci == m + 1], axis=1)
Knm1[:, m] = np.sum(W1[:, ci == m + 1], axis=1)
Kn0 = np.sum(Knm0, axis=1) # positive node degree
Kn1 = np.sum(Knm1, axis=1) # negative node degree
Km0 = np.sum(Knm0, axis=0) # positive module degree
    Km1 = np.sum(Knm1, axis=0) # negative module degree
if qtype == 'smp':
d0 = 1 / s0
d1 = 1 / s1 # dQ=dQ0/s0-dQ1/s1
elif qtype == 'gja':
d0 = 1 / (s0 + s1)
d1 = 1 / (s0 + s1) # dQ=(dQ0-dQ1)/(s0+s1)
elif qtype == 'sta':
d0 = 1 / s0
d1 = 1 / (s0 + s1) # dQ=dQ0/s0-dQ1/(s0+s1)
elif qtype == 'pos':
d0 = 1 / s0
d1 = 0 # dQ=dQ0/s0
elif qtype == 'neg':
d0 = 0
d1 = 1 / s1 # dQ=-dQ1/s1
else:
raise KeyError('modularity type unknown')
if not s0: # adjust for absent positive weights
s0 = 1
d0 = 0
if not s1: # adjust for absent negative weights
s1 = 1
d1 = 0
m = np.tile(ci, (n, 1))
q0 = (W0 - np.outer(Kn0, Kn0) / s0) * (m == m.T)
q1 = (W1 - np.outer(Kn1, Kn1) / s1) * (m == m.T)
q = d0 * np.sum(q0) - d1 * np.sum(q1)
return ci, q | def function[modularity_und_sign, parameter[W, ci, qtype]]:
constant[
This function simply calculates the signed modularity for a given
partition. It does not do automatic partition generation right now.
Parameters
----------
W : NxN np.ndarray
undirected weighted/binary connection matrix with positive and
negative weights
ci : Nx1 np.ndarray
community partition
qtype : str
modularity type. Can be 'sta' (default), 'pos', 'smp', 'gja', 'neg'.
See Rubinov and Sporns (2011) for a description.
Returns
-------
ci : Nx1 np.ndarray
the partition which was input (for consistency of the API)
Q : float
maximized modularity metric
Notes
-----
uses a deterministic algorithm
]
variable[n] assign[=] call[name[len], parameter[name[W]]]
<ast.Tuple object at 0x7da1b0830790> assign[=] call[name[np].unique, parameter[name[ci]]]
<ast.AugAssign object at 0x7da1b08328c0>
variable[W0] assign[=] binary_operation[name[W] * compare[name[W] greater[>] constant[0]]]
variable[W1] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b08306a0> * compare[name[W] less[<] constant[0]]]
variable[s0] assign[=] call[name[np].sum, parameter[name[W0]]]
variable[s1] assign[=] call[name[np].sum, parameter[name[W1]]]
variable[Knm0] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0830730>, <ast.Name object at 0x7da1b0830100>]]]]
variable[Knm1] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0832ec0>, <ast.Name object at 0x7da1b0833fa0>]]]]
for taget[name[m]] in starred[call[name[range], parameter[call[name[int], parameter[call[name[np].max, parameter[name[ci]]]]]]]] begin[:]
call[name[Knm0]][tuple[[<ast.Slice object at 0x7da1b07042e0>, <ast.Name object at 0x7da1b07042b0>]]] assign[=] call[name[np].sum, parameter[call[name[W0]][tuple[[<ast.Slice object at 0x7da1b0704130>, <ast.Compare object at 0x7da1b0704100>]]]]]
call[name[Knm1]][tuple[[<ast.Slice object at 0x7da1b0704820>, <ast.Name object at 0x7da1b07047f0>]]] assign[=] call[name[np].sum, parameter[call[name[W1]][tuple[[<ast.Slice object at 0x7da1b07040d0>, <ast.Compare object at 0x7da1b0882290>]]]]]
variable[Kn0] assign[=] call[name[np].sum, parameter[name[Knm0]]]
variable[Kn1] assign[=] call[name[np].sum, parameter[name[Knm1]]]
variable[Km0] assign[=] call[name[np].sum, parameter[name[Knm0]]]
variable[Km1] assign[=] call[name[np].sum, parameter[name[Knm1]]]
if compare[name[qtype] equal[==] constant[smp]] begin[:]
variable[d0] assign[=] binary_operation[constant[1] / name[s0]]
variable[d1] assign[=] binary_operation[constant[1] / name[s1]]
if <ast.UnaryOp object at 0x7da1b07943d0> begin[:]
variable[s0] assign[=] constant[1]
variable[d0] assign[=] constant[0]
if <ast.UnaryOp object at 0x7da1b07956f0> begin[:]
variable[s1] assign[=] constant[1]
variable[d1] assign[=] constant[0]
variable[m] assign[=] call[name[np].tile, parameter[name[ci], tuple[[<ast.Name object at 0x7da1b0795030>, <ast.Constant object at 0x7da1b0794fa0>]]]]
variable[q0] assign[=] binary_operation[binary_operation[name[W0] - binary_operation[call[name[np].outer, parameter[name[Kn0], name[Kn0]]] / name[s0]]] * compare[name[m] equal[==] name[m].T]]
variable[q1] assign[=] binary_operation[binary_operation[name[W1] - binary_operation[call[name[np].outer, parameter[name[Kn1], name[Kn1]]] / name[s1]]] * compare[name[m] equal[==] name[m].T]]
variable[q] assign[=] binary_operation[binary_operation[name[d0] * call[name[np].sum, parameter[name[q0]]]] - binary_operation[name[d1] * call[name[np].sum, parameter[name[q1]]]]]
return[tuple[[<ast.Name object at 0x7da1b0795360>, <ast.Name object at 0x7da1b0795390>]]] | keyword[def] identifier[modularity_und_sign] ( identifier[W] , identifier[ci] , identifier[qtype] = literal[string] ):
literal[string]
identifier[n] = identifier[len] ( identifier[W] )
identifier[_] , identifier[ci] = identifier[np] . identifier[unique] ( identifier[ci] , identifier[return_inverse] = keyword[True] )
identifier[ci] += literal[int]
identifier[W0] = identifier[W] *( identifier[W] > literal[int] )
identifier[W1] =- identifier[W] *( identifier[W] < literal[int] )
identifier[s0] = identifier[np] . identifier[sum] ( identifier[W0] )
identifier[s1] = identifier[np] . identifier[sum] ( identifier[W1] )
identifier[Knm0] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[n] ))
identifier[Knm1] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[n] ))
keyword[for] identifier[m] keyword[in] identifier[range] ( identifier[int] ( identifier[np] . identifier[max] ( identifier[ci] ))):
identifier[Knm0] [:, identifier[m] ]= identifier[np] . identifier[sum] ( identifier[W0] [:, identifier[ci] == identifier[m] + literal[int] ], identifier[axis] = literal[int] )
identifier[Knm1] [:, identifier[m] ]= identifier[np] . identifier[sum] ( identifier[W1] [:, identifier[ci] == identifier[m] + literal[int] ], identifier[axis] = literal[int] )
identifier[Kn0] = identifier[np] . identifier[sum] ( identifier[Knm0] , identifier[axis] = literal[int] )
identifier[Kn1] = identifier[np] . identifier[sum] ( identifier[Knm1] , identifier[axis] = literal[int] )
identifier[Km0] = identifier[np] . identifier[sum] ( identifier[Knm0] , identifier[axis] = literal[int] )
identifier[Km1] = identifier[np] . identifier[sum] ( identifier[Knm1] , identifier[axis] = literal[int] )
keyword[if] identifier[qtype] == literal[string] :
identifier[d0] = literal[int] / identifier[s0]
identifier[d1] = literal[int] / identifier[s1]
keyword[elif] identifier[qtype] == literal[string] :
identifier[d0] = literal[int] /( identifier[s0] + identifier[s1] )
identifier[d1] = literal[int] /( identifier[s0] + identifier[s1] )
keyword[elif] identifier[qtype] == literal[string] :
identifier[d0] = literal[int] / identifier[s0]
identifier[d1] = literal[int] /( identifier[s0] + identifier[s1] )
keyword[elif] identifier[qtype] == literal[string] :
identifier[d0] = literal[int] / identifier[s0]
identifier[d1] = literal[int]
keyword[elif] identifier[qtype] == literal[string] :
identifier[d0] = literal[int]
identifier[d1] = literal[int] / identifier[s1]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] )
keyword[if] keyword[not] identifier[s0] :
identifier[s0] = literal[int]
identifier[d0] = literal[int]
keyword[if] keyword[not] identifier[s1] :
identifier[s1] = literal[int]
identifier[d1] = literal[int]
identifier[m] = identifier[np] . identifier[tile] ( identifier[ci] ,( identifier[n] , literal[int] ))
identifier[q0] =( identifier[W0] - identifier[np] . identifier[outer] ( identifier[Kn0] , identifier[Kn0] )/ identifier[s0] )*( identifier[m] == identifier[m] . identifier[T] )
identifier[q1] =( identifier[W1] - identifier[np] . identifier[outer] ( identifier[Kn1] , identifier[Kn1] )/ identifier[s1] )*( identifier[m] == identifier[m] . identifier[T] )
identifier[q] = identifier[d0] * identifier[np] . identifier[sum] ( identifier[q0] )- identifier[d1] * identifier[np] . identifier[sum] ( identifier[q1] )
keyword[return] identifier[ci] , identifier[q] | def modularity_und_sign(W, ci, qtype='sta'):
"""
This function simply calculates the signed modularity for a given
partition. It does not do automatic partition generation right now.
Parameters
----------
W : NxN np.ndarray
undirected weighted/binary connection matrix with positive and
negative weights
ci : Nx1 np.ndarray
community partition
qtype : str
modularity type. Can be 'sta' (default), 'pos', 'smp', 'gja', 'neg'.
See Rubinov and Sporns (2011) for a description.
Returns
-------
ci : Nx1 np.ndarray
the partition which was input (for consistency of the API)
Q : float
maximized modularity metric
Notes
-----
uses a deterministic algorithm
"""
n = len(W)
(_, ci) = np.unique(ci, return_inverse=True)
ci += 1
W0 = W * (W > 0) # positive weights matrix
W1 = -W * (W < 0) # negative weights matrix
s0 = np.sum(W0) # positive sum of weights
s1 = np.sum(W1) # negative sum of weights
Knm0 = np.zeros((n, n)) # positive node-to-module degree
Knm1 = np.zeros((n, n)) # negative node-to-module degree
for m in range(int(np.max(ci))): # loop over initial modules
Knm0[:, m] = np.sum(W0[:, ci == m + 1], axis=1)
Knm1[:, m] = np.sum(W1[:, ci == m + 1], axis=1) # depends on [control=['for'], data=['m']]
Kn0 = np.sum(Knm0, axis=1) # positive node degree
Kn1 = np.sum(Knm1, axis=1) # negative node degree
Km0 = np.sum(Knm0, axis=0) # positive module degree
    Km1 = np.sum(Knm1, axis=0) # negative module degree
if qtype == 'smp':
d0 = 1 / s0
d1 = 1 / s1 # dQ=dQ0/s0-dQ1/s1 # depends on [control=['if'], data=[]]
elif qtype == 'gja':
d0 = 1 / (s0 + s1)
d1 = 1 / (s0 + s1) # dQ=(dQ0-dQ1)/(s0+s1) # depends on [control=['if'], data=[]]
elif qtype == 'sta':
d0 = 1 / s0
d1 = 1 / (s0 + s1) # dQ=dQ0/s0-dQ1/(s0+s1) # depends on [control=['if'], data=[]]
elif qtype == 'pos':
d0 = 1 / s0
d1 = 0 # dQ=dQ0/s0 # depends on [control=['if'], data=[]]
elif qtype == 'neg':
d0 = 0
d1 = 1 / s1 # dQ=-dQ1/s1 # depends on [control=['if'], data=[]]
else:
raise KeyError('modularity type unknown')
if not s0: # adjust for absent positive weights
s0 = 1
d0 = 0 # depends on [control=['if'], data=[]]
if not s1: # adjust for absent negative weights
s1 = 1
d1 = 0 # depends on [control=['if'], data=[]]
m = np.tile(ci, (n, 1))
q0 = (W0 - np.outer(Kn0, Kn0) / s0) * (m == m.T)
q1 = (W1 - np.outer(Kn1, Kn1) / s1) * (m == m.T)
q = d0 * np.sum(q0) - d1 * np.sum(q1)
return (ci, q) |
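
A small end-to-end check of modularity_und_sign: two 2-node communities with positive edges inside and a single negative edge between them, so the partition should score positively:

import numpy as np

W = np.array([[ 0.0,  1.0, -0.5,  0.0],
              [ 1.0,  0.0,  0.0,  0.0],
              [-0.5,  0.0,  0.0,  1.0],
              [ 0.0,  0.0,  1.0,  0.0]])
ci = np.array([1, 1, 2, 2])
ci_out, q = modularity_und_sign(W, ci, qtype='sta')
print(q)  # positive: positive weight stays within modules, negative weight between
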
def get_xyz(self, xyz_axis=0):
"""Return a vector array of the x, y, and z coordinates.
Parameters
----------
xyz_axis : int, optional
The axis in the final array along which the x, y, z components
should be stored (default: 0).
Returns
-------
xs : `~astropy.units.Quantity`
With dimension 3 along ``xyz_axis``.
"""
# Add new axis in x, y, z so one can concatenate them around it.
# NOTE: just use np.stack once our minimum numpy version is 1.10.
result_ndim = self.ndim + 1
if not -result_ndim <= xyz_axis < result_ndim:
raise IndexError('xyz_axis {0} out of bounds [-{1}, {1})'
.format(xyz_axis, result_ndim))
if xyz_axis < 0:
xyz_axis += result_ndim
# Get components to the same units (very fast for identical units)
# since np.concatenate cannot deal with quantity.
unit = self._x1.unit
sh = self.shape
sh = sh[:xyz_axis] + (1,) + sh[xyz_axis:]
components = [getattr(self, '_'+name).reshape(sh).to(unit).value
for name in self.attr_classes]
xs_value = np.concatenate(components, axis=xyz_axis)
return u.Quantity(xs_value, unit=unit, copy=False) | def function[get_xyz, parameter[self, xyz_axis]]:
constant[Return a vector array of the x, y, and z coordinates.
Parameters
----------
xyz_axis : int, optional
The axis in the final array along which the x, y, z components
should be stored (default: 0).
Returns
-------
xs : `~astropy.units.Quantity`
With dimension 3 along ``xyz_axis``.
]
variable[result_ndim] assign[=] binary_operation[name[self].ndim + constant[1]]
if <ast.UnaryOp object at 0x7da1b0c32260> begin[:]
<ast.Raise object at 0x7da1b0c329b0>
if compare[name[xyz_axis] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b0c32920>
variable[unit] assign[=] name[self]._x1.unit
variable[sh] assign[=] name[self].shape
variable[sh] assign[=] binary_operation[binary_operation[call[name[sh]][<ast.Slice object at 0x7da1b0c325f0>] + tuple[[<ast.Constant object at 0x7da1b0d01930>]]] + call[name[sh]][<ast.Slice object at 0x7da1b0d02a70>]]
variable[components] assign[=] <ast.ListComp object at 0x7da1b0d00820>
variable[xs_value] assign[=] call[name[np].concatenate, parameter[name[components]]]
return[call[name[u].Quantity, parameter[name[xs_value]]]] | keyword[def] identifier[get_xyz] ( identifier[self] , identifier[xyz_axis] = literal[int] ):
literal[string]
identifier[result_ndim] = identifier[self] . identifier[ndim] + literal[int]
keyword[if] keyword[not] - identifier[result_ndim] <= identifier[xyz_axis] < identifier[result_ndim] :
keyword[raise] identifier[IndexError] ( literal[string]
. identifier[format] ( identifier[xyz_axis] , identifier[result_ndim] ))
keyword[if] identifier[xyz_axis] < literal[int] :
identifier[xyz_axis] += identifier[result_ndim]
identifier[unit] = identifier[self] . identifier[_x1] . identifier[unit]
identifier[sh] = identifier[self] . identifier[shape]
identifier[sh] = identifier[sh] [: identifier[xyz_axis] ]+( literal[int] ,)+ identifier[sh] [ identifier[xyz_axis] :]
identifier[components] =[ identifier[getattr] ( identifier[self] , literal[string] + identifier[name] ). identifier[reshape] ( identifier[sh] ). identifier[to] ( identifier[unit] ). identifier[value]
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[attr_classes] ]
identifier[xs_value] = identifier[np] . identifier[concatenate] ( identifier[components] , identifier[axis] = identifier[xyz_axis] )
keyword[return] identifier[u] . identifier[Quantity] ( identifier[xs_value] , identifier[unit] = identifier[unit] , identifier[copy] = keyword[False] ) | def get_xyz(self, xyz_axis=0):
"""Return a vector array of the x, y, and z coordinates.
Parameters
----------
xyz_axis : int, optional
The axis in the final array along which the x, y, z components
should be stored (default: 0).
Returns
-------
xs : `~astropy.units.Quantity`
With dimension 3 along ``xyz_axis``.
"""
# Add new axis in x, y, z so one can concatenate them around it.
# NOTE: just use np.stack once our minimum numpy version is 1.10.
result_ndim = self.ndim + 1
if not -result_ndim <= xyz_axis < result_ndim:
raise IndexError('xyz_axis {0} out of bounds [-{1}, {1})'.format(xyz_axis, result_ndim)) # depends on [control=['if'], data=[]]
if xyz_axis < 0:
xyz_axis += result_ndim # depends on [control=['if'], data=['xyz_axis']]
# Get components to the same units (very fast for identical units)
# since np.concatenate cannot deal with quantity.
unit = self._x1.unit
sh = self.shape
sh = sh[:xyz_axis] + (1,) + sh[xyz_axis:]
components = [getattr(self, '_' + name).reshape(sh).to(unit).value for name in self.attr_classes]
xs_value = np.concatenate(components, axis=xyz_axis)
return u.Quantity(xs_value, unit=unit, copy=False) |
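This mirrors the get_xyz method on astropy's representation classes; a hedged usage sketch via CartesianRepresentation, a public class exposing the same call:

import astropy.units as u
from astropy.coordinates import CartesianRepresentation

r = CartesianRepresentation([1.0, 2.0] * u.kpc,
                            [3.0, 4.0] * u.kpc,
                            [5.0, 6.0] * u.kpc)
xyz = r.get_xyz(xyz_axis=0)   # Quantity of shape (3, 2) in kpc
print(xyz.shape, xyz.unit)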
def Jz(self,**kwargs):
"""
NAME:
Jz
PURPOSE:
Calculate the vertical action
INPUT:
+scipy.integrate.quad keywords
OUTPUT:
J_z(z,vz)/ro/vc + estimate of the error
HISTORY:
2012-06-01 - Written - Bovy (IAS)
"""
if hasattr(self,'_Jz'):
return self._Jz
zmax= self.calczmax()
if zmax == -9999.99: return nu.array([9999.99,nu.nan])
Ez= calcEz(self._z,self._vz,self._verticalpot)
self._Jz= 2.*integrate.quad(_JzIntegrand,0.,zmax,
args=(Ez,self._verticalpot),
**kwargs)[0]/nu.pi
return self._Jz | def function[Jz, parameter[self]]:
constant[
NAME:
Jz
PURPOSE:
Calculate the vertical action
INPUT:
+scipy.integrate.quad keywords
OUTPUT:
J_z(z,vz)/ro/vc + estimate of the error
HISTORY:
2012-06-01 - Written - Bovy (IAS)
]
if call[name[hasattr], parameter[name[self], constant[_Jz]]] begin[:]
return[name[self]._Jz]
variable[zmax] assign[=] call[name[self].calczmax, parameter[]]
if compare[name[zmax] equal[==] <ast.UnaryOp object at 0x7da1b0e9d480>] begin[:]
return[call[name[nu].array, parameter[list[[<ast.Constant object at 0x7da1b0e9e830>, <ast.Attribute object at 0x7da1b0e9cd90>]]]]]
variable[Ez] assign[=] call[name[calcEz], parameter[name[self]._z, name[self]._vz, name[self]._verticalpot]]
name[self]._Jz assign[=] binary_operation[binary_operation[constant[2.0] * call[call[name[integrate].quad, parameter[name[_JzIntegrand], constant[0.0], name[zmax]]]][constant[0]]] / name[nu].pi]
return[name[self]._Jz] | keyword[def] identifier[Jz] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] identifier[self] . identifier[_Jz]
identifier[zmax] = identifier[self] . identifier[calczmax] ()
keyword[if] identifier[zmax] ==- literal[int] : keyword[return] identifier[nu] . identifier[array] ([ literal[int] , identifier[nu] . identifier[nan] ])
identifier[Ez] = identifier[calcEz] ( identifier[self] . identifier[_z] , identifier[self] . identifier[_vz] , identifier[self] . identifier[_verticalpot] )
identifier[self] . identifier[_Jz] = literal[int] * identifier[integrate] . identifier[quad] ( identifier[_JzIntegrand] , literal[int] , identifier[zmax] ,
identifier[args] =( identifier[Ez] , identifier[self] . identifier[_verticalpot] ),
** identifier[kwargs] )[ literal[int] ]/ identifier[nu] . identifier[pi]
keyword[return] identifier[self] . identifier[_Jz] | def Jz(self, **kwargs):
"""
NAME:
Jz
PURPOSE:
Calculate the vertical action
INPUT:
+scipy.integrate.quad keywords
OUTPUT:
J_z(z,vz)/ro/vc + estimate of the error
HISTORY:
2012-06-01 - Written - Bovy (IAS)
"""
if hasattr(self, '_Jz'):
return self._Jz # depends on [control=['if'], data=[]]
zmax = self.calczmax()
if zmax == -9999.99:
return nu.array([9999.99, nu.nan]) # depends on [control=['if'], data=[]]
Ez = calcEz(self._z, self._vz, self._verticalpot)
self._Jz = 2.0 * integrate.quad(_JzIntegrand, 0.0, zmax, args=(Ez, self._verticalpot), **kwargs)[0] / nu.pi
return self._Jz |
def setCurrentRule( self, rule ):
"""
Sets the current query rule for this widget, updating its widget \
editor if the types do not match.
:param rule | <QueryRule> || None
"""
curr_rule = self.currentRule()
if ( curr_rule == rule ):
return
self._currentRule = rule
curr_op = self.uiOperatorDDL.currentText()
self.uiOperatorDDL.blockSignals(True)
self.uiOperatorDDL.clear()
if ( rule ):
self.uiOperatorDDL.addItems(rule.operators())
index = self.uiOperatorDDL.findText(curr_op)
if ( index != -1 ):
self.uiOperatorDDL.setCurrentIndex(index)
self.uiOperatorDDL.blockSignals(False)
self.updateEditor() | def function[setCurrentRule, parameter[self, rule]]:
constant[
Sets the current query rule for this widget, updating its widget editor if the types do not match.
:param rule | <QueryRule> || None
]
variable[curr_rule] assign[=] call[name[self].currentRule, parameter[]]
if compare[name[curr_rule] equal[==] name[rule]] begin[:]
return[None]
name[self]._currentRule assign[=] name[rule]
variable[curr_op] assign[=] call[name[self].uiOperatorDDL.currentText, parameter[]]
call[name[self].uiOperatorDDL.blockSignals, parameter[constant[True]]]
call[name[self].uiOperatorDDL.clear, parameter[]]
if name[rule] begin[:]
call[name[self].uiOperatorDDL.addItems, parameter[call[name[rule].operators, parameter[]]]]
variable[index] assign[=] call[name[self].uiOperatorDDL.findText, parameter[name[curr_op]]]
if compare[name[index] not_equal[!=] <ast.UnaryOp object at 0x7da20c796a10>] begin[:]
call[name[self].uiOperatorDDL.setCurrentIndex, parameter[name[index]]]
call[name[self].uiOperatorDDL.blockSignals, parameter[constant[False]]]
call[name[self].updateEditor, parameter[]] | keyword[def] identifier[setCurrentRule] ( identifier[self] , identifier[rule] ):
literal[string]
identifier[curr_rule] = identifier[self] . identifier[currentRule] ()
keyword[if] ( identifier[curr_rule] == identifier[rule] ):
keyword[return]
identifier[self] . identifier[_currentRule] = identifier[rule]
identifier[curr_op] = identifier[self] . identifier[uiOperatorDDL] . identifier[currentText] ()
identifier[self] . identifier[uiOperatorDDL] . identifier[blockSignals] ( keyword[True] )
identifier[self] . identifier[uiOperatorDDL] . identifier[clear] ()
keyword[if] ( identifier[rule] ):
identifier[self] . identifier[uiOperatorDDL] . identifier[addItems] ( identifier[rule] . identifier[operators] ())
identifier[index] = identifier[self] . identifier[uiOperatorDDL] . identifier[findText] ( identifier[curr_op] )
keyword[if] ( identifier[index] !=- literal[int] ):
identifier[self] . identifier[uiOperatorDDL] . identifier[setCurrentIndex] ( identifier[index] )
identifier[self] . identifier[uiOperatorDDL] . identifier[blockSignals] ( keyword[False] )
identifier[self] . identifier[updateEditor] () | def setCurrentRule(self, rule):
"""
Sets the current query rule for this widget, updating its widget editor if the types do not match.
:param rule | <QueryRule> || None
"""
curr_rule = self.currentRule()
if curr_rule == rule:
return # depends on [control=['if'], data=[]]
self._currentRule = rule
curr_op = self.uiOperatorDDL.currentText()
self.uiOperatorDDL.blockSignals(True)
self.uiOperatorDDL.clear()
if rule:
self.uiOperatorDDL.addItems(rule.operators())
index = self.uiOperatorDDL.findText(curr_op)
if index != -1:
self.uiOperatorDDL.setCurrentIndex(index) # depends on [control=['if'], data=['index']] # depends on [control=['if'], data=[]]
self.uiOperatorDDL.blockSignals(False)
self.updateEditor() |
def _from_dict(cls, _dict):
"""Initialize a SemanticRolesResultObject object from a json dictionary."""
args = {}
if 'text' in _dict:
args['text'] = _dict.get('text')
if 'keywords' in _dict:
args['keywords'] = [
SemanticRolesKeyword._from_dict(x)
for x in (_dict.get('keywords'))
]
return cls(**args) | def function[_from_dict, parameter[cls, _dict]]:
constant[Initialize a SemanticRolesResultObject object from a json dictionary.]
variable[args] assign[=] dictionary[[], []]
if compare[constant[text] in name[_dict]] begin[:]
call[name[args]][constant[text]] assign[=] call[name[_dict].get, parameter[constant[text]]]
if compare[constant[keywords] in name[_dict]] begin[:]
call[name[args]][constant[keywords]] assign[=] <ast.ListComp object at 0x7da18bcc81c0>
return[call[name[cls], parameter[]]] | keyword[def] identifier[_from_dict] ( identifier[cls] , identifier[_dict] ):
literal[string]
identifier[args] ={}
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]= identifier[_dict] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[_dict] :
identifier[args] [ literal[string] ]=[
identifier[SemanticRolesKeyword] . identifier[_from_dict] ( identifier[x] )
keyword[for] identifier[x] keyword[in] ( identifier[_dict] . identifier[get] ( literal[string] ))
]
keyword[return] identifier[cls] (** identifier[args] ) | def _from_dict(cls, _dict):
"""Initialize a SemanticRolesResultObject object from a json dictionary."""
args = {}
if 'text' in _dict:
args['text'] = _dict.get('text') # depends on [control=['if'], data=['_dict']]
if 'keywords' in _dict:
args['keywords'] = [SemanticRolesKeyword._from_dict(x) for x in _dict.get('keywords')] # depends on [control=['if'], data=['_dict']]
return cls(**args) |
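A hedged round-trip sketch, assuming SemanticRolesResultObject and SemanticRolesKeyword are the Watson NLU model classes defined alongside this method; the payload mimics a service response:

payload = {
    'text': 'the engineers',
    'keywords': [{'text': 'engineers'}],
}
obj = SemanticRolesResultObject._from_dict(payload)
print(obj.text)              # 'the engineers'
print(obj.keywords[0].text)  # 'engineers'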
def gravitational_force(position_a, mass_a, position_b, mass_b):
"""Returns the gravitational force between the two bodies a and b."""
distance = distance_between(position_a, position_b)
# Calculate the direction and magnitude of the force.
angle = math.atan2(position_a[1] - position_b[1], position_a[0] - position_b[0])
magnitude = G * mass_a * mass_b / (distance**2)
# Find the x and y components of the force.
# Determine sign based on which one is the larger body.
sign = -1 if mass_b > mass_a else 1
x_force = sign * magnitude * math.cos(angle)
y_force = sign * magnitude * math.sin(angle)
return x_force, y_force | def function[gravitational_force, parameter[position_a, mass_a, position_b, mass_b]]:
constant[Returns the gravitational force between the two bodies a and b.]
variable[distance] assign[=] call[name[distance_between], parameter[name[position_a], name[position_b]]]
variable[angle] assign[=] call[name[math].atan2, parameter[binary_operation[call[name[position_a]][constant[1]] - call[name[position_b]][constant[1]]], binary_operation[call[name[position_a]][constant[0]] - call[name[position_b]][constant[0]]]]]
variable[magnitude] assign[=] binary_operation[binary_operation[binary_operation[name[G] * name[mass_a]] * name[mass_b]] / binary_operation[name[distance] ** constant[2]]]
variable[sign] assign[=] <ast.IfExp object at 0x7da1b15e8790>
variable[x_force] assign[=] binary_operation[binary_operation[name[sign] * name[magnitude]] * call[name[math].cos, parameter[name[angle]]]]
variable[y_force] assign[=] binary_operation[binary_operation[name[sign] * name[magnitude]] * call[name[math].sin, parameter[name[angle]]]]
return[tuple[[<ast.Name object at 0x7da1b159b5b0>, <ast.Name object at 0x7da1b1599240>]]] | keyword[def] identifier[gravitational_force] ( identifier[position_a] , identifier[mass_a] , identifier[position_b] , identifier[mass_b] ):
literal[string]
identifier[distance] = identifier[distance_between] ( identifier[position_a] , identifier[position_b] )
identifier[angle] = identifier[math] . identifier[atan2] ( identifier[position_a] [ literal[int] ]- identifier[position_b] [ literal[int] ], identifier[position_a] [ literal[int] ]- identifier[position_b] [ literal[int] ])
identifier[magnitude] = identifier[G] * identifier[mass_a] * identifier[mass_b] /( identifier[distance] ** literal[int] )
identifier[sign] =- literal[int] keyword[if] identifier[mass_b] > identifier[mass_a] keyword[else] literal[int]
identifier[x_force] = identifier[sign] * identifier[magnitude] * identifier[math] . identifier[cos] ( identifier[angle] )
identifier[y_force] = identifier[sign] * identifier[magnitude] * identifier[math] . identifier[sin] ( identifier[angle] )
keyword[return] identifier[x_force] , identifier[y_force] | def gravitational_force(position_a, mass_a, position_b, mass_b):
"""Returns the gravitational force between the two bodies a and b."""
distance = distance_between(position_a, position_b)
# Calculate the direction and magnitude of the force.
angle = math.atan2(position_a[1] - position_b[1], position_a[0] - position_b[0])
magnitude = G * mass_a * mass_b / distance ** 2
# Find the x and y components of the force.
# Determine sign based on which one is the larger body.
sign = -1 if mass_b > mass_a else 1
x_force = sign * magnitude * math.cos(angle)
y_force = sign * magnitude * math.sin(angle)
return (x_force, y_force) |
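A worked check, with G and a Euclidean distance_between helper supplied here so the run is self-contained (both are assumed module-level names in the original):

import math

G = 6.674e-11  # m^3 kg^-1 s^-2, assumed module-level constant

def distance_between(a, b):
    # Assumed helper: Euclidean distance between two 2-D points.
    return math.hypot(a[0] - b[0], a[1] - b[1])

# 1000 kg body at the origin, an Earth-mass body one Earth radius away on +x.
fx, fy = gravitational_force((0.0, 0.0), 1000.0, (6.37e6, 0.0), 5.97e24)
print(fx, fy)  # ~ (9.8e3, 0.0): the force points toward the heavier body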
def inline_css(self, html):
"""Inlines CSS defined in external style sheets.
"""
premailer = Premailer(html)
inlined_html = premailer.transform(pretty_print=True)
return inlined_html | def function[inline_css, parameter[self, html]]:
constant[Inlines CSS defined in external style sheets.
]
variable[premailer] assign[=] call[name[Premailer], parameter[name[html]]]
variable[inlined_html] assign[=] call[name[premailer].transform, parameter[]]
return[name[inlined_html]] | keyword[def] identifier[inline_css] ( identifier[self] , identifier[html] ):
literal[string]
identifier[premailer] = identifier[Premailer] ( identifier[html] )
identifier[inlined_html] = identifier[premailer] . identifier[transform] ( identifier[pretty_print] = keyword[True] )
keyword[return] identifier[inlined_html] | def inline_css(self, html):
"""Inlines CSS defined in external style sheets.
"""
premailer = Premailer(html)
inlined_html = premailer.transform(pretty_print=True)
return inlined_html |
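Premailer's class-based API is what this wraps; a small end-to-end example:

from premailer import Premailer

html = ('<html><head><style>p { color: red; }</style></head>'
        '<body><p>Hello</p></body></html>')
print(Premailer(html).transform(pretty_print=True))
# the <style> rules are moved onto style="" attributes, e.g. <p style="color:red">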
def approximate_eig(self, epsilon=1e-6):
""" Compute low-rank approximation of the eigenvalue decomposition of target matrix.
If spd is True, the decomposition will be conducted while ensuring that the spectrum of `A_k^{-1}` is positive.
Parameters
----------
epsilon : float, optional, default 1e-6
Cutoff for eigenvalue norms. If negative eigenvalues occur, with norms larger than epsilon, the largest
negative eigenvalue norm will be used instead of epsilon, i.e. a band including all negative eigenvalues
will be cut off.
Returns
-------
s : ndarray((m,), dtype=float)
approximated eigenvalues. Number of eigenvalues returned is at most the number of columns used in the
Nystroem approximation, but may be smaller depending on epsilon.
W : ndarray((n,m), dtype=float)
approximated eigenvectors in columns. Number of eigenvectors returned is at most the number of columns
used in the Nystroem approximation, but may be smaller depending on epsilon.
"""
L = self.approximate_cholesky(epsilon=epsilon)
LL = np.dot(L.T, L)
s, V = np.linalg.eigh(LL)
# sort
s, V = sort_by_norm(s, V)
# back-transform eigenvectors
Linv = np.linalg.pinv(L.T)
V = np.dot(Linv, V)
# normalize eigenvectors
ncol = V.shape[1]
for i in range(ncol):
if not np.allclose(V[:, i], 0):
V[:, i] /= np.sqrt(np.dot(V[:, i], V[:, i]))
return s, V | def function[approximate_eig, parameter[self, epsilon]]:
constant[ Compute low-rank approximation of the eigenvalue decomposition of target matrix.
If spd is True, the decomposition will be conducted while ensuring that the spectrum of `A_k^{-1}` is positive.
Parameters
----------
epsilon : float, optional, default 1e-6
Cutoff for eigenvalue norms. If negative eigenvalues occur, with norms larger than epsilon, the largest
negative eigenvalue norm will be used instead of epsilon, i.e. a band including all negative eigenvalues
will be cut off.
Returns
-------
s : ndarray((m,), dtype=float)
approximated eigenvalues. Number of eigenvalues returned is at most the number of columns used in the
Nystroem approximation, but may be smaller depending on epsilon.
W : ndarray((n,m), dtype=float)
approximated eigenvectors in columns. Number of eigenvectors returned is at most the number of columns
used in the Nystroem approximation, but may be smaller depending on epsilon.
]
variable[L] assign[=] call[name[self].approximate_cholesky, parameter[]]
variable[LL] assign[=] call[name[np].dot, parameter[name[L].T, name[L]]]
<ast.Tuple object at 0x7da204565720> assign[=] call[name[np].linalg.eigh, parameter[name[LL]]]
<ast.Tuple object at 0x7da204567430> assign[=] call[name[sort_by_norm], parameter[name[s], name[V]]]
variable[Linv] assign[=] call[name[np].linalg.pinv, parameter[name[L].T]]
variable[V] assign[=] call[name[np].dot, parameter[name[Linv], name[V]]]
variable[ncol] assign[=] call[name[V].shape][constant[1]]
for taget[name[i]] in starred[call[name[range], parameter[name[ncol]]]] begin[:]
if <ast.UnaryOp object at 0x7da204566890> begin[:]
<ast.AugAssign object at 0x7da204565510>
return[tuple[[<ast.Name object at 0x7da204564700>, <ast.Name object at 0x7da204566fb0>]]] | keyword[def] identifier[approximate_eig] ( identifier[self] , identifier[epsilon] = literal[int] ):
literal[string]
identifier[L] = identifier[self] . identifier[approximate_cholesky] ( identifier[epsilon] = identifier[epsilon] )
identifier[LL] = identifier[np] . identifier[dot] ( identifier[L] . identifier[T] , identifier[L] )
identifier[s] , identifier[V] = identifier[np] . identifier[linalg] . identifier[eigh] ( identifier[LL] )
identifier[s] , identifier[V] = identifier[sort_by_norm] ( identifier[s] , identifier[V] )
identifier[Linv] = identifier[np] . identifier[linalg] . identifier[pinv] ( identifier[L] . identifier[T] )
identifier[V] = identifier[np] . identifier[dot] ( identifier[Linv] , identifier[V] )
identifier[ncol] = identifier[V] . identifier[shape] [ literal[int] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ncol] ):
keyword[if] keyword[not] identifier[np] . identifier[allclose] ( identifier[V] [:, identifier[i] ], literal[int] ):
identifier[V] [:, identifier[i] ]/= identifier[np] . identifier[sqrt] ( identifier[np] . identifier[dot] ( identifier[V] [:, identifier[i] ], identifier[V] [:, identifier[i] ]))
keyword[return] identifier[s] , identifier[V] | def approximate_eig(self, epsilon=1e-06):
""" Compute low-rank approximation of the eigenvalue decomposition of target matrix.
If spd is True, the decomposition will be conducted while ensuring that the spectrum of `A_k^{-1}` is positive.
Parameters
----------
epsilon : float, optional, default 1e-6
Cutoff for eigenvalue norms. If negative eigenvalues occur, with norms larger than epsilon, the largest
negative eigenvalue norm will be used instead of epsilon, i.e. a band including all negative eigenvalues
will be cut off.
Returns
-------
s : ndarray((m,), dtype=float)
approximated eigenvalues. Number of eigenvalues returned is at most the number of columns used in the
Nystroem approximation, but may be smaller depending on epsilon.
W : ndarray((n,m), dtype=float)
approximated eigenvectors in columns. Number of eigenvectors returned is at most the number of columns
used in the Nystroem approximation, but may be smaller depending on epsilon.
"""
L = self.approximate_cholesky(epsilon=epsilon)
LL = np.dot(L.T, L)
(s, V) = np.linalg.eigh(LL)
# sort
(s, V) = sort_by_norm(s, V)
# back-transform eigenvectors
Linv = np.linalg.pinv(L.T)
V = np.dot(Linv, V)
# normalize eigenvectors
ncol = V.shape[1]
for i in range(ncol):
if not np.allclose(V[:, i], 0):
V[:, i] /= np.sqrt(np.dot(V[:, i], V[:, i])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return (s, V) |
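The lift-back step is the core of the method and can be checked standalone; a sketch with a random rank-5 factor L standing in for approximate_cholesky's output (sort_by_norm is inlined as a descending |eigenvalue| sort):

import numpy as np

rng = np.random.default_rng(0)
L = rng.standard_normal((50, 5))          # low-rank factor, A ~ L @ L.T

s, V = np.linalg.eigh(L.T @ L)            # small 5x5 eigenproblem
order = np.argsort(np.abs(s))[::-1]       # sort_by_norm equivalent
s, V = s[order], V[:, order]
V = np.linalg.pinv(L.T) @ V               # back-transform to the full space
V /= np.sqrt(np.sum(V * V, axis=0))       # normalize each eigenvector

A = L @ L.T
print(np.allclose(A @ V, V * s))          # True: exact eigenpairs of rank-5 A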
def create_unused_courses_report(self, account_id, term_id=None):
"""
Convenience method for create_report, for creating an unused courses
report.
"""
return self.create_report(ReportType.UNUSED_COURSES, account_id,
term_id) | def function[create_unused_courses_report, parameter[self, account_id, term_id]]:
constant[
Convenience method for create_report, for creating an unused courses
report.
]
return[call[name[self].create_report, parameter[name[ReportType].UNUSED_COURSES, name[account_id], name[term_id]]]] | keyword[def] identifier[create_unused_courses_report] ( identifier[self] , identifier[account_id] , identifier[term_id] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[create_report] ( identifier[ReportType] . identifier[UNUSED_COURSES] , identifier[account_id] ,
identifier[term_id] ) | def create_unused_courses_report(self, account_id, term_id=None):
"""
Convenience method for create_report, for creating an unused courses
report.
"""
return self.create_report(ReportType.UNUSED_COURSES, account_id, term_id) |
def mapConcat(func, *iterables):
"""Similar to `map` but the instead of collecting the return values of
`func` in a list, the items of each return value are instaed collected
(so `func` must return an iterable type).
Examples:
>>> mapConcat(lambda x:[x], [1,2,3])
[1, 2, 3]
>>> mapConcat(lambda x: [x,str(x)], [1,2,3])
[1, '1', 2, '2', 3, '3']
"""
return [e for l in imap(func, *iterables) for e in l] | def function[mapConcat, parameter[func]]:
constant[Similar to `map`, but instead of collecting the return values of
    `func` in a list, the items of each return value are collected instead
(so `func` must return an iterable type).
Examples:
>>> mapConcat(lambda x:[x], [1,2,3])
[1, 2, 3]
>>> mapConcat(lambda x: [x,str(x)], [1,2,3])
[1, '1', 2, '2', 3, '3']
]
return[<ast.ListComp object at 0x7da1b1024d00>] | keyword[def] identifier[mapConcat] ( identifier[func] ,* identifier[iterables] ):
literal[string]
keyword[return] [ identifier[e] keyword[for] identifier[l] keyword[in] identifier[imap] ( identifier[func] ,* identifier[iterables] ) keyword[for] identifier[e] keyword[in] identifier[l] ] | def mapConcat(func, *iterables):
"""Similar to `map` but the instead of collecting the return values of
`func` in a list, the items of each return value are instaed collected
(so `func` must return an iterable type).
Examples:
>>> mapConcat(lambda x:[x], [1,2,3])
[1, 2, 3]
>>> mapConcat(lambda x: [x,str(x)], [1,2,3])
[1, '1', 2, '2', 3, '3']
"""
return [e for l in imap(func, *iterables) for e in l] |
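imap here is Python 2's itertools.imap; on Python 3 the built-in map plus chain.from_iterable gives the same behaviour:

from itertools import chain

def map_concat(func, *iterables):
    # Python 3 equivalent of mapConcat above.
    return list(chain.from_iterable(map(func, *iterables)))

print(map_concat(lambda x: [x, str(x)], [1, 2, 3]))
# [1, '1', 2, '2', 3, '3']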
def render(self, **kwargs):
"""Render the GeoJson/TopoJson and color scale objects."""
if self.color_scale:
# ColorMap needs Map as its parent
assert isinstance(self._parent, Map), ('Choropleth must be added'
' to a Map object.')
self.color_scale._parent = self._parent
super(Choropleth, self).render(**kwargs) | def function[render, parameter[self]]:
constant[Render the GeoJson/TopoJson and color scale objects.]
if name[self].color_scale begin[:]
assert[call[name[isinstance], parameter[name[self]._parent, name[Map]]]]
name[self].color_scale._parent assign[=] name[self]._parent
call[call[name[super], parameter[name[Choropleth], name[self]]].render, parameter[]] | keyword[def] identifier[render] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[color_scale] :
keyword[assert] identifier[isinstance] ( identifier[self] . identifier[_parent] , identifier[Map] ),( literal[string]
literal[string] )
identifier[self] . identifier[color_scale] . identifier[_parent] = identifier[self] . identifier[_parent]
identifier[super] ( identifier[Choropleth] , identifier[self] ). identifier[render] (** identifier[kwargs] ) | def render(self, **kwargs):
"""Render the GeoJson/TopoJson and color scale objects."""
if self.color_scale:
# ColorMap needs Map as its parent
assert isinstance(self._parent, Map), 'Choropleth must be added to a Map object.'
self.color_scale._parent = self._parent # depends on [control=['if'], data=[]]
super(Choropleth, self).render(**kwargs) |
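The parent check matters in practice: a Choropleth renders only once it is attached to a Map. A hedged folium sketch (the GeoJSON path is hypothetical):

import folium

m = folium.Map(location=[48.85, 2.35], zoom_start=5)
folium.Choropleth(
    geo_data='regions.geojson',   # hypothetical file
    fill_color='YlGn',
).add_to(m)                       # sets _parent, so render() passes the assert
m.save('map.html')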
def with_target_audience(self, target_audience):
"""Create a copy of these credentials with the specified target
audience.
Args:
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token.
Returns:
google.auth.service_account.IDTokenCredentials: A new credentials
instance.
"""
return self.__class__(
self._signer,
service_account_email=self._service_account_email,
token_uri=self._token_uri,
target_audience=target_audience,
additional_claims=self._additional_claims.copy()) | def function[with_target_audience, parameter[self, target_audience]]:
constant[Create a copy of these credentials with the specified target
audience.
Args:
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token.
Returns:
google.auth.service_account.IDTokenCredentials: A new credentials
instance.
]
return[call[name[self].__class__, parameter[name[self]._signer]]] | keyword[def] identifier[with_target_audience] ( identifier[self] , identifier[target_audience] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] (
identifier[self] . identifier[_signer] ,
identifier[service_account_email] = identifier[self] . identifier[_service_account_email] ,
identifier[token_uri] = identifier[self] . identifier[_token_uri] ,
identifier[target_audience] = identifier[target_audience] ,
identifier[additional_claims] = identifier[self] . identifier[_additional_claims] . identifier[copy] ()) | def with_target_audience(self, target_audience):
"""Create a copy of these credentials with the specified target
audience.
Args:
target_audience (str): The intended audience for these credentials,
used when requesting the ID Token.
Returns:
google.auth.service_account.IDTokenCredentials: A new credentials
instance.
"""
return self.__class__(self._signer, service_account_email=self._service_account_email, token_uri=self._token_uri, target_audience=target_audience, additional_claims=self._additional_claims.copy()) |
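Typical use with google-auth's service-account ID token credentials; the key path and audiences are placeholders:

from google.oauth2 import service_account

creds = service_account.IDTokenCredentials.from_service_account_file(
    'key.json',                                   # hypothetical key file
    target_audience='https://service-a.example.com')
# Reuse the same signer for a second audience without reloading the key:
creds_b = creds.with_target_audience('https://service-b.example.com')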
def to_file_object(self, name, out_dir):
"""Dump to a pickle file and return an File object reference of this list
Parameters
----------
name : str
An identifier of this file. Needs to be unique.
out_dir : path
path to place this file
Returns
-------
file : AhopeFile
"""
make_analysis_dir(out_dir)
file_ref = File('ALL', name, self.get_times_covered_by_files(),
extension='.pkl', directory=out_dir)
self.dump(file_ref.storage_path)
return file_ref | def function[to_file_object, parameter[self, name, out_dir]]:
constant[Dump to a pickle file and return an File object reference of this list
Parameters
----------
name : str
An identifier of this file. Needs to be unique.
out_dir : path
path to place this file
Returns
-------
file : AhopeFile
]
call[name[make_analysis_dir], parameter[name[out_dir]]]
variable[file_ref] assign[=] call[name[File], parameter[constant[ALL], name[name], call[name[self].get_times_covered_by_files, parameter[]]]]
call[name[self].dump, parameter[name[file_ref].storage_path]]
return[name[file_ref]] | keyword[def] identifier[to_file_object] ( identifier[self] , identifier[name] , identifier[out_dir] ):
literal[string]
identifier[make_analysis_dir] ( identifier[out_dir] )
identifier[file_ref] = identifier[File] ( literal[string] , identifier[name] , identifier[self] . identifier[get_times_covered_by_files] (),
identifier[extension] = literal[string] , identifier[directory] = identifier[out_dir] )
identifier[self] . identifier[dump] ( identifier[file_ref] . identifier[storage_path] )
keyword[return] identifier[file_ref] | def to_file_object(self, name, out_dir):
"""Dump to a pickle file and return an File object reference of this list
Parameters
----------
name : str
An identifier of this file. Needs to be unique.
out_dir : path
path to place this file
Returns
-------
file : AhopeFile
"""
make_analysis_dir(out_dir)
file_ref = File('ALL', name, self.get_times_covered_by_files(), extension='.pkl', directory=out_dir)
self.dump(file_ref.storage_path)
return file_ref |
def run():
"""Run the examples"""
    # NOTE(kiennt): This example isn't finished yet,
    # because we don't have a completed driver.
# Get a network client with openstack driver.
network_client = client.Client(version=_VERSION,
resource=_RESOURCES[0], provider=_PROVIDER)
# net = network_client.create('daikk', '10.0.0.0/24')
# list_subnet = network_client.list()
# network_client.show(list_subnet[0].get("id"))
network_client.delete("4b983028-0f8c-4b63-b10c-6e8420bb7903") | def function[run, parameter[]]:
constant[Run the examples]
variable[network_client] assign[=] call[name[client].Client, parameter[]]
call[name[network_client].delete, parameter[constant[4b983028-0f8c-4b63-b10c-6e8420bb7903]]] | keyword[def] identifier[run] ():
literal[string]
identifier[network_client] = identifier[client] . identifier[Client] ( identifier[version] = identifier[_VERSION] ,
identifier[resource] = identifier[_RESOURCES] [ literal[int] ], identifier[provider] = identifier[_PROVIDER] )
identifier[network_client] . identifier[delete] ( literal[string] ) | def run():
"""Run the examples"""
    # NOTE(kiennt): This example isn't finished yet,
    # because we don't have a completed driver.
# Get a network client with openstack driver.
network_client = client.Client(version=_VERSION, resource=_RESOURCES[0], provider=_PROVIDER)
# net = network_client.create('daikk', '10.0.0.0/24')
# list_subnet = network_client.list()
# network_client.show(list_subnet[0].get("id"))
network_client.delete('4b983028-0f8c-4b63-b10c-6e8420bb7903') |
def _pad_block(self, handle):
'''Pad the file with 0s to the end of the next block boundary.'''
extra = handle.tell() % 512
if extra:
handle.write(b'\x00' * (512 - extra)) | def function[_pad_block, parameter[self, handle]]:
constant[Pad the file with 0s to the end of the next block boundary.]
variable[extra] assign[=] binary_operation[call[name[handle].tell, parameter[]] <ast.Mod object at 0x7da2590d6920> constant[512]]
if name[extra] begin[:]
call[name[handle].write, parameter[binary_operation[constant[b'\x00'] * binary_operation[constant[512] - name[extra]]]]] | keyword[def] identifier[_pad_block] ( identifier[self] , identifier[handle] ):
literal[string]
identifier[extra] = identifier[handle] . identifier[tell] ()% literal[int]
keyword[if] identifier[extra] :
identifier[handle] . identifier[write] ( literal[string] *( literal[int] - identifier[extra] )) | def _pad_block(self, handle):
"""Pad the file with 0s to the end of the next block boundary."""
extra = handle.tell() % 512
if extra:
handle.write(b'\x00' * (512 - extra)) # depends on [control=['if'], data=[]] |
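A quick standalone check of the arithmetic, with BytesIO standing in for the real archive handle and the method rewritten as a free function:

import io

def pad_block(handle):
    # Same logic as the method above.
    extra = handle.tell() % 512
    if extra:
        handle.write(b'\x00' * (512 - extra))

buf = io.BytesIO(b'x' * 700)
buf.seek(0, io.SEEK_END)   # tell() == 700
pad_block(buf)
print(buf.tell())          # 1024: padded up to the next 512-byte boundary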
def update_status(modeladmin, request, queryset, status):
"""The workhorse function for the admin action functions that follow."""
# We loop over the objects here rather than use queryset.update() for
# two reasons:
#
# 1. No one should ever be updating zillions of Topics or Questions, so
# performance is not an issue.
# 2. To be tidy, we want to log what the user has done.
#
for obj in queryset:
obj.status = status
obj.save()
# Now log what happened.
# Use ugettext_noop() 'cause this is going straight into the db.
log_message = ugettext_noop(u'Changed status to \'%s\'.' %
obj.get_status_display())
modeladmin.log_change(request, obj, log_message)
# Send a message to the user telling them what has happened.
message_dict = {
'count': queryset.count(),
'object': modeladmin.model._meta.verbose_name,
'verb': dict(STATUS_CHOICES)[status],
}
if not message_dict['count'] == 1:
message_dict['object'] = modeladmin.model._meta.verbose_name_plural
user_message = ungettext(
u'%(count)s %(object)s was successfully %(verb)s.',
u'%(count)s %(object)s were successfully %(verb)s.',
message_dict['count']) % message_dict
modeladmin.message_user(request, user_message)
# Return None to display the change list page again and allow the user
# to reload the page without getting that nasty "Send the form again ..."
# warning from their browser.
return None | def function[update_status, parameter[modeladmin, request, queryset, status]]:
constant[The workhorse function for the admin action functions that follow.]
for taget[name[obj]] in starred[name[queryset]] begin[:]
name[obj].status assign[=] name[status]
call[name[obj].save, parameter[]]
variable[log_message] assign[=] call[name[ugettext_noop], parameter[binary_operation[constant[Changed status to '%s'.] <ast.Mod object at 0x7da2590d6920> call[name[obj].get_status_display, parameter[]]]]]
call[name[modeladmin].log_change, parameter[name[request], name[obj], name[log_message]]]
variable[message_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b26aed10>, <ast.Constant object at 0x7da1b26ac970>, <ast.Constant object at 0x7da1b26ae260>], [<ast.Call object at 0x7da1b26af520>, <ast.Attribute object at 0x7da1b26ac880>, <ast.Subscript object at 0x7da1b26ac430>]]
if <ast.UnaryOp object at 0x7da1b26ac370> begin[:]
call[name[message_dict]][constant[object]] assign[=] name[modeladmin].model._meta.verbose_name_plural
variable[user_message] assign[=] binary_operation[call[name[ungettext], parameter[constant[%(count)s %(object)s was successfully %(verb)s.], constant[%(count)s %(object)s were successfully %(verb)s.], call[name[message_dict]][constant[count]]]] <ast.Mod object at 0x7da2590d6920> name[message_dict]]
call[name[modeladmin].message_user, parameter[name[request], name[user_message]]]
return[constant[None]] | keyword[def] identifier[update_status] ( identifier[modeladmin] , identifier[request] , identifier[queryset] , identifier[status] ):
literal[string]
keyword[for] identifier[obj] keyword[in] identifier[queryset] :
identifier[obj] . identifier[status] = identifier[status]
identifier[obj] . identifier[save] ()
identifier[log_message] = identifier[ugettext_noop] ( literal[string] %
identifier[obj] . identifier[get_status_display] ())
identifier[modeladmin] . identifier[log_change] ( identifier[request] , identifier[obj] , identifier[log_message] )
identifier[message_dict] ={
literal[string] : identifier[queryset] . identifier[count] (),
literal[string] : identifier[modeladmin] . identifier[model] . identifier[_meta] . identifier[verbose_name] ,
literal[string] : identifier[dict] ( identifier[STATUS_CHOICES] )[ identifier[status] ],
}
keyword[if] keyword[not] identifier[message_dict] [ literal[string] ]== literal[int] :
identifier[message_dict] [ literal[string] ]= identifier[modeladmin] . identifier[model] . identifier[_meta] . identifier[verbose_name_plural]
identifier[user_message] = identifier[ungettext] (
literal[string] ,
literal[string] ,
identifier[message_dict] [ literal[string] ])% identifier[message_dict]
identifier[modeladmin] . identifier[message_user] ( identifier[request] , identifier[user_message] )
keyword[return] keyword[None] | def update_status(modeladmin, request, queryset, status):
"""The workhorse function for the admin action functions that follow."""
# We loop over the objects here rather than use queryset.update() for
# two reasons:
#
# 1. No one should ever be updating zillions of Topics or Questions, so
# performance is not an issue.
# 2. To be tidy, we want to log what the user has done.
#
for obj in queryset:
obj.status = status
obj.save()
# Now log what happened.
# Use ugettext_noop() 'cause this is going straight into the db.
log_message = ugettext_noop(u"Changed status to '%s'." % obj.get_status_display())
modeladmin.log_change(request, obj, log_message) # depends on [control=['for'], data=['obj']]
# Send a message to the user telling them what has happened.
message_dict = {'count': queryset.count(), 'object': modeladmin.model._meta.verbose_name, 'verb': dict(STATUS_CHOICES)[status]}
if not message_dict['count'] == 1:
message_dict['object'] = modeladmin.model._meta.verbose_name_plural # depends on [control=['if'], data=[]]
user_message = ungettext(u'%(count)s %(object)s was successfully %(verb)s.', u'%(count)s %(object)s were successfully %(verb)s.', message_dict['count']) % message_dict
modeladmin.message_user(request, user_message)
# Return None to display the change list page again and allow the user
# to reload the page without getting that nasty "Send the form again ..."
# warning from their browser.
return None |
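The admin action functions built on this are typically one-line wrappers; a hedged sketch (the 'published' status value is an assumption about STATUS_CHOICES):

def make_published(modeladmin, request, queryset):
    return update_status(modeladmin, request, queryset, 'published')
make_published.short_description = 'Mark selected items as published'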
def layer_norm_vars(filters):
"""Create Variables for layer norm."""
scale = tf.get_variable(
"layer_norm_scale", [filters], initializer=tf.ones_initializer())
bias = tf.get_variable(
"layer_norm_bias", [filters], initializer=tf.zeros_initializer())
return scale, bias | def function[layer_norm_vars, parameter[filters]]:
constant[Create Variables for layer norm.]
variable[scale] assign[=] call[name[tf].get_variable, parameter[constant[layer_norm_scale], list[[<ast.Name object at 0x7da18dc040a0>]]]]
variable[bias] assign[=] call[name[tf].get_variable, parameter[constant[layer_norm_bias], list[[<ast.Name object at 0x7da18dc06110>]]]]
return[tuple[[<ast.Name object at 0x7da18dc072e0>, <ast.Name object at 0x7da18dc06350>]]] | keyword[def] identifier[layer_norm_vars] ( identifier[filters] ):
literal[string]
identifier[scale] = identifier[tf] . identifier[get_variable] (
literal[string] ,[ identifier[filters] ], identifier[initializer] = identifier[tf] . identifier[ones_initializer] ())
identifier[bias] = identifier[tf] . identifier[get_variable] (
literal[string] ,[ identifier[filters] ], identifier[initializer] = identifier[tf] . identifier[zeros_initializer] ())
keyword[return] identifier[scale] , identifier[bias] | def layer_norm_vars(filters):
"""Create Variables for layer norm."""
scale = tf.get_variable('layer_norm_scale', [filters], initializer=tf.ones_initializer())
bias = tf.get_variable('layer_norm_bias', [filters], initializer=tf.zeros_initializer())
return (scale, bias) |
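A sketch of how such variables are typically consumed, normalizing over the last axis (epsilon guards the division):

import tensorflow as tf

def layer_norm_compute(x, scale, bias, epsilon=1e-6):
    # Normalize activations over the depth axis, then rescale and shift.
    mean = tf.reduce_mean(x, axis=[-1], keepdims=True)
    variance = tf.reduce_mean(tf.square(x - mean), axis=[-1], keepdims=True)
    norm_x = (x - mean) * tf.math.rsqrt(variance + epsilon)
    return norm_x * scale + bias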
def encode(raw: Any) -> str:
"""
Encode credential attribute value, leaving any (stringified) int32 alone: indy-sdk predicates
operate on int32 values properly only when their encoded values match their raw values.
To disambiguate for decoding, the operation reserves a sentinel for the null value and otherwise adds
2**31 to any non-trivial transform of a non-int32 input, then prepends a digit marking the input type:
* 1: string
* 2: boolean
* 3: non-32-bit integer
* 4: floating point
* 9: other (stringifiable)
:param raw: raw value to encode
:return: encoded value
"""
if raw is None:
return str(I32_BOUND) # sentinel
stringified = str(raw)
if isinstance(raw, bool):
return '{}{}'.format(
ENCODE_PREFIX[bool],
I32_BOUND + 2 if raw else I32_BOUND + 1) # decode gotcha: python bool('False') = True; use 2 sentinels
if isinstance(raw, int) and -I32_BOUND <= raw < I32_BOUND:
return stringified # it's an i32, leave it (as numeric string)
hexed = '{}{}'.format(
ENCODE_PREFIX.get(type(raw), ENCODE_PREFIX[None]),
str(int.from_bytes(hexlify(stringified.encode()), 'big') + I32_BOUND))
return hexed | def function[encode, parameter[raw]]:
constant[
Encode credential attribute value, leaving any (stringified) int32 alone: indy-sdk predicates
operate on int32 values properly only when their encoded values match their raw values.
To disambiguate for decoding, the operation reserves a sentinel for the null value and otherwise adds
2**31 to any non-trivial transform of a non-int32 input, then prepends a digit marking the input type:
* 1: string
* 2: boolean
* 3: non-32-bit integer
* 4: floating point
* 9: other (stringifiable)
:param raw: raw value to encode
:return: encoded value
]
if compare[name[raw] is constant[None]] begin[:]
return[call[name[str], parameter[name[I32_BOUND]]]]
variable[stringified] assign[=] call[name[str], parameter[name[raw]]]
if call[name[isinstance], parameter[name[raw], name[bool]]] begin[:]
return[call[constant[{}{}].format, parameter[call[name[ENCODE_PREFIX]][name[bool]], <ast.IfExp object at 0x7da2047e8910>]]]
if <ast.BoolOp object at 0x7da2047e95a0> begin[:]
return[name[stringified]]
variable[hexed] assign[=] call[constant[{}{}].format, parameter[call[name[ENCODE_PREFIX].get, parameter[call[name[type], parameter[name[raw]]], call[name[ENCODE_PREFIX]][constant[None]]]], call[name[str], parameter[binary_operation[call[name[int].from_bytes, parameter[call[name[hexlify], parameter[call[name[stringified].encode, parameter[]]]], constant[big]]] + name[I32_BOUND]]]]]]
return[name[hexed]] | keyword[def] identifier[encode] ( identifier[raw] : identifier[Any] )-> identifier[str] :
literal[string]
keyword[if] identifier[raw] keyword[is] keyword[None] :
keyword[return] identifier[str] ( identifier[I32_BOUND] )
identifier[stringified] = identifier[str] ( identifier[raw] )
keyword[if] identifier[isinstance] ( identifier[raw] , identifier[bool] ):
keyword[return] literal[string] . identifier[format] (
identifier[ENCODE_PREFIX] [ identifier[bool] ],
identifier[I32_BOUND] + literal[int] keyword[if] identifier[raw] keyword[else] identifier[I32_BOUND] + literal[int] )
keyword[if] identifier[isinstance] ( identifier[raw] , identifier[int] ) keyword[and] - identifier[I32_BOUND] <= identifier[raw] < identifier[I32_BOUND] :
keyword[return] identifier[stringified]
identifier[hexed] = literal[string] . identifier[format] (
identifier[ENCODE_PREFIX] . identifier[get] ( identifier[type] ( identifier[raw] ), identifier[ENCODE_PREFIX] [ keyword[None] ]),
identifier[str] ( identifier[int] . identifier[from_bytes] ( identifier[hexlify] ( identifier[stringified] . identifier[encode] ()), literal[string] )+ identifier[I32_BOUND] ))
keyword[return] identifier[hexed] | def encode(raw: Any) -> str:
"""
Encode credential attribute value, leaving any (stringified) int32 alone: indy-sdk predicates
operate on int32 values properly only when their encoded values match their raw values.
To disambiguate for decoding, the operation reserves a sentinel for the null value and otherwise adds
2**31 to any non-trivial transform of a non-int32 input, then prepends a digit marking the input type:
* 1: string
* 2: boolean
* 3: non-32-bit integer
* 4: floating point
* 9: other (stringifiable)
:param raw: raw value to encode
:return: encoded value
"""
if raw is None:
return str(I32_BOUND) # sentinel # depends on [control=['if'], data=[]]
stringified = str(raw)
if isinstance(raw, bool):
return '{}{}'.format(ENCODE_PREFIX[bool], I32_BOUND + 2 if raw else I32_BOUND + 1) # decode gotcha: python bool('False') = True; use 2 sentinels # depends on [control=['if'], data=[]]
if isinstance(raw, int) and -I32_BOUND <= raw < I32_BOUND:
return stringified # it's an i32, leave it (as numeric string) # depends on [control=['if'], data=[]]
hexed = '{}{}'.format(ENCODE_PREFIX.get(type(raw), ENCODE_PREFIX[None]), str(int.from_bytes(hexlify(stringified.encode()), 'big') + I32_BOUND))
return hexed |
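Illustrative outputs, assuming I32_BOUND == 2**31 and an ENCODE_PREFIX mapping bool to '2' and str to '1', as the docstring indicates:

print(encode(None))     # '2147483648'  -- the null sentinel
print(encode(-5))       # '-5'          -- int32 values pass through
print(encode(True))     # '22147483650' -- prefix 2, then I32_BOUND + 2
print(encode('hello'))  # '1' followed by a large hex-derived integer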
def _get_casing_permutations(cls, input_string):
""" Takes a string and gives all possible permutations of casing for comparative purposes
:param input_string: str, name of object
:return: Generator(str), iterator of all possible permutations of casing for the input_string
"""
if not input_string:
yield ""
else:
first = input_string[:1]
for sub_casing in cls._get_casing_permutations(input_string[1:]):
yield first.lower() + sub_casing
yield first.upper() + sub_casing | def function[_get_casing_permutations, parameter[cls, input_string]]:
constant[ Takes a string and gives all possible permutations of casing for comparative purposes
:param input_string: str, name of object
:return: Generator(str), iterator of all possible permutations of casing for the input_string
]
if <ast.UnaryOp object at 0x7da18ede6bf0> begin[:]
<ast.Yield object at 0x7da18ede50c0> | keyword[def] identifier[_get_casing_permutations] ( identifier[cls] , identifier[input_string] ):
literal[string]
keyword[if] keyword[not] identifier[input_string] :
keyword[yield] literal[string]
keyword[else] :
identifier[first] = identifier[input_string] [: literal[int] ]
keyword[for] identifier[sub_casing] keyword[in] identifier[cls] . identifier[_get_casing_permutations] ( identifier[input_string] [ literal[int] :]):
keyword[yield] identifier[first] . identifier[lower] ()+ identifier[sub_casing]
keyword[yield] identifier[first] . identifier[upper] ()+ identifier[sub_casing] | def _get_casing_permutations(cls, input_string):
""" Takes a string and gives all possible permutations of casing for comparative purposes
:param input_string: str, name of object
:return: Generator(str), iterator of all possible permutations of casing for the input_string
"""
if not input_string:
yield '' # depends on [control=['if'], data=[]]
else:
first = input_string[:1]
for sub_casing in cls._get_casing_permutations(input_string[1:]):
yield (first.lower() + sub_casing)
yield (first.upper() + sub_casing) # depends on [control=['for'], data=['sub_casing']] |
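A self-contained check of the 2**n expansion; the wrapping class name is hypothetical:

class NameTools:
    @classmethod
    def _get_casing_permutations(cls, input_string):
        if not input_string:
            yield ''
        else:
            first = input_string[:1]
            for sub in cls._get_casing_permutations(input_string[1:]):
                yield first.lower() + sub
                yield first.upper() + sub

print(list(NameTools._get_casing_permutations('ab')))
# ['ab', 'Ab', 'aB', 'AB']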
def get_region(b):
"""Tries to get the bucket region from Location.LocationConstraint
Special cases:
LocationConstraint EU defaults to eu-west-1
LocationConstraint null defaults to us-east-1
Args:
b (object): A bucket object
Returns:
string: an aws region string
"""
remap = {None: 'us-east-1', 'EU': 'eu-west-1'}
region = b.get('Location', {}).get('LocationConstraint')
return remap.get(region, region) | def function[get_region, parameter[b]]:
constant[Tries to get the bucket region from Location.LocationConstraint
Special cases:
LocationConstraint EU defaults to eu-west-1
LocationConstraint null defaults to us-east-1
Args:
b (object): A bucket object
Returns:
string: an aws region string
]
variable[remap] assign[=] dictionary[[<ast.Constant object at 0x7da18f09d750>, <ast.Constant object at 0x7da18f09d330>], [<ast.Constant object at 0x7da18f09c190>, <ast.Constant object at 0x7da18f09fb20>]]
variable[region] assign[=] call[call[name[b].get, parameter[constant[Location], dictionary[[], []]]].get, parameter[constant[LocationConstraint]]]
return[call[name[remap].get, parameter[name[region], name[region]]]] | keyword[def] identifier[get_region] ( identifier[b] ):
literal[string]
identifier[remap] ={ keyword[None] : literal[string] , literal[string] : literal[string] }
identifier[region] = identifier[b] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
keyword[return] identifier[remap] . identifier[get] ( identifier[region] , identifier[region] ) | def get_region(b):
"""Tries to get the bucket region from Location.LocationConstraint
Special cases:
LocationConstraint EU defaults to eu-west-1
LocationConstraint null defaults to us-east-1
Args:
b (object): A bucket object
Returns:
string: an aws region string
"""
remap = {None: 'us-east-1', 'EU': 'eu-west-1'}
region = b.get('Location', {}).get('LocationConstraint')
return remap.get(region, region) |
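The two special cases in action, with dicts shaped like GetBucketLocation results:

print(get_region({'Location': {'LocationConstraint': 'eu-central-1'}}))  # 'eu-central-1'
print(get_region({'Location': {'LocationConstraint': 'EU'}}))            # 'eu-west-1'
print(get_region({'Location': {}}))                                      # 'us-east-1'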
def gather_lines(self):
"""
Return the number of lines.
"""
total_lines = 0
for file in self.all_files:
full_path = os.path.join(self.paths["role"], file)
with open(full_path, "r") as f:
for line in f:
total_lines += 1
if full_path.endswith(".yml"):
self.yaml_files.append(full_path)
return total_lines | def function[gather_lines, parameter[self]]:
constant[
Return the number of lines.
]
variable[total_lines] assign[=] constant[0]
for taget[name[file]] in starred[name[self].all_files] begin[:]
variable[full_path] assign[=] call[name[os].path.join, parameter[call[name[self].paths][constant[role]], name[file]]]
with call[name[open], parameter[name[full_path], constant[r]]] begin[:]
for taget[name[line]] in starred[name[f]] begin[:]
<ast.AugAssign object at 0x7da1b0b62c80>
if call[name[full_path].endswith, parameter[constant[.yml]]] begin[:]
call[name[self].yaml_files.append, parameter[name[full_path]]]
return[name[total_lines]] | keyword[def] identifier[gather_lines] ( identifier[self] ):
literal[string]
identifier[total_lines] = literal[int]
keyword[for] identifier[file] keyword[in] identifier[self] . identifier[all_files] :
identifier[full_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[paths] [ literal[string] ], identifier[file] )
keyword[with] identifier[open] ( identifier[full_path] , literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[line] keyword[in] identifier[f] :
identifier[total_lines] += literal[int]
keyword[if] identifier[full_path] . identifier[endswith] ( literal[string] ):
identifier[self] . identifier[yaml_files] . identifier[append] ( identifier[full_path] )
keyword[return] identifier[total_lines] | def gather_lines(self):
"""
Return the number of lines.
"""
total_lines = 0
for file in self.all_files:
full_path = os.path.join(self.paths['role'], file)
with open(full_path, 'r') as f:
for line in f:
total_lines += 1 # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']]
if full_path.endswith('.yml'):
self.yaml_files.append(full_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file']]
return total_lines |
def find(self, query=None, func=None, labels=None, colors=None, pinned=None, archived=None, trashed=False): # pylint: disable=too-many-arguments
"""Find Notes based on the specified criteria.
Args:
query (Union[_sre.SRE_Pattern, str, None]): A str or regular expression to match against the title and text.
func (Union[callable, None]): A filter function.
labels (Union[List[str], None]): A list of label ids or objects to match. An empty list matches notes with no labels.
colors (Union[List[str], None]): A list of colors to match.
pinned (Union[bool, None]): Whether to match pinned notes.
archived (Union[bool, None]): Whether to match archived notes.
trashed (Union[bool, None]): Whether to match trashed notes.
Return:
List[gkeepapi.node.TopLevelNode]: Results.
"""
if labels is not None:
labels = [i.id if isinstance(i, _node.Label) else i for i in labels]
return (node for node in self.all() if
(query is None or (
(isinstance(query, six.string_types) and (query in node.title or query in node.text)) or
(isinstance(query, Pattern) and (
query.search(node.title) or query.search(node.text)
))
)) and
(func is None or func(node)) and \
(labels is None or \
(not labels and not node.labels.all()) or \
(any((node.labels.get(i) is not None for i in labels)))
) and \
(colors is None or node.color in colors) and \
(pinned is None or node.pinned == pinned) and \
(archived is None or node.archived == archived) and \
(trashed is None or node.trashed == trashed)
) | def function[find, parameter[self, query, func, labels, colors, pinned, archived, trashed]]:
constant[Find Notes based on the specified criteria.
Args:
query (Union[_sre.SRE_Pattern, str, None]): A str or regular expression to match against the title and text.
func (Union[callable, None]): A filter function.
labels (Union[List[str], None]): A list of label ids or objects to match. An empty list matches notes with no labels.
colors (Union[List[str], None]): A list of colors to match.
pinned (Union[bool, None]): Whether to match pinned notes.
archived (Union[bool, None]): Whether to match archived notes.
trashed (Union[bool, None]): Whether to match trashed notes.
Return:
List[gkeepapi.node.TopLevelNode]: Results.
]
if compare[name[labels] is_not constant[None]] begin[:]
variable[labels] assign[=] <ast.ListComp object at 0x7da18ede73d0>
return[<ast.GeneratorExp object at 0x7da18ede56c0>] | keyword[def] identifier[find] ( identifier[self] , identifier[query] = keyword[None] , identifier[func] = keyword[None] , identifier[labels] = keyword[None] , identifier[colors] = keyword[None] , identifier[pinned] = keyword[None] , identifier[archived] = keyword[None] , identifier[trashed] = keyword[False] ):
literal[string]
keyword[if] identifier[labels] keyword[is] keyword[not] keyword[None] :
identifier[labels] =[ identifier[i] . identifier[id] keyword[if] identifier[isinstance] ( identifier[i] , identifier[_node] . identifier[Label] ) keyword[else] identifier[i] keyword[for] identifier[i] keyword[in] identifier[labels] ]
keyword[return] ( identifier[node] keyword[for] identifier[node] keyword[in] identifier[self] . identifier[all] () keyword[if]
( identifier[query] keyword[is] keyword[None] keyword[or] (
( identifier[isinstance] ( identifier[query] , identifier[six] . identifier[string_types] ) keyword[and] ( identifier[query] keyword[in] identifier[node] . identifier[title] keyword[or] identifier[query] keyword[in] identifier[node] . identifier[text] )) keyword[or]
( identifier[isinstance] ( identifier[query] , identifier[Pattern] ) keyword[and] (
identifier[query] . identifier[search] ( identifier[node] . identifier[title] ) keyword[or] identifier[query] . identifier[search] ( identifier[node] . identifier[text] )
))
)) keyword[and]
( identifier[func] keyword[is] keyword[None] keyword[or] identifier[func] ( identifier[node] )) keyword[and] ( identifier[labels] keyword[is] keyword[None] keyword[or] ( keyword[not] identifier[labels] keyword[and] keyword[not] identifier[node] . identifier[labels] . identifier[all] ()) keyword[or] ( identifier[any] (( identifier[node] . identifier[labels] . identifier[get] ( identifier[i] ) keyword[is] keyword[not] keyword[None] keyword[for] identifier[i] keyword[in] identifier[labels] )))
) keyword[and] ( identifier[colors] keyword[is] keyword[None] keyword[or] identifier[node] . identifier[color] keyword[in] identifier[colors] ) keyword[and] ( identifier[pinned] keyword[is] keyword[None] keyword[or] identifier[node] . identifier[pinned] == identifier[pinned] ) keyword[and] ( identifier[archived] keyword[is] keyword[None] keyword[or] identifier[node] . identifier[archived] == identifier[archived] ) keyword[and] ( identifier[trashed] keyword[is] keyword[None] keyword[or] identifier[node] . identifier[trashed] == identifier[trashed] )
) | def find(self, query=None, func=None, labels=None, colors=None, pinned=None, archived=None, trashed=False): # pylint: disable=too-many-arguments
'Find Notes based on the specified criteria.\n\n Args:\n query (Union[_sre.SRE_Pattern, str, None]): A str or regular expression to match against the title and text.\n func (Union[callable, None]): A filter function.\n labels (Union[List[str], None]): A list of label ids or objects to match. An empty list matches notes with no labels.\n colors (Union[List[str], None]): A list of colors to match.\n pinned (Union[bool, None]): Whether to match pinned notes.\n archived (Union[bool, None]): Whether to match archived notes.\n trashed (Union[bool, None]): Whether to match trashed notes.\n\n Return:\n List[gkeepapi.node.TopLevelNode]: Results.\n '
if labels is not None:
labels = [i.id if isinstance(i, _node.Label) else i for i in labels] # depends on [control=['if'], data=['labels']]
return (node for node in self.all() if (query is None or (isinstance(query, six.string_types) and (query in node.title or query in node.text) or (isinstance(query, Pattern) and (query.search(node.title) or query.search(node.text))))) and (func is None or func(node)) and (labels is None or (not labels and (not node.labels.all())) or any((node.labels.get(i) is not None for i in labels))) and (colors is None or node.color in colors) and (pinned is None or node.pinned == pinned) and (archived is None or node.archived == archived) and (trashed is None or node.trashed == trashed)) |
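A minimal usage sketch for the find method above, assuming a gkeepapi.Keep instance with a valid session (authentication step omitted and hypothetical):

import re
import gkeepapi

keep = gkeepapi.Keep()
# ... authenticate the session here ...
for note in keep.find(query=re.compile(r'TODO', re.I), pinned=True, archived=False):
    print(note.title)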
def main():
"""
NAME
find_EI.py
DESCRIPTION
    Applies a series of assumed flattening factors and "unsquishes" inclinations assuming a tangent function.
Finds flattening factor that gives elongation/inclination pair consistent with TK03.
Finds bootstrap confidence bounds
SYNTAX
find_EI.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE specify input file name
        -n N specify number of bootstraps - the more the better (but slower!); default is 1000
-sc uses a "site-level" correction to a Fisherian distribution instead
of a "study-level" correction to a TK03-consistent distribution.
Note that many directions (~ 100) are needed for this correction to be reliable.
-fmt [svg,png,eps,pdf..] change plot format, default is svg
-sav saves the figures and quits
INPUT
dec/inc pairs, delimited with space or tabs
OUTPUT
four plots: 1) equal area plot of original directions
2) Elongation/inclination pairs as a function of f, data plus 25 bootstrap samples
3) Cumulative distribution of bootstrapped optimal inclinations plus uncertainties.
Estimate from original data set plotted as solid line
        4) Orientation of principal direction through unflattening
NOTE: If distribution does not have a solution, plot labeled: Pathological. Some bootstrap samples may have
valid solutions and those are plotted in the CDFs and E/I plot.
"""
fmt,nb='svg',1000
plot=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit() # graceful quit
elif '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
else:
print(main.__doc__)
sys.exit()
if '-n' in sys.argv:
ind=sys.argv.index('-n')
nb=int(sys.argv[ind+1])
if '-sc' in sys.argv:
site_correction = True
else:
site_correction = False
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-sav' in sys.argv:plot=1
data=numpy.loadtxt(file)
upper,lower=int(round(.975*nb)),int(round(.025*nb))
E,I=[],[]
PLTS={'eq':1,'ei':2,'cdf':3,'v2':4}
pmagplotlib.plot_init(PLTS['eq'],6,6)
pmagplotlib.plot_init(PLTS['ei'],5,5)
pmagplotlib.plot_init(PLTS['cdf'],5,5)
pmagplotlib.plot_init(PLTS['v2'],5,5)
pmagplotlib.plot_eq(PLTS['eq'],data,'Data')
# this is a problem
#if plot==0:pmagplotlib.draw_figs(PLTS)
ppars=pmag.doprinc(data)
Io=ppars['inc']
n=ppars["N"]
Es,Is,Fs,V2s=pmag.find_f(data)
if site_correction:
Inc,Elong=Is[Es.index(min(Es))],Es[Es.index(min(Es))]
flat_f = Fs[Es.index(min(Es))]
else:
Inc,Elong=Is[-1],Es[-1]
flat_f = Fs[-1]
pmagplotlib.plot_ei(PLTS['ei'],Es,Is,flat_f)
pmagplotlib.plot_v2s(PLTS['v2'],V2s,Is,flat_f)
b=0
print("Bootstrapping.... be patient")
while b<nb:
bdata=pmag.pseudo(data)
Esb,Isb,Fsb,V2sb=pmag.find_f(bdata)
if b<25:
pmagplotlib.plot_ei(PLTS['ei'],Esb,Isb,Fsb[-1])
if Esb[-1]!=0:
ppars=pmag.doprinc(bdata)
if site_correction:
I.append(abs(Isb[Esb.index(min(Esb))]))
E.append(Esb[Esb.index(min(Esb))])
else:
I.append(abs(Isb[-1]))
E.append(Esb[-1])
b+=1
if b%25==0:print(b,' out of ',nb)
I.sort()
E.sort()
Eexp=[]
for i in I:
Eexp.append(pmag.EI(i))
if Inc==0:
title= 'Pathological Distribution: '+'[%7.1f, %7.1f]' %(I[lower],I[upper])
else:
title= '%7.1f [%7.1f, %7.1f]' %( Inc, I[lower],I[upper])
pmagplotlib.plot_ei(PLTS['ei'],Eexp,I,1)
pmagplotlib.plot_cdf(PLTS['cdf'],I,'Inclinations','r',title)
pmagplotlib.plot_vs(PLTS['cdf'],[I[lower],I[upper]],'b','--')
pmagplotlib.plot_vs(PLTS['cdf'],[Inc],'g','-')
pmagplotlib.plot_vs(PLTS['cdf'],[Io],'k','-')
if plot==0:
print('%7.1f %s %7.1f _ %7.1f ^ %7.1f: %6.4f _ %6.4f ^ %6.4f' %(Io, " => ", Inc, I[lower],I[upper], Elong, E[lower],E[upper]))
print("Io Inc I_lower, I_upper, Elon, E_lower, E_upper")
pmagplotlib.draw_figs(PLTS)
ans = ""
while ans not in ['q', 'a']:
ans= input("S[a]ve plots - <q> to quit: ")
if ans=='q':
print("\n Good bye\n")
sys.exit()
files={}
files['eq']='findEI_eq.'+fmt
files['ei']='findEI_ei.'+fmt
files['cdf']='findEI_cdf.'+fmt
files['v2']='findEI_v2.'+fmt
pmagplotlib.save_plots(PLTS,files) | def function[main, parameter[]]:
constant[
NAME
find_EI.py
DESCRIPTION
    Applies a series of assumed flattening factors and "unsquishes" inclinations assuming a tangent function.
Finds flattening factor that gives elongation/inclination pair consistent with TK03.
Finds bootstrap confidence bounds
SYNTAX
find_EI.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE specify input file name
        -n N specify number of bootstraps - the more the better (but slower!); default is 1000
-sc uses a "site-level" correction to a Fisherian distribution instead
of a "study-level" correction to a TK03-consistent distribution.
Note that many directions (~ 100) are needed for this correction to be reliable.
-fmt [svg,png,eps,pdf..] change plot format, default is svg
-sav saves the figures and quits
INPUT
dec/inc pairs, delimited with space or tabs
OUTPUT
four plots: 1) equal area plot of original directions
2) Elongation/inclination pairs as a function of f, data plus 25 bootstrap samples
3) Cumulative distribution of bootstrapped optimal inclinations plus uncertainties.
Estimate from original data set plotted as solid line
        4) Orientation of principal direction through unflattening
NOTE: If distribution does not have a solution, plot labeled: Pathological. Some bootstrap samples may have
valid solutions and those are plotted in the CDFs and E/I plot.
]
<ast.Tuple object at 0x7da1b047e2c0> assign[=] tuple[[<ast.Constant object at 0x7da1b047e200>, <ast.Constant object at 0x7da1b047e1d0>]]
variable[plot] assign[=] constant[0]
if compare[constant[-h] in name[sys].argv] begin[:]
call[name[print], parameter[name[main].__doc__]]
call[name[sys].exit, parameter[]]
if compare[constant[-n] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-n]]]
variable[nb] assign[=] call[name[int], parameter[call[name[sys].argv][binary_operation[name[ind] + constant[1]]]]]
if compare[constant[-sc] in name[sys].argv] begin[:]
variable[site_correction] assign[=] constant[True]
if compare[constant[-fmt] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-fmt]]]
variable[fmt] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-sav] in name[sys].argv] begin[:]
variable[plot] assign[=] constant[1]
variable[data] assign[=] call[name[numpy].loadtxt, parameter[name[file]]]
<ast.Tuple object at 0x7da1b047cb80> assign[=] tuple[[<ast.Call object at 0x7da1b047cac0>, <ast.Call object at 0x7da1b047c970>]]
<ast.Tuple object at 0x7da1b047c7f0> assign[=] tuple[[<ast.List object at 0x7da1b047c730>, <ast.List object at 0x7da1b047c700>]]
variable[PLTS] assign[=] dictionary[[<ast.Constant object at 0x7da1b047c640>, <ast.Constant object at 0x7da1b047c610>, <ast.Constant object at 0x7da1b047c5e0>, <ast.Constant object at 0x7da1b047c5b0>], [<ast.Constant object at 0x7da1b047c580>, <ast.Constant object at 0x7da1b047c550>, <ast.Constant object at 0x7da1b047c520>, <ast.Constant object at 0x7da1b047c4f0>]]
call[name[pmagplotlib].plot_init, parameter[call[name[PLTS]][constant[eq]], constant[6], constant[6]]]
call[name[pmagplotlib].plot_init, parameter[call[name[PLTS]][constant[ei]], constant[5], constant[5]]]
call[name[pmagplotlib].plot_init, parameter[call[name[PLTS]][constant[cdf]], constant[5], constant[5]]]
call[name[pmagplotlib].plot_init, parameter[call[name[PLTS]][constant[v2]], constant[5], constant[5]]]
call[name[pmagplotlib].plot_eq, parameter[call[name[PLTS]][constant[eq]], name[data], constant[Data]]]
variable[ppars] assign[=] call[name[pmag].doprinc, parameter[name[data]]]
variable[Io] assign[=] call[name[ppars]][constant[inc]]
variable[n] assign[=] call[name[ppars]][constant[N]]
<ast.Tuple object at 0x7da1b044d390> assign[=] call[name[pmag].find_f, parameter[name[data]]]
if name[site_correction] begin[:]
<ast.Tuple object at 0x7da1b044d0c0> assign[=] tuple[[<ast.Subscript object at 0x7da1b044d030>, <ast.Subscript object at 0x7da1b044cc70>]]
variable[flat_f] assign[=] call[name[Fs]][call[name[Es].index, parameter[call[name[min], parameter[name[Es]]]]]]
call[name[pmagplotlib].plot_ei, parameter[call[name[PLTS]][constant[ei]], name[Es], name[Is], name[flat_f]]]
call[name[pmagplotlib].plot_v2s, parameter[call[name[PLTS]][constant[v2]], name[V2s], name[Is], name[flat_f]]]
variable[b] assign[=] constant[0]
call[name[print], parameter[constant[Bootstrapping.... be patient]]]
while compare[name[b] less[<] name[nb]] begin[:]
variable[bdata] assign[=] call[name[pmag].pseudo, parameter[name[data]]]
<ast.Tuple object at 0x7da1b044f9d0> assign[=] call[name[pmag].find_f, parameter[name[bdata]]]
if compare[name[b] less[<] constant[25]] begin[:]
call[name[pmagplotlib].plot_ei, parameter[call[name[PLTS]][constant[ei]], name[Esb], name[Isb], call[name[Fsb]][<ast.UnaryOp object at 0x7da1b044cd00>]]]
if compare[call[name[Esb]][<ast.UnaryOp object at 0x7da1b044c430>] not_equal[!=] constant[0]] begin[:]
variable[ppars] assign[=] call[name[pmag].doprinc, parameter[name[bdata]]]
if name[site_correction] begin[:]
call[name[I].append, parameter[call[name[abs], parameter[call[name[Isb]][call[name[Esb].index, parameter[call[name[min], parameter[name[Esb]]]]]]]]]]
call[name[E].append, parameter[call[name[Esb]][call[name[Esb].index, parameter[call[name[min], parameter[name[Esb]]]]]]]]
<ast.AugAssign object at 0x7da1b044e6b0>
if compare[binary_operation[name[b] <ast.Mod object at 0x7da2590d6920> constant[25]] equal[==] constant[0]] begin[:]
call[name[print], parameter[name[b], constant[ out of ], name[nb]]]
call[name[I].sort, parameter[]]
call[name[E].sort, parameter[]]
variable[Eexp] assign[=] list[[]]
for taget[name[i]] in starred[name[I]] begin[:]
call[name[Eexp].append, parameter[call[name[pmag].EI, parameter[name[i]]]]]
if compare[name[Inc] equal[==] constant[0]] begin[:]
variable[title] assign[=] binary_operation[constant[Pathological Distribution: ] + binary_operation[constant[[%7.1f, %7.1f]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b044dc60>, <ast.Subscript object at 0x7da1b044e1d0>]]]]
call[name[pmagplotlib].plot_ei, parameter[call[name[PLTS]][constant[ei]], name[Eexp], name[I], constant[1]]]
call[name[pmagplotlib].plot_cdf, parameter[call[name[PLTS]][constant[cdf]], name[I], constant[Inclinations], constant[r], name[title]]]
call[name[pmagplotlib].plot_vs, parameter[call[name[PLTS]][constant[cdf]], list[[<ast.Subscript object at 0x7da1b0450f10>, <ast.Subscript object at 0x7da1b04518a0>]], constant[b], constant[--]]]
call[name[pmagplotlib].plot_vs, parameter[call[name[PLTS]][constant[cdf]], list[[<ast.Name object at 0x7da1b0450dc0>]], constant[g], constant[-]]]
call[name[pmagplotlib].plot_vs, parameter[call[name[PLTS]][constant[cdf]], list[[<ast.Name object at 0x7da1b0451390>]], constant[k], constant[-]]]
if compare[name[plot] equal[==] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[%7.1f %s %7.1f _ %7.1f ^ %7.1f: %6.4f _ %6.4f ^ %6.4f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0450c10>, <ast.Constant object at 0x7da1b0450bb0>, <ast.Name object at 0x7da1b04509a0>, <ast.Subscript object at 0x7da1b0450a00>, <ast.Subscript object at 0x7da1b04fee00>, <ast.Name object at 0x7da1b04fc0a0>, <ast.Subscript object at 0x7da1b04ffaf0>, <ast.Subscript object at 0x7da1b04fc340>]]]]]
call[name[print], parameter[constant[Io Inc I_lower, I_upper, Elon, E_lower, E_upper]]]
call[name[pmagplotlib].draw_figs, parameter[name[PLTS]]]
variable[ans] assign[=] constant[]
while compare[name[ans] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b04c8b50>, <ast.Constant object at 0x7da1b04cbdc0>]]] begin[:]
variable[ans] assign[=] call[name[input], parameter[constant[S[a]ve plots - <q> to quit: ]]]
if compare[name[ans] equal[==] constant[q]] begin[:]
call[name[print], parameter[constant[
Good bye
]]]
call[name[sys].exit, parameter[]]
variable[files] assign[=] dictionary[[], []]
call[name[files]][constant[eq]] assign[=] binary_operation[constant[findEI_eq.] + name[fmt]]
call[name[files]][constant[ei]] assign[=] binary_operation[constant[findEI_ei.] + name[fmt]]
call[name[files]][constant[cdf]] assign[=] binary_operation[constant[findEI_cdf.] + name[fmt]]
call[name[files]][constant[v2]] assign[=] binary_operation[constant[findEI_v2.] + name[fmt]]
call[name[pmagplotlib].save_plots, parameter[name[PLTS], name[files]]] | keyword[def] identifier[main] ():
literal[string]
identifier[fmt] , identifier[nb] = literal[string] , literal[int]
identifier[plot] = literal[int]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
keyword[elif] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[else] :
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[nb] = identifier[int] ( identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[site_correction] = keyword[True]
keyword[else] :
identifier[site_correction] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[fmt] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[plot] = literal[int]
identifier[data] = identifier[numpy] . identifier[loadtxt] ( identifier[file] )
identifier[upper] , identifier[lower] = identifier[int] ( identifier[round] ( literal[int] * identifier[nb] )), identifier[int] ( identifier[round] ( literal[int] * identifier[nb] ))
identifier[E] , identifier[I] =[],[]
identifier[PLTS] ={ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] }
identifier[pmagplotlib] . identifier[plot_init] ( identifier[PLTS] [ literal[string] ], literal[int] , literal[int] )
identifier[pmagplotlib] . identifier[plot_init] ( identifier[PLTS] [ literal[string] ], literal[int] , literal[int] )
identifier[pmagplotlib] . identifier[plot_init] ( identifier[PLTS] [ literal[string] ], literal[int] , literal[int] )
identifier[pmagplotlib] . identifier[plot_init] ( identifier[PLTS] [ literal[string] ], literal[int] , literal[int] )
identifier[pmagplotlib] . identifier[plot_eq] ( identifier[PLTS] [ literal[string] ], identifier[data] , literal[string] )
identifier[ppars] = identifier[pmag] . identifier[doprinc] ( identifier[data] )
identifier[Io] = identifier[ppars] [ literal[string] ]
identifier[n] = identifier[ppars] [ literal[string] ]
identifier[Es] , identifier[Is] , identifier[Fs] , identifier[V2s] = identifier[pmag] . identifier[find_f] ( identifier[data] )
keyword[if] identifier[site_correction] :
identifier[Inc] , identifier[Elong] = identifier[Is] [ identifier[Es] . identifier[index] ( identifier[min] ( identifier[Es] ))], identifier[Es] [ identifier[Es] . identifier[index] ( identifier[min] ( identifier[Es] ))]
identifier[flat_f] = identifier[Fs] [ identifier[Es] . identifier[index] ( identifier[min] ( identifier[Es] ))]
keyword[else] :
identifier[Inc] , identifier[Elong] = identifier[Is] [- literal[int] ], identifier[Es] [- literal[int] ]
identifier[flat_f] = identifier[Fs] [- literal[int] ]
identifier[pmagplotlib] . identifier[plot_ei] ( identifier[PLTS] [ literal[string] ], identifier[Es] , identifier[Is] , identifier[flat_f] )
identifier[pmagplotlib] . identifier[plot_v2s] ( identifier[PLTS] [ literal[string] ], identifier[V2s] , identifier[Is] , identifier[flat_f] )
identifier[b] = literal[int]
identifier[print] ( literal[string] )
keyword[while] identifier[b] < identifier[nb] :
identifier[bdata] = identifier[pmag] . identifier[pseudo] ( identifier[data] )
identifier[Esb] , identifier[Isb] , identifier[Fsb] , identifier[V2sb] = identifier[pmag] . identifier[find_f] ( identifier[bdata] )
keyword[if] identifier[b] < literal[int] :
identifier[pmagplotlib] . identifier[plot_ei] ( identifier[PLTS] [ literal[string] ], identifier[Esb] , identifier[Isb] , identifier[Fsb] [- literal[int] ])
keyword[if] identifier[Esb] [- literal[int] ]!= literal[int] :
identifier[ppars] = identifier[pmag] . identifier[doprinc] ( identifier[bdata] )
keyword[if] identifier[site_correction] :
identifier[I] . identifier[append] ( identifier[abs] ( identifier[Isb] [ identifier[Esb] . identifier[index] ( identifier[min] ( identifier[Esb] ))]))
identifier[E] . identifier[append] ( identifier[Esb] [ identifier[Esb] . identifier[index] ( identifier[min] ( identifier[Esb] ))])
keyword[else] :
identifier[I] . identifier[append] ( identifier[abs] ( identifier[Isb] [- literal[int] ]))
identifier[E] . identifier[append] ( identifier[Esb] [- literal[int] ])
identifier[b] += literal[int]
keyword[if] identifier[b] % literal[int] == literal[int] : identifier[print] ( identifier[b] , literal[string] , identifier[nb] )
identifier[I] . identifier[sort] ()
identifier[E] . identifier[sort] ()
identifier[Eexp] =[]
keyword[for] identifier[i] keyword[in] identifier[I] :
identifier[Eexp] . identifier[append] ( identifier[pmag] . identifier[EI] ( identifier[i] ))
keyword[if] identifier[Inc] == literal[int] :
identifier[title] = literal[string] + literal[string] %( identifier[I] [ identifier[lower] ], identifier[I] [ identifier[upper] ])
keyword[else] :
identifier[title] = literal[string] %( identifier[Inc] , identifier[I] [ identifier[lower] ], identifier[I] [ identifier[upper] ])
identifier[pmagplotlib] . identifier[plot_ei] ( identifier[PLTS] [ literal[string] ], identifier[Eexp] , identifier[I] , literal[int] )
identifier[pmagplotlib] . identifier[plot_cdf] ( identifier[PLTS] [ literal[string] ], identifier[I] , literal[string] , literal[string] , identifier[title] )
identifier[pmagplotlib] . identifier[plot_vs] ( identifier[PLTS] [ literal[string] ],[ identifier[I] [ identifier[lower] ], identifier[I] [ identifier[upper] ]], literal[string] , literal[string] )
identifier[pmagplotlib] . identifier[plot_vs] ( identifier[PLTS] [ literal[string] ],[ identifier[Inc] ], literal[string] , literal[string] )
identifier[pmagplotlib] . identifier[plot_vs] ( identifier[PLTS] [ literal[string] ],[ identifier[Io] ], literal[string] , literal[string] )
keyword[if] identifier[plot] == literal[int] :
identifier[print] ( literal[string] %( identifier[Io] , literal[string] , identifier[Inc] , identifier[I] [ identifier[lower] ], identifier[I] [ identifier[upper] ], identifier[Elong] , identifier[E] [ identifier[lower] ], identifier[E] [ identifier[upper] ]))
identifier[print] ( literal[string] )
identifier[pmagplotlib] . identifier[draw_figs] ( identifier[PLTS] )
identifier[ans] = literal[string]
keyword[while] identifier[ans] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
identifier[ans] = identifier[input] ( literal[string] )
keyword[if] identifier[ans] == literal[string] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
identifier[files] ={}
identifier[files] [ literal[string] ]= literal[string] + identifier[fmt]
identifier[files] [ literal[string] ]= literal[string] + identifier[fmt]
identifier[files] [ literal[string] ]= literal[string] + identifier[fmt]
identifier[files] [ literal[string] ]= literal[string] + identifier[fmt]
identifier[pmagplotlib] . identifier[save_plots] ( identifier[PLTS] , identifier[files] ) | def main():
"""
NAME
find_EI.py
DESCRIPTION
    Applies a series of assumed flattening factors and "unsquishes" inclinations assuming a tangent function.
Finds flattening factor that gives elongation/inclination pair consistent with TK03.
Finds bootstrap confidence bounds
SYNTAX
find_EI.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE specify input file name
        -n N specify number of bootstraps - the more the better (but slower!); default is 1000
-sc uses a "site-level" correction to a Fisherian distribution instead
of a "study-level" correction to a TK03-consistent distribution.
Note that many directions (~ 100) are needed for this correction to be reliable.
-fmt [svg,png,eps,pdf..] change plot format, default is svg
-sav saves the figures and quits
INPUT
dec/inc pairs, delimited with space or tabs
OUTPUT
four plots: 1) equal area plot of original directions
2) Elongation/inclination pairs as a function of f, data plus 25 bootstrap samples
3) Cumulative distribution of bootstrapped optimal inclinations plus uncertainties.
Estimate from original data set plotted as solid line
        4) Orientation of principal direction through unflattening
NOTE: If distribution does not have a solution, plot labeled: Pathological. Some bootstrap samples may have
valid solutions and those are plotted in the CDFs and E/I plot.
"""
(fmt, nb) = ('svg', 1000)
plot = 0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit() # graceful quit # depends on [control=['if'], data=[]]
elif '-f' in sys.argv:
ind = sys.argv.index('-f')
file = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
else:
print(main.__doc__)
sys.exit()
if '-n' in sys.argv:
ind = sys.argv.index('-n')
nb = int(sys.argv[ind + 1]) # depends on [control=['if'], data=[]]
if '-sc' in sys.argv:
site_correction = True # depends on [control=['if'], data=[]]
else:
site_correction = False
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
if '-sav' in sys.argv:
plot = 1 # depends on [control=['if'], data=[]]
data = numpy.loadtxt(file)
(upper, lower) = (int(round(0.975 * nb)), int(round(0.025 * nb)))
(E, I) = ([], [])
PLTS = {'eq': 1, 'ei': 2, 'cdf': 3, 'v2': 4}
pmagplotlib.plot_init(PLTS['eq'], 6, 6)
pmagplotlib.plot_init(PLTS['ei'], 5, 5)
pmagplotlib.plot_init(PLTS['cdf'], 5, 5)
pmagplotlib.plot_init(PLTS['v2'], 5, 5)
pmagplotlib.plot_eq(PLTS['eq'], data, 'Data')
# this is a problem
#if plot==0:pmagplotlib.draw_figs(PLTS)
ppars = pmag.doprinc(data)
Io = ppars['inc']
n = ppars['N']
(Es, Is, Fs, V2s) = pmag.find_f(data)
if site_correction:
(Inc, Elong) = (Is[Es.index(min(Es))], Es[Es.index(min(Es))])
flat_f = Fs[Es.index(min(Es))] # depends on [control=['if'], data=[]]
else:
(Inc, Elong) = (Is[-1], Es[-1])
flat_f = Fs[-1]
pmagplotlib.plot_ei(PLTS['ei'], Es, Is, flat_f)
pmagplotlib.plot_v2s(PLTS['v2'], V2s, Is, flat_f)
b = 0
print('Bootstrapping.... be patient')
while b < nb:
bdata = pmag.pseudo(data)
(Esb, Isb, Fsb, V2sb) = pmag.find_f(bdata)
if b < 25:
pmagplotlib.plot_ei(PLTS['ei'], Esb, Isb, Fsb[-1]) # depends on [control=['if'], data=[]]
if Esb[-1] != 0:
ppars = pmag.doprinc(bdata)
if site_correction:
I.append(abs(Isb[Esb.index(min(Esb))]))
E.append(Esb[Esb.index(min(Esb))]) # depends on [control=['if'], data=[]]
else:
I.append(abs(Isb[-1]))
E.append(Esb[-1])
b += 1
if b % 25 == 0:
print(b, ' out of ', nb) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['b', 'nb']]
I.sort()
E.sort()
Eexp = []
for i in I:
Eexp.append(pmag.EI(i)) # depends on [control=['for'], data=['i']]
if Inc == 0:
title = 'Pathological Distribution: ' + '[%7.1f, %7.1f]' % (I[lower], I[upper]) # depends on [control=['if'], data=[]]
else:
title = '%7.1f [%7.1f, %7.1f]' % (Inc, I[lower], I[upper])
pmagplotlib.plot_ei(PLTS['ei'], Eexp, I, 1)
pmagplotlib.plot_cdf(PLTS['cdf'], I, 'Inclinations', 'r', title)
pmagplotlib.plot_vs(PLTS['cdf'], [I[lower], I[upper]], 'b', '--')
pmagplotlib.plot_vs(PLTS['cdf'], [Inc], 'g', '-')
pmagplotlib.plot_vs(PLTS['cdf'], [Io], 'k', '-')
if plot == 0:
print('%7.1f %s %7.1f _ %7.1f ^ %7.1f: %6.4f _ %6.4f ^ %6.4f' % (Io, ' => ', Inc, I[lower], I[upper], Elong, E[lower], E[upper]))
print('Io Inc I_lower, I_upper, Elon, E_lower, E_upper')
pmagplotlib.draw_figs(PLTS)
ans = ''
while ans not in ['q', 'a']:
ans = input('S[a]ve plots - <q> to quit: ') # depends on [control=['while'], data=['ans']]
if ans == 'q':
print('\n Good bye\n')
sys.exit() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
files = {}
files['eq'] = 'findEI_eq.' + fmt
files['ei'] = 'findEI_ei.' + fmt
files['cdf'] = 'findEI_cdf.' + fmt
files['v2'] = 'findEI_v2.' + fmt
pmagplotlib.save_plots(PLTS, files) |
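The "unsquishing" in find_EI.py is the tangent correction tan(I_obs) = f * tan(I_flat); a standalone sketch of inverting it for a single flattening factor f (an illustration, not part of the pmag API):

import numpy as np

def unsquish(inc_deg, f):
    # invert tan(I_obs) = f * tan(I_flat) to recover the unflattened inclination
    return np.degrees(np.arctan(np.tan(np.radians(inc_deg)) / f))

print(unsquish(30.0, 0.5))  # ~49.1 degrees for a 30 degree observed inclination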
def _urlextract_cli():
"""
urlextract - command line program that will print all URLs to stdout
Usage: urlextract [input_file] [-u] [-v]
input_file - text file with URLs to extract
"""
import argparse
def get_args():
"""
Parse programs arguments
"""
parser = argparse.ArgumentParser(
description='urlextract - prints out all URLs that were '
'found in input file or stdin based on locating '
'their TLDs')
ver = URLExtract.get_version()
parser.add_argument("-v", "--version", action="version",
version='%(prog)s - version {}'.format(ver))
parser.add_argument(
"-u", "--unique", dest='unique', action='store_true',
help='print out only unique URLs found in file.')
parser.add_argument(
'input_file', nargs='?', metavar='<input_file>',
type=argparse.FileType(encoding="UTF-8"), default=sys.stdin,
help='input text file with URLs to extract. [UTF-8]')
parsed_args = parser.parse_args()
return parsed_args
args = get_args()
logging.basicConfig(
level=logging.INFO, stream=sys.stderr,
format='%(asctime)s - %(levelname)s (%(name)s): %(message)s')
logger = logging.getLogger('urlextract')
try:
urlextract = URLExtract()
urlextract.update_when_older(30)
content = args.input_file.read()
for url in urlextract.find_urls(content, args.unique):
print(url)
except CacheFileError as e:
logger.error(str(e))
sys.exit(-1)
finally:
args.input_file.close() | def function[_urlextract_cli, parameter[]]:
constant[
urlextract - command line program that will print all URLs to stdout
Usage: urlextract [input_file] [-u] [-v]
input_file - text file with URLs to extract
]
import module[argparse]
def function[get_args, parameter[]]:
constant[
Parse programs arguments
]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
variable[ver] assign[=] call[name[URLExtract].get_version, parameter[]]
call[name[parser].add_argument, parameter[constant[-v], constant[--version]]]
call[name[parser].add_argument, parameter[constant[-u], constant[--unique]]]
call[name[parser].add_argument, parameter[constant[input_file]]]
variable[parsed_args] assign[=] call[name[parser].parse_args, parameter[]]
return[name[parsed_args]]
variable[args] assign[=] call[name[get_args], parameter[]]
call[name[logging].basicConfig, parameter[]]
variable[logger] assign[=] call[name[logging].getLogger, parameter[constant[urlextract]]]
<ast.Try object at 0x7da204344490> | keyword[def] identifier[_urlextract_cli] ():
literal[string]
keyword[import] identifier[argparse]
keyword[def] identifier[get_args] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[description] = literal[string]
literal[string]
literal[string] )
identifier[ver] = identifier[URLExtract] . identifier[get_version] ()
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[version] = literal[string] . identifier[format] ( identifier[ver] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[nargs] = literal[string] , identifier[metavar] = literal[string] ,
identifier[type] = identifier[argparse] . identifier[FileType] ( identifier[encoding] = literal[string] ), identifier[default] = identifier[sys] . identifier[stdin] ,
identifier[help] = literal[string] )
identifier[parsed_args] = identifier[parser] . identifier[parse_args] ()
keyword[return] identifier[parsed_args]
identifier[args] = identifier[get_args] ()
identifier[logging] . identifier[basicConfig] (
identifier[level] = identifier[logging] . identifier[INFO] , identifier[stream] = identifier[sys] . identifier[stderr] ,
identifier[format] = literal[string] )
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
keyword[try] :
identifier[urlextract] = identifier[URLExtract] ()
identifier[urlextract] . identifier[update_when_older] ( literal[int] )
identifier[content] = identifier[args] . identifier[input_file] . identifier[read] ()
keyword[for] identifier[url] keyword[in] identifier[urlextract] . identifier[find_urls] ( identifier[content] , identifier[args] . identifier[unique] ):
identifier[print] ( identifier[url] )
keyword[except] identifier[CacheFileError] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( identifier[str] ( identifier[e] ))
identifier[sys] . identifier[exit] (- literal[int] )
keyword[finally] :
identifier[args] . identifier[input_file] . identifier[close] () | def _urlextract_cli():
"""
urlextract - command line program that will print all URLs to stdout
Usage: urlextract [input_file] [-u] [-v]
input_file - text file with URLs to extract
"""
import argparse
def get_args():
"""
Parse programs arguments
"""
parser = argparse.ArgumentParser(description='urlextract - prints out all URLs that were found in input file or stdin based on locating their TLDs')
ver = URLExtract.get_version()
parser.add_argument('-v', '--version', action='version', version='%(prog)s - version {}'.format(ver))
parser.add_argument('-u', '--unique', dest='unique', action='store_true', help='print out only unique URLs found in file.')
parser.add_argument('input_file', nargs='?', metavar='<input_file>', type=argparse.FileType(encoding='UTF-8'), default=sys.stdin, help='input text file with URLs to extract. [UTF-8]')
parsed_args = parser.parse_args()
return parsed_args
args = get_args()
logging.basicConfig(level=logging.INFO, stream=sys.stderr, format='%(asctime)s - %(levelname)s (%(name)s): %(message)s')
logger = logging.getLogger('urlextract')
try:
urlextract = URLExtract()
urlextract.update_when_older(30)
content = args.input_file.read()
for url in urlextract.find_urls(content, args.unique):
print(url) # depends on [control=['for'], data=['url']] # depends on [control=['try'], data=[]]
except CacheFileError as e:
logger.error(str(e))
sys.exit(-1) # depends on [control=['except'], data=['e']]
finally:
args.input_file.close() |
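The same extraction can be driven without the CLI wrapper; a short sketch (the second positional argument mirrors args.unique above):

from urlextract import URLExtract

extractor = URLExtract()
text = "See https://example.com and http://example.com/docs for details."
for url in extractor.find_urls(text, True):  # True -> only unique URLs
    print(url)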
def close(self):
"""Close connection to device."""
if self._transport:
self._transport.close()
self._transport = None
self._chacha = None | def function[close, parameter[self]]:
constant[Close connection to device.]
if name[self]._transport begin[:]
call[name[self]._transport.close, parameter[]]
name[self]._transport assign[=] constant[None]
name[self]._chacha assign[=] constant[None] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_transport] :
identifier[self] . identifier[_transport] . identifier[close] ()
identifier[self] . identifier[_transport] = keyword[None]
identifier[self] . identifier[_chacha] = keyword[None] | def close(self):
"""Close connection to device."""
if self._transport:
self._transport.close() # depends on [control=['if'], data=[]]
self._transport = None
self._chacha = None |
def pangocairo_create_context(cr):
"""
If python-gi-cairo is not installed, using PangoCairo.create_context
    dies with an unhelpful KeyError; check for that and output something
    useful.
"""
# TODO move this to core.backend
try:
return PangoCairo.create_context(cr)
except KeyError as e:
if e.args == ('could not find foreign type Context',):
raise ShoebotInstallError("Error creating PangoCairo missing dependency: python-gi-cairo")
else:
raise | def function[pangocairo_create_context, parameter[cr]]:
constant[
If python-gi-cairo is not installed, using PangoCairo.create_context
    dies with an unhelpful KeyError; check for that and output something
    useful.
]
<ast.Try object at 0x7da18dc06110> | keyword[def] identifier[pangocairo_create_context] ( identifier[cr] ):
literal[string]
keyword[try] :
keyword[return] identifier[PangoCairo] . identifier[create_context] ( identifier[cr] )
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[args] ==( literal[string] ,):
keyword[raise] identifier[ShoebotInstallError] ( literal[string] )
keyword[else] :
keyword[raise] | def pangocairo_create_context(cr):
"""
If python-gi-cairo is not installed, using PangoCairo.create_context
    dies with an unhelpful KeyError; check for that and output something
    useful.
"""
# TODO move this to core.backend
try:
return PangoCairo.create_context(cr) # depends on [control=['try'], data=[]]
except KeyError as e:
if e.args == ('could not find foreign type Context',):
raise ShoebotInstallError('Error creating PangoCairo missing dependency: python-gi-cairo') # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']] |
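A hedged usage sketch for the wrapper above, assuming pycairo is installed and PangoCairo is already imported in the surrounding module:

import cairo

surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 200, 100)
cr = cairo.Context(surface)
pango_ctx = pangocairo_create_context(cr)  # raises ShoebotInstallError if python-gi-cairo is missing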
def delete_before(self, segment_info):
"""
Delete all base backups and WAL before a given segment
This is the most commonly-used deletion operator; to delete
old backups and WAL.
"""
# This will delete all base backup data before segment_info.
self._delete_base_backups_before(segment_info)
# This will delete all WAL segments before segment_info.
self._delete_wals_before(segment_info)
if self.deleter:
self.deleter.close() | def function[delete_before, parameter[self, segment_info]]:
constant[
Delete all base backups and WAL before a given segment
    This is the most commonly used deletion operation; it deletes
    old backups and WAL.
]
call[name[self]._delete_base_backups_before, parameter[name[segment_info]]]
call[name[self]._delete_wals_before, parameter[name[segment_info]]]
if name[self].deleter begin[:]
call[name[self].deleter.close, parameter[]] | keyword[def] identifier[delete_before] ( identifier[self] , identifier[segment_info] ):
literal[string]
identifier[self] . identifier[_delete_base_backups_before] ( identifier[segment_info] )
identifier[self] . identifier[_delete_wals_before] ( identifier[segment_info] )
keyword[if] identifier[self] . identifier[deleter] :
identifier[self] . identifier[deleter] . identifier[close] () | def delete_before(self, segment_info):
"""
Delete all base backups and WAL before a given segment
    This is the most commonly used deletion operation; it deletes
    old backups and WAL.
"""
# This will delete all base backup data before segment_info.
self._delete_base_backups_before(segment_info)
# This will delete all WAL segments before segment_info.
self._delete_wals_before(segment_info)
if self.deleter:
self.deleter.close() # depends on [control=['if'], data=[]] |
def register_updates(self, *updates):
"""
Register updates that will be executed in each iteration.
"""
for key, node in updates:
if key not in self._registered_updates:
self.updates.append((key, node))
self._registered_updates.add(key) | def function[register_updates, parameter[self]]:
constant[
Register updates that will be executed in each iteration.
]
for taget[tuple[[<ast.Name object at 0x7da1b0381780>, <ast.Name object at 0x7da1b0380fa0>]]] in starred[name[updates]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]._registered_updates] begin[:]
call[name[self].updates.append, parameter[tuple[[<ast.Name object at 0x7da1b0351c00>, <ast.Name object at 0x7da1b03524a0>]]]]
call[name[self]._registered_updates.add, parameter[name[key]]] | keyword[def] identifier[register_updates] ( identifier[self] ,* identifier[updates] ):
literal[string]
keyword[for] identifier[key] , identifier[node] keyword[in] identifier[updates] :
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_registered_updates] :
identifier[self] . identifier[updates] . identifier[append] (( identifier[key] , identifier[node] ))
identifier[self] . identifier[_registered_updates] . identifier[add] ( identifier[key] ) | def register_updates(self, *updates):
"""
Register updates that will be executed in each iteration.
"""
for (key, node) in updates:
if key not in self._registered_updates:
self.updates.append((key, node))
self._registered_updates.add(key) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] |
def seasons(ephemeris):
"""Build a function of time that returns the quarter of the year.
The function that this returns will expect a single argument that is
a :class:`~skyfield.timelib.Time` and will return 0 through 3 for
the seasons Spring, Summer, Autumn, and Winter.
"""
earth = ephemeris['earth']
sun = ephemeris['sun']
def season_at(t):
"""Return season 0 (Spring) through 3 (Winter) at time `t`."""
t._nutation_angles = iau2000b(t.tt)
e = earth.at(t)
_, slon, _ = e.observe(sun).apparent().ecliptic_latlon('date')
return (slon.radians // (tau / 4) % 4).astype(int)
season_at.rough_period = 90.0
return season_at | def function[seasons, parameter[ephemeris]]:
constant[Build a function of time that returns the quarter of the year.
The function that this returns will expect a single argument that is
a :class:`~skyfield.timelib.Time` and will return 0 through 3 for
the seasons Spring, Summer, Autumn, and Winter.
]
variable[earth] assign[=] call[name[ephemeris]][constant[earth]]
variable[sun] assign[=] call[name[ephemeris]][constant[sun]]
def function[season_at, parameter[t]]:
constant[Return season 0 (Spring) through 3 (Winter) at time `t`.]
name[t]._nutation_angles assign[=] call[name[iau2000b], parameter[name[t].tt]]
variable[e] assign[=] call[name[earth].at, parameter[name[t]]]
<ast.Tuple object at 0x7da1b175c9d0> assign[=] call[call[call[name[e].observe, parameter[name[sun]]].apparent, parameter[]].ecliptic_latlon, parameter[constant[date]]]
return[call[binary_operation[binary_operation[name[slon].radians <ast.FloorDiv object at 0x7da2590d6bc0> binary_operation[name[tau] / constant[4]]] <ast.Mod object at 0x7da2590d6920> constant[4]].astype, parameter[name[int]]]]
name[season_at].rough_period assign[=] constant[90.0]
return[name[season_at]] | keyword[def] identifier[seasons] ( identifier[ephemeris] ):
literal[string]
identifier[earth] = identifier[ephemeris] [ literal[string] ]
identifier[sun] = identifier[ephemeris] [ literal[string] ]
keyword[def] identifier[season_at] ( identifier[t] ):
literal[string]
identifier[t] . identifier[_nutation_angles] = identifier[iau2000b] ( identifier[t] . identifier[tt] )
identifier[e] = identifier[earth] . identifier[at] ( identifier[t] )
identifier[_] , identifier[slon] , identifier[_] = identifier[e] . identifier[observe] ( identifier[sun] ). identifier[apparent] (). identifier[ecliptic_latlon] ( literal[string] )
keyword[return] ( identifier[slon] . identifier[radians] //( identifier[tau] / literal[int] )% literal[int] ). identifier[astype] ( identifier[int] )
identifier[season_at] . identifier[rough_period] = literal[int]
keyword[return] identifier[season_at] | def seasons(ephemeris):
"""Build a function of time that returns the quarter of the year.
The function that this returns will expect a single argument that is
a :class:`~skyfield.timelib.Time` and will return 0 through 3 for
the seasons Spring, Summer, Autumn, and Winter.
"""
earth = ephemeris['earth']
sun = ephemeris['sun']
def season_at(t):
"""Return season 0 (Spring) through 3 (Winter) at time `t`."""
t._nutation_angles = iau2000b(t.tt)
e = earth.at(t)
(_, slon, _) = e.observe(sun).apparent().ecliptic_latlon('date')
return (slon.radians // (tau / 4) % 4).astype(int)
season_at.rough_period = 90.0
return season_at |
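Because season_at carries a rough_period, it plugs into Skyfield's discrete-event search; a sketch assuming this is skyfield.almanac.seasons and a local ephemeris file:

from skyfield import api, almanac

ts = api.load.timescale()
eph = api.load('de421.bsp')  # assumes the ephemeris file is available locally
t0, t1 = ts.utc(2020, 1, 1), ts.utc(2021, 1, 1)
times, season_indices = almanac.find_discrete(t0, t1, almanac.seasons(eph))
for t, s in zip(times, season_indices):
    print(t.utc_iso(), s)  # 0=Spring, 1=Summer, 2=Autumn, 3=Winter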
def sources_add(name, ruby=None, user=None):
'''
Make sure that a gem source is added.
name
The URL of the gem source to be added
ruby: None
For RVM or rbenv installations: the ruby version and gemset to target.
user: None
The user under which to run the ``gem`` command
.. versionadded:: 0.17.0
'''
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
if name in __salt__['gem.sources_list'](ruby, runas=user):
ret['result'] = True
ret['comment'] = 'Gem source is already added.'
return ret
if __opts__['test']:
ret['comment'] = 'The gem source {0} would have been added.'.format(name)
return ret
if __salt__['gem.sources_add'](source_uri=name, ruby=ruby, runas=user):
ret['result'] = True
ret['changes'][name] = 'Installed'
ret['comment'] = 'Gem source was successfully added.'
else:
ret['result'] = False
ret['comment'] = 'Could not add gem source.'
return ret | def function[sources_add, parameter[name, ruby, user]]:
constant[
Make sure that a gem source is added.
name
The URL of the gem source to be added
ruby: None
For RVM or rbenv installations: the ruby version and gemset to target.
user: None
The user under which to run the ``gem`` command
.. versionadded:: 0.17.0
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da2045640a0>, <ast.Constant object at 0x7da204567ee0>, <ast.Constant object at 0x7da204567eb0>, <ast.Constant object at 0x7da204566f20>], [<ast.Name object at 0x7da204564ac0>, <ast.Constant object at 0x7da204564190>, <ast.Constant object at 0x7da204566cb0>, <ast.Dict object at 0x7da2045662c0>]]
if compare[name[name] in call[call[name[__salt__]][constant[gem.sources_list]], parameter[name[ruby]]]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] constant[Gem source is already added.]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[The gem source {0} would have been added.].format, parameter[name[name]]]
return[name[ret]]
if call[call[name[__salt__]][constant[gem.sources_add]], parameter[]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[call[name[ret]][constant[changes]]][name[name]] assign[=] constant[Installed]
call[name[ret]][constant[comment]] assign[=] constant[Gem source was successfully added.]
return[name[ret]] | keyword[def] identifier[sources_add] ( identifier[name] , identifier[ruby] = keyword[None] , identifier[user] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] :{}}
keyword[if] identifier[name] keyword[in] identifier[__salt__] [ literal[string] ]( identifier[ruby] , identifier[runas] = identifier[user] ):
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[if] identifier[__salt__] [ literal[string] ]( identifier[source_uri] = identifier[name] , identifier[ruby] = identifier[ruby] , identifier[runas] = identifier[user] ):
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string]
identifier[ret] [ literal[string] ]= literal[string]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret] | def sources_add(name, ruby=None, user=None):
"""
Make sure that a gem source is added.
name
The URL of the gem source to be added
ruby: None
For RVM or rbenv installations: the ruby version and gemset to target.
user: None
The user under which to run the ``gem`` command
.. versionadded:: 0.17.0
"""
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
if name in __salt__['gem.sources_list'](ruby, runas=user):
ret['result'] = True
ret['comment'] = 'Gem source is already added.'
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['comment'] = 'The gem source {0} would have been added.'.format(name)
return ret # depends on [control=['if'], data=[]]
if __salt__['gem.sources_add'](source_uri=name, ruby=ruby, runas=user):
ret['result'] = True
ret['changes'][name] = 'Installed'
ret['comment'] = 'Gem source was successfully added.' # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Could not add gem source.'
return ret |
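sources_add relies on the dunders Salt injects at load time; a hypothetical harness that stubs them in the same module so the state can be exercised standalone:

# Hypothetical stand-ins for Salt's injected globals (defined in the same module as sources_add).
__salt__ = {
    'gem.sources_list': lambda ruby, runas=None: [],
    'gem.sources_add': lambda source_uri, ruby=None, runas=None: True,
}
__opts__ = {'test': False}

print(sources_add('https://rubygems.org', user='deploy'))
# expected: result True, changes {'https://rubygems.org': 'Installed'}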
def InitFromNotification(self, notification, is_pending=False):
"""Initializes this object from an existing notification.
Args:
notification: A rdfvalues.flows.Notification object.
is_pending: Indicates whether the user has already seen this notification
or not.
Returns:
The current instance.
"""
self.timestamp = notification.timestamp
self.message = notification.message
self.subject = str(notification.subject)
self.is_pending = is_pending
reference_type_enum = ApiNotificationReference.Type
# Please see the comments to aff4_objects.GRRUser.Notify implementation
# for the details of notification.type format. Short summary:
# notification.type may be one of legacy values (i.e. "ViewObject") or
# have a format of "[legacy value]:[new-style notification type]", i.e.
# "ViewObject:TYPE_CLIENT_INTERROGATED".
legacy_type = None
if ":" in notification.type:
legacy_type, new_type = notification.type.split(":", 2)
self.notification_type = new_type
else:
legacy_type = notification.type
# TODO(user): refactor notifications, so that we send a meaningful
# notification from the start, so that we don't have to do the
# bridging/conversion/guessing here.
components = self._GetUrnComponents(notification)
if legacy_type == "Discovery":
self.reference.type = reference_type_enum.CLIENT
self.reference.client = ApiNotificationClientReference(
client_id=components[0])
elif legacy_type == "ViewObject":
if len(components) >= 2 and components[0] == "hunts":
self.reference.type = reference_type_enum.HUNT
self.reference.hunt.hunt_id = components[1]
elif len(components) >= 2 and components[0] == "cron":
self.reference.type = reference_type_enum.CRON
self.reference.cron.cron_job_id = components[1]
elif len(components) >= 3 and components[1] == "flows":
self.reference.type = reference_type_enum.FLOW
self.reference.flow.flow_id = components[2]
self.reference.flow.client_id = components[0]
elif len(components) == 1 and rdf_client.ClientURN.Validate(
components[0]):
self.reference.type = reference_type_enum.CLIENT
self.reference.client.client_id = components[0]
else:
if notification.subject:
path = notification.subject.Path()
for prefix in itervalues(rdf_paths.PathSpec.AFF4_PREFIXES):
part = "/%s%s" % (components[0], prefix)
if path.startswith(part):
self.reference.type = reference_type_enum.VFS
self.reference.vfs.client_id = components[0]
self.reference.vfs.vfs_path = (prefix +
path[len(part):]).lstrip("/")
break
if self.reference.type != reference_type_enum.VFS:
self.reference.type = reference_type_enum.UNKNOWN
self.reference.unknown.subject_urn = notification.subject
elif legacy_type == "FlowStatus":
if not components or not rdf_client.ClientURN.Validate(components[0]):
self.reference.type = reference_type_enum.UNKNOWN
self.reference.unknown.subject_urn = notification.subject
else:
self.reference.type = reference_type_enum.FLOW
self.reference.flow.flow_id = notification.source.Basename()
self.reference.flow.client_id = components[0]
# TODO(user): refactor GrantAccess notification so that we don't have
# to infer approval type from the URN.
elif legacy_type == "GrantAccess":
if rdf_client.ClientURN.Validate(components[1]):
self.reference.type = reference_type_enum.CLIENT_APPROVAL
self.reference.client_approval.client_id = components[1]
self.reference.client_approval.approval_id = components[-1]
self.reference.client_approval.username = components[-2]
elif components[1] == "hunts":
self.reference.type = reference_type_enum.HUNT_APPROVAL
self.reference.hunt_approval.hunt_id = components[2]
self.reference.hunt_approval.approval_id = components[-1]
self.reference.hunt_approval.username = components[-2]
elif components[1] == "cron":
self.reference.type = reference_type_enum.CRON_JOB_APPROVAL
self.reference.cron_job_approval.cron_job_id = components[2]
self.reference.cron_job_approval.approval_id = components[-1]
self.reference.cron_job_approval.username = components[-2]
else:
self.reference.type = reference_type_enum.UNKNOWN
self.reference.unknown.subject_urn = notification.subject
self.reference.unknown.source_urn = notification.source
return self | def function[InitFromNotification, parameter[self, notification, is_pending]]:
constant[Initializes this object from an existing notification.
Args:
notification: A rdfvalues.flows.Notification object.
is_pending: Indicates whether the user has already seen this notification
or not.
Returns:
The current instance.
]
name[self].timestamp assign[=] name[notification].timestamp
name[self].message assign[=] name[notification].message
name[self].subject assign[=] call[name[str], parameter[name[notification].subject]]
name[self].is_pending assign[=] name[is_pending]
variable[reference_type_enum] assign[=] name[ApiNotificationReference].Type
variable[legacy_type] assign[=] constant[None]
if compare[constant[:] in name[notification].type] begin[:]
<ast.Tuple object at 0x7da1b1c1bd90> assign[=] call[name[notification].type.split, parameter[constant[:], constant[2]]]
name[self].notification_type assign[=] name[new_type]
variable[components] assign[=] call[name[self]._GetUrnComponents, parameter[name[notification]]]
if compare[name[legacy_type] equal[==] constant[Discovery]] begin[:]
name[self].reference.type assign[=] name[reference_type_enum].CLIENT
name[self].reference.client assign[=] call[name[ApiNotificationClientReference], parameter[]]
return[name[self]] | keyword[def] identifier[InitFromNotification] ( identifier[self] , identifier[notification] , identifier[is_pending] = keyword[False] ):
literal[string]
identifier[self] . identifier[timestamp] = identifier[notification] . identifier[timestamp]
identifier[self] . identifier[message] = identifier[notification] . identifier[message]
identifier[self] . identifier[subject] = identifier[str] ( identifier[notification] . identifier[subject] )
identifier[self] . identifier[is_pending] = identifier[is_pending]
identifier[reference_type_enum] = identifier[ApiNotificationReference] . identifier[Type]
identifier[legacy_type] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[notification] . identifier[type] :
identifier[legacy_type] , identifier[new_type] = identifier[notification] . identifier[type] . identifier[split] ( literal[string] , literal[int] )
identifier[self] . identifier[notification_type] = identifier[new_type]
keyword[else] :
identifier[legacy_type] = identifier[notification] . identifier[type]
identifier[components] = identifier[self] . identifier[_GetUrnComponents] ( identifier[notification] )
keyword[if] identifier[legacy_type] == literal[string] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[CLIENT]
identifier[self] . identifier[reference] . identifier[client] = identifier[ApiNotificationClientReference] (
identifier[client_id] = identifier[components] [ literal[int] ])
keyword[elif] identifier[legacy_type] == literal[string] :
keyword[if] identifier[len] ( identifier[components] )>= literal[int] keyword[and] identifier[components] [ literal[int] ]== literal[string] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[HUNT]
identifier[self] . identifier[reference] . identifier[hunt] . identifier[hunt_id] = identifier[components] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[components] )>= literal[int] keyword[and] identifier[components] [ literal[int] ]== literal[string] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[CRON]
identifier[self] . identifier[reference] . identifier[cron] . identifier[cron_job_id] = identifier[components] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[components] )>= literal[int] keyword[and] identifier[components] [ literal[int] ]== literal[string] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[FLOW]
identifier[self] . identifier[reference] . identifier[flow] . identifier[flow_id] = identifier[components] [ literal[int] ]
identifier[self] . identifier[reference] . identifier[flow] . identifier[client_id] = identifier[components] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[components] )== literal[int] keyword[and] identifier[rdf_client] . identifier[ClientURN] . identifier[Validate] (
identifier[components] [ literal[int] ]):
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[CLIENT]
identifier[self] . identifier[reference] . identifier[client] . identifier[client_id] = identifier[components] [ literal[int] ]
keyword[else] :
keyword[if] identifier[notification] . identifier[subject] :
identifier[path] = identifier[notification] . identifier[subject] . identifier[Path] ()
keyword[for] identifier[prefix] keyword[in] identifier[itervalues] ( identifier[rdf_paths] . identifier[PathSpec] . identifier[AFF4_PREFIXES] ):
identifier[part] = literal[string] %( identifier[components] [ literal[int] ], identifier[prefix] )
keyword[if] identifier[path] . identifier[startswith] ( identifier[part] ):
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[VFS]
identifier[self] . identifier[reference] . identifier[vfs] . identifier[client_id] = identifier[components] [ literal[int] ]
identifier[self] . identifier[reference] . identifier[vfs] . identifier[vfs_path] =( identifier[prefix] +
identifier[path] [ identifier[len] ( identifier[part] ):]). identifier[lstrip] ( literal[string] )
keyword[break]
keyword[if] identifier[self] . identifier[reference] . identifier[type] != identifier[reference_type_enum] . identifier[VFS] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[UNKNOWN]
identifier[self] . identifier[reference] . identifier[unknown] . identifier[subject_urn] = identifier[notification] . identifier[subject]
keyword[elif] identifier[legacy_type] == literal[string] :
keyword[if] keyword[not] identifier[components] keyword[or] keyword[not] identifier[rdf_client] . identifier[ClientURN] . identifier[Validate] ( identifier[components] [ literal[int] ]):
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[UNKNOWN]
identifier[self] . identifier[reference] . identifier[unknown] . identifier[subject_urn] = identifier[notification] . identifier[subject]
keyword[else] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[FLOW]
identifier[self] . identifier[reference] . identifier[flow] . identifier[flow_id] = identifier[notification] . identifier[source] . identifier[Basename] ()
identifier[self] . identifier[reference] . identifier[flow] . identifier[client_id] = identifier[components] [ literal[int] ]
keyword[elif] identifier[legacy_type] == literal[string] :
keyword[if] identifier[rdf_client] . identifier[ClientURN] . identifier[Validate] ( identifier[components] [ literal[int] ]):
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[CLIENT_APPROVAL]
identifier[self] . identifier[reference] . identifier[client_approval] . identifier[client_id] = identifier[components] [ literal[int] ]
identifier[self] . identifier[reference] . identifier[client_approval] . identifier[approval_id] = identifier[components] [- literal[int] ]
identifier[self] . identifier[reference] . identifier[client_approval] . identifier[username] = identifier[components] [- literal[int] ]
keyword[elif] identifier[components] [ literal[int] ]== literal[string] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[HUNT_APPROVAL]
identifier[self] . identifier[reference] . identifier[hunt_approval] . identifier[hunt_id] = identifier[components] [ literal[int] ]
identifier[self] . identifier[reference] . identifier[hunt_approval] . identifier[approval_id] = identifier[components] [- literal[int] ]
identifier[self] . identifier[reference] . identifier[hunt_approval] . identifier[username] = identifier[components] [- literal[int] ]
keyword[elif] identifier[components] [ literal[int] ]== literal[string] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[CRON_JOB_APPROVAL]
identifier[self] . identifier[reference] . identifier[cron_job_approval] . identifier[cron_job_id] = identifier[components] [ literal[int] ]
identifier[self] . identifier[reference] . identifier[cron_job_approval] . identifier[approval_id] = identifier[components] [- literal[int] ]
identifier[self] . identifier[reference] . identifier[cron_job_approval] . identifier[username] = identifier[components] [- literal[int] ]
keyword[else] :
identifier[self] . identifier[reference] . identifier[type] = identifier[reference_type_enum] . identifier[UNKNOWN]
identifier[self] . identifier[reference] . identifier[unknown] . identifier[subject_urn] = identifier[notification] . identifier[subject]
identifier[self] . identifier[reference] . identifier[unknown] . identifier[source_urn] = identifier[notification] . identifier[source]
keyword[return] identifier[self] | def InitFromNotification(self, notification, is_pending=False):
"""Initializes this object from an existing notification.
Args:
notification: A rdfvalues.flows.Notification object.
is_pending: Indicates whether the user has already seen this notification
or not.
Returns:
The current instance.
"""
self.timestamp = notification.timestamp
self.message = notification.message
self.subject = str(notification.subject)
self.is_pending = is_pending
reference_type_enum = ApiNotificationReference.Type
# Please see the comments to aff4_objects.GRRUser.Notify implementation
# for the details of notification.type format. Short summary:
# notification.type may be one of legacy values (i.e. "ViewObject") or
# have a format of "[legacy value]:[new-style notification type]", i.e.
# "ViewObject:TYPE_CLIENT_INTERROGATED".
legacy_type = None
if ':' in notification.type:
(legacy_type, new_type) = notification.type.split(':', 2)
self.notification_type = new_type # depends on [control=['if'], data=[]]
else:
legacy_type = notification.type
# TODO(user): refactor notifications, so that we send a meaningful
# notification from the start, so that we don't have to do the
# bridging/conversion/guessing here.
components = self._GetUrnComponents(notification)
if legacy_type == 'Discovery':
self.reference.type = reference_type_enum.CLIENT
self.reference.client = ApiNotificationClientReference(client_id=components[0]) # depends on [control=['if'], data=[]]
elif legacy_type == 'ViewObject':
if len(components) >= 2 and components[0] == 'hunts':
self.reference.type = reference_type_enum.HUNT
self.reference.hunt.hunt_id = components[1] # depends on [control=['if'], data=[]]
elif len(components) >= 2 and components[0] == 'cron':
self.reference.type = reference_type_enum.CRON
self.reference.cron.cron_job_id = components[1] # depends on [control=['if'], data=[]]
elif len(components) >= 3 and components[1] == 'flows':
self.reference.type = reference_type_enum.FLOW
self.reference.flow.flow_id = components[2]
self.reference.flow.client_id = components[0] # depends on [control=['if'], data=[]]
elif len(components) == 1 and rdf_client.ClientURN.Validate(components[0]):
self.reference.type = reference_type_enum.CLIENT
self.reference.client.client_id = components[0] # depends on [control=['if'], data=[]]
else:
if notification.subject:
path = notification.subject.Path()
for prefix in itervalues(rdf_paths.PathSpec.AFF4_PREFIXES):
part = '/%s%s' % (components[0], prefix)
if path.startswith(part):
self.reference.type = reference_type_enum.VFS
self.reference.vfs.client_id = components[0]
self.reference.vfs.vfs_path = (prefix + path[len(part):]).lstrip('/')
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prefix']] # depends on [control=['if'], data=[]]
if self.reference.type != reference_type_enum.VFS:
self.reference.type = reference_type_enum.UNKNOWN
self.reference.unknown.subject_urn = notification.subject # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif legacy_type == 'FlowStatus':
if not components or not rdf_client.ClientURN.Validate(components[0]):
self.reference.type = reference_type_enum.UNKNOWN
self.reference.unknown.subject_urn = notification.subject # depends on [control=['if'], data=[]]
else:
self.reference.type = reference_type_enum.FLOW
self.reference.flow.flow_id = notification.source.Basename()
self.reference.flow.client_id = components[0] # depends on [control=['if'], data=[]]
# TODO(user): refactor GrantAccess notification so that we don't have
# to infer approval type from the URN.
elif legacy_type == 'GrantAccess':
if rdf_client.ClientURN.Validate(components[1]):
self.reference.type = reference_type_enum.CLIENT_APPROVAL
self.reference.client_approval.client_id = components[1]
self.reference.client_approval.approval_id = components[-1]
self.reference.client_approval.username = components[-2] # depends on [control=['if'], data=[]]
elif components[1] == 'hunts':
self.reference.type = reference_type_enum.HUNT_APPROVAL
self.reference.hunt_approval.hunt_id = components[2]
self.reference.hunt_approval.approval_id = components[-1]
self.reference.hunt_approval.username = components[-2] # depends on [control=['if'], data=[]]
elif components[1] == 'cron':
self.reference.type = reference_type_enum.CRON_JOB_APPROVAL
self.reference.cron_job_approval.cron_job_id = components[2]
self.reference.cron_job_approval.approval_id = components[-1]
self.reference.cron_job_approval.username = components[-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.reference.type = reference_type_enum.UNKNOWN
self.reference.unknown.subject_urn = notification.subject
self.reference.unknown.source_urn = notification.source
return self |
def getStatuses(self, repo_user, repo_name, sha):
"""
:param sha: Full sha to list the statuses from.
:return: A deferred with the result from GitHub.
"""
return self.api.makeRequest(
['repos', repo_user, repo_name, 'statuses', sha],
method='GET') | def function[getStatuses, parameter[self, repo_user, repo_name, sha]]:
constant[
:param sha: Full sha to list the statuses from.
:return: A deferred with the result from GitHub.
]
return[call[name[self].api.makeRequest, parameter[list[[<ast.Constant object at 0x7da1b27e0700>, <ast.Name object at 0x7da1b27e1f30>, <ast.Name object at 0x7da1b27e16c0>, <ast.Constant object at 0x7da1b27e3970>, <ast.Name object at 0x7da1b27e0880>]]]]] | keyword[def] identifier[getStatuses] ( identifier[self] , identifier[repo_user] , identifier[repo_name] , identifier[sha] ):
literal[string]
keyword[return] identifier[self] . identifier[api] . identifier[makeRequest] (
[ literal[string] , identifier[repo_user] , identifier[repo_name] , literal[string] , identifier[sha] ],
identifier[method] = literal[string] ) | def getStatuses(self, repo_user, repo_name, sha):
"""
:param sha: Full sha to list the statuses from.
:return: A deferred with the result from GitHub.
"""
return self.api.makeRequest(['repos', repo_user, repo_name, 'statuses', sha], method='GET') |
def parent(groups, ID):
"""given a groups dictionary and an ID, return its actual parent ID."""
if ID in groups.keys():
return ID # already a parent
if not ID in groups.keys():
for actualParent in groups.keys():
if ID in groups[actualParent]:
return actualParent # found the actual parent
return None | def function[parent, parameter[groups, ID]]:
constant[Given a groups dictionary and an ID, return its actual parent ID.]
if compare[name[ID] in call[name[groups].keys, parameter[]]] begin[:]
return[name[ID]]
if <ast.UnaryOp object at 0x7da1afe06e90> begin[:]
for taget[name[actualParent]] in starred[call[name[groups].keys, parameter[]]] begin[:]
if compare[name[ID] in call[name[groups]][name[actualParent]]] begin[:]
return[name[actualParent]]
return[constant[None]] | keyword[def] identifier[parent] ( identifier[groups] , identifier[ID] ):
literal[string]
keyword[if] identifier[ID] keyword[in] identifier[groups] . identifier[keys] ():
keyword[return] identifier[ID]
keyword[if] keyword[not] identifier[ID] keyword[in] identifier[groups] . identifier[keys] ():
keyword[for] identifier[actualParent] keyword[in] identifier[groups] . identifier[keys] ():
keyword[if] identifier[ID] keyword[in] identifier[groups] [ identifier[actualParent] ]:
keyword[return] identifier[actualParent]
keyword[return] keyword[None] | def parent(groups, ID):
"""given a groups dictionary and an ID, return its actual parent ID."""
if ID in groups.keys():
return ID # already a parent # depends on [control=['if'], data=['ID']]
if not ID in groups.keys():
for actualParent in groups.keys():
if ID in groups[actualParent]:
return actualParent # found the actual parent # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['actualParent']] # depends on [control=['if'], data=[]]
return None |
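A minimal usage sketch for parent() above; the expected shape of groups (a parent ID mapped to a list of member IDs) is inferred from the lookup logic:

groups = {'A': ['x', 'y'], 'B': ['z']}
assert parent(groups, 'A') == 'A'         # already a parent
assert parent(groups, 'z') == 'B'         # resolved through membership
assert parent(groups, 'missing') is None  # unknown IDs fall through to None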
def _drop_tables(self):
"""
Clear the subdomain db's tables
"""
drop_cmd = "DROP TABLE IF EXISTS {};"
for table in [self.subdomain_table, self.blocked_table]:
cursor = self.conn.cursor()
db_query_execute(cursor, drop_cmd.format(table), ()) | def function[_drop_tables, parameter[self]]:
constant[
Clear the subdomain db's tables
]
variable[drop_cmd] assign[=] constant[DROP TABLE IF EXISTS {};]
for taget[name[table]] in starred[list[[<ast.Attribute object at 0x7da1b2347c70>, <ast.Attribute object at 0x7da1b2346d40>]]] begin[:]
variable[cursor] assign[=] call[name[self].conn.cursor, parameter[]]
call[name[db_query_execute], parameter[name[cursor], call[name[drop_cmd].format, parameter[name[table]]], tuple[[]]]] | keyword[def] identifier[_drop_tables] ( identifier[self] ):
literal[string]
identifier[drop_cmd] = literal[string]
keyword[for] identifier[table] keyword[in] [ identifier[self] . identifier[subdomain_table] , identifier[self] . identifier[blocked_table] ]:
identifier[cursor] = identifier[self] . identifier[conn] . identifier[cursor] ()
identifier[db_query_execute] ( identifier[cursor] , identifier[drop_cmd] . identifier[format] ( identifier[table] ),()) | def _drop_tables(self):
"""
Clear the subdomain db's tables
"""
drop_cmd = 'DROP TABLE IF EXISTS {};'
for table in [self.subdomain_table, self.blocked_table]:
cursor = self.conn.cursor()
db_query_execute(cursor, drop_cmd.format(table), ()) # depends on [control=['for'], data=['table']] |
def xrefs(self, nid, bidirectional=False):
"""
Fetches xrefs for a node
Arguments
---------
nid : str
Node identifier for entity to be queried
bidirectional : bool
If True, include nodes xreffed to nid
Return
------
list[str]
"""
if self.xref_graph is not None:
xg = self.xref_graph
if nid not in xg:
return []
if bidirectional:
return list(xg.neighbors(nid))
else:
return [x for x in xg.neighbors(nid) if xg[nid][x][0]['source'] == nid]
return [] | def function[xrefs, parameter[self, nid, bidirectional]]:
constant[
Fetches xrefs for a node
Arguments
---------
nid : str
Node identifier for entity to be queried
bidirectional : bool
If True, include nodes xreffed to nid
Return
------
list[str]
]
if compare[name[self].xref_graph is_not constant[None]] begin[:]
variable[xg] assign[=] name[self].xref_graph
if compare[name[nid] <ast.NotIn object at 0x7da2590d7190> name[xg]] begin[:]
return[list[[]]]
if name[bidirectional] begin[:]
return[call[name[list], parameter[call[name[xg].neighbors, parameter[name[nid]]]]]]
return[list[[]]] | keyword[def] identifier[xrefs] ( identifier[self] , identifier[nid] , identifier[bidirectional] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[xref_graph] keyword[is] keyword[not] keyword[None] :
identifier[xg] = identifier[self] . identifier[xref_graph]
keyword[if] identifier[nid] keyword[not] keyword[in] identifier[xg] :
keyword[return] []
keyword[if] identifier[bidirectional] :
keyword[return] identifier[list] ( identifier[xg] . identifier[neighbors] ( identifier[nid] ))
keyword[else] :
keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[xg] . identifier[neighbors] ( identifier[nid] ) keyword[if] identifier[xg] [ identifier[nid] ][ identifier[x] ][ literal[int] ][ literal[string] ]== identifier[nid] ]
keyword[return] [] | def xrefs(self, nid, bidirectional=False):
"""
Fetches xrefs for a node
Arguments
---------
nid : str
Node identifier for entity to be queried
bidirectional : bool
If True, include nodes xreffed to nid
Return
------
list[str]
"""
if self.xref_graph is not None:
xg = self.xref_graph
if nid not in xg:
return [] # depends on [control=['if'], data=[]]
if bidirectional:
return list(xg.neighbors(nid)) # depends on [control=['if'], data=[]]
else:
return [x for x in xg.neighbors(nid) if xg[nid][x][0]['source'] == nid] # depends on [control=['if'], data=[]]
return [] |
def error_respond(self, error):
"""Create an error response to this request.
When processing the request produces an error condition this method can be used to
create the error response object.
:param error: Specifies what error occurred.
:type error: str or Exception
:returns: An error response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
"""
if self.unique_id is None:
return None
response = JSONRPCErrorResponse()
response.unique_id = None if self.one_way else self.unique_id
code, msg, data = _get_code_message_and_data(error)
response.error = msg
response._jsonrpc_error_code = code
if data:
response.data = data
return response | def function[error_respond, parameter[self, error]]:
constant[Create an error response to this request.
When processing the request produces an error condition this method can be used to
create the error response object.
:param error: Specifies what error occurred.
:type error: str or Exception
:returns: An error response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
]
if compare[name[self].unique_id is constant[None]] begin[:]
return[constant[None]]
variable[response] assign[=] call[name[JSONRPCErrorResponse], parameter[]]
name[response].unique_id assign[=] <ast.IfExp object at 0x7da18f720a30>
<ast.Tuple object at 0x7da18f720e20> assign[=] call[name[_get_code_message_and_data], parameter[name[error]]]
name[response].error assign[=] name[msg]
name[response]._jsonrpc_error_code assign[=] name[code]
if name[data] begin[:]
name[response].data assign[=] name[data]
return[name[response]] | keyword[def] identifier[error_respond] ( identifier[self] , identifier[error] ):
literal[string]
keyword[if] identifier[self] . identifier[unique_id] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[response] = identifier[JSONRPCErrorResponse] ()
identifier[response] . identifier[unique_id] = keyword[None] keyword[if] identifier[self] . identifier[one_way] keyword[else] identifier[self] . identifier[unique_id]
identifier[code] , identifier[msg] , identifier[data] = identifier[_get_code_message_and_data] ( identifier[error] )
identifier[response] . identifier[error] = identifier[msg]
identifier[response] . identifier[_jsonrpc_error_code] = identifier[code]
keyword[if] identifier[data] :
identifier[response] . identifier[data] = identifier[data]
keyword[return] identifier[response] | def error_respond(self, error):
"""Create an error response to this request.
When processing the request produces an error condition this method can be used to
create the error response object.
:param error: Specifies what error occurred.
:type error: str or Exception
:returns: An error response object that can be serialized and sent to the client.
:rtype: :py:class:`JSONRPCErrorResponse`
"""
if self.unique_id is None:
return None # depends on [control=['if'], data=[]]
response = JSONRPCErrorResponse()
response.unique_id = None if self.one_way else self.unique_id
(code, msg, data) = _get_code_message_and_data(error)
response.error = msg
response._jsonrpc_error_code = code
if data:
response.data = data # depends on [control=['if'], data=[]]
return response |
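A hedged usage sketch through tinyrpc's public protocol API, of which error_respond() above is a method; the exact serialized bytes can differ between tinyrpc versions:

from tinyrpc.protocols.jsonrpc import JSONRPCProtocol

proto = JSONRPCProtocol()
request = proto.parse_request(
    b'{"jsonrpc": "2.0", "method": "nope", "params": [], "id": 7}')
response = request.error_respond(KeyError('nope'))
print(response.serialize())  # a JSON-RPC error object with code and message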
def compile_command(context, backend, config):
"""
Compile Sass project sources to CSS
"""
logger = logging.getLogger("boussole")
logger.info(u"Building project")
# Discover settings file
try:
discovering = Discover(backends=[SettingsBackendJson,
SettingsBackendYaml])
config_filepath, config_engine = discovering.search(
filepath=config,
basedir=os.getcwd(),
kind=backend
)
project = ProjectBase(backend_name=config_engine._kind_name)
settings = project.backend_engine.load(filepath=config_filepath)
except BoussoleBaseException as e:
logger.critical(six.text_type(e))
raise click.Abort()
logger.debug(u"Settings file: {} ({})".format(
config_filepath, config_engine._kind_name))
logger.debug(u"Project sources directory: {}".format(
settings.SOURCES_PATH))
logger.debug(u"Project destination directory: {}".format(
settings.TARGET_PATH))
logger.debug(u"Exclude patterns: {}".format(
settings.EXCLUDES))
# Find all sources with their destination path
try:
compilable_files = ScssFinder().mirror_sources(
settings.SOURCES_PATH,
targetdir=settings.TARGET_PATH,
excludes=settings.EXCLUDES
)
except BoussoleBaseException as e:
logger.error(six.text_type(e))
raise click.Abort()
# Build all compilable stylesheets
compiler = SassCompileHelper()
errors = 0
for src, dst in compilable_files:
logger.debug(u"Compile: {}".format(src))
output_opts = {}
success, message = compiler.safe_compile(settings, src, dst)
if success:
logger.info(u"Output: {}".format(message), **output_opts)
else:
errors += 1
logger.error(message)
# Ensure correct exit code if error has occurred
if errors:
raise click.Abort() | def function[compile_command, parameter[context, backend, config]]:
constant[
Compile Sass project sources to CSS
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[constant[boussole]]]
call[name[logger].info, parameter[constant[Building project]]]
<ast.Try object at 0x7da1b0a48ac0>
call[name[logger].debug, parameter[call[constant[Settings file: {} ({})].format, parameter[name[config_filepath], name[config_engine]._kind_name]]]]
call[name[logger].debug, parameter[call[constant[Project sources directory: {}].format, parameter[name[settings].SOURCES_PATH]]]]
call[name[logger].debug, parameter[call[constant[Project destination directory: {}].format, parameter[name[settings].TARGET_PATH]]]]
call[name[logger].debug, parameter[call[constant[Exclude patterns: {}].format, parameter[name[settings].EXCLUDES]]]]
<ast.Try object at 0x7da1b0aee680>
variable[compiler] assign[=] call[name[SassCompileHelper], parameter[]]
variable[errors] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b0af0940>, <ast.Name object at 0x7da1b0af0ca0>]]] in starred[name[compilable_files]] begin[:]
call[name[logger].debug, parameter[call[constant[Compile: {}].format, parameter[name[src]]]]]
variable[output_opts] assign[=] dictionary[[], []]
<ast.Tuple object at 0x7da1b0af16f0> assign[=] call[name[compiler].safe_compile, parameter[name[settings], name[src], name[dst]]]
if name[success] begin[:]
call[name[logger].info, parameter[call[constant[Output: {}].format, parameter[name[message]]]]]
if name[errors] begin[:]
<ast.Raise object at 0x7da1b0a70190> | keyword[def] identifier[compile_command] ( identifier[context] , identifier[backend] , identifier[config] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] )
keyword[try] :
identifier[discovering] = identifier[Discover] ( identifier[backends] =[ identifier[SettingsBackendJson] ,
identifier[SettingsBackendYaml] ])
identifier[config_filepath] , identifier[config_engine] = identifier[discovering] . identifier[search] (
identifier[filepath] = identifier[config] ,
identifier[basedir] = identifier[os] . identifier[getcwd] (),
identifier[kind] = identifier[backend]
)
identifier[project] = identifier[ProjectBase] ( identifier[backend_name] = identifier[config_engine] . identifier[_kind_name] )
identifier[settings] = identifier[project] . identifier[backend_engine] . identifier[load] ( identifier[filepath] = identifier[config_filepath] )
keyword[except] identifier[BoussoleBaseException] keyword[as] identifier[e] :
identifier[logger] . identifier[critical] ( identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[raise] identifier[click] . identifier[Abort] ()
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[config_filepath] , identifier[config_engine] . identifier[_kind_name] ))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[settings] . identifier[SOURCES_PATH] ))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[settings] . identifier[TARGET_PATH] ))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[settings] . identifier[EXCLUDES] ))
keyword[try] :
identifier[compilable_files] = identifier[ScssFinder] (). identifier[mirror_sources] (
identifier[settings] . identifier[SOURCES_PATH] ,
identifier[targetdir] = identifier[settings] . identifier[TARGET_PATH] ,
identifier[excludes] = identifier[settings] . identifier[EXCLUDES]
)
keyword[except] identifier[BoussoleBaseException] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[raise] identifier[click] . identifier[Abort] ()
identifier[compiler] = identifier[SassCompileHelper] ()
identifier[errors] = literal[int]
keyword[for] identifier[src] , identifier[dst] keyword[in] identifier[compilable_files] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[src] ))
identifier[output_opts] ={}
identifier[success] , identifier[message] = identifier[compiler] . identifier[safe_compile] ( identifier[settings] , identifier[src] , identifier[dst] )
keyword[if] identifier[success] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[message] ),** identifier[output_opts] )
keyword[else] :
identifier[errors] += literal[int]
identifier[logger] . identifier[error] ( identifier[message] )
keyword[if] identifier[errors] :
keyword[raise] identifier[click] . identifier[Abort] () | def compile_command(context, backend, config):
"""
Compile Sass project sources to CSS
"""
logger = logging.getLogger('boussole')
logger.info(u'Building project')
# Discover settings file
try:
discovering = Discover(backends=[SettingsBackendJson, SettingsBackendYaml])
(config_filepath, config_engine) = discovering.search(filepath=config, basedir=os.getcwd(), kind=backend)
project = ProjectBase(backend_name=config_engine._kind_name)
settings = project.backend_engine.load(filepath=config_filepath) # depends on [control=['try'], data=[]]
except BoussoleBaseException as e:
logger.critical(six.text_type(e))
raise click.Abort() # depends on [control=['except'], data=['e']]
logger.debug(u'Settings file: {} ({})'.format(config_filepath, config_engine._kind_name))
logger.debug(u'Project sources directory: {}'.format(settings.SOURCES_PATH))
logger.debug(u'Project destination directory: {}'.format(settings.TARGET_PATH))
logger.debug(u'Exclude patterns: {}'.format(settings.EXCLUDES))
# Find all sources with their destination path
try:
compilable_files = ScssFinder().mirror_sources(settings.SOURCES_PATH, targetdir=settings.TARGET_PATH, excludes=settings.EXCLUDES) # depends on [control=['try'], data=[]]
except BoussoleBaseException as e:
logger.error(six.text_type(e))
raise click.Abort() # depends on [control=['except'], data=['e']]
# Build all compilable stylesheets
compiler = SassCompileHelper()
errors = 0
for (src, dst) in compilable_files:
logger.debug(u'Compile: {}'.format(src))
output_opts = {}
(success, message) = compiler.safe_compile(settings, src, dst)
if success:
logger.info(u'Output: {}'.format(message), **output_opts) # depends on [control=['if'], data=[]]
else:
errors += 1
logger.error(message) # depends on [control=['for'], data=[]]
# Ensure correct exit code if error has occured
if errors:
raise click.Abort() # depends on [control=['if'], data=[]] |
def resolve_url(self, url, follow_redirect=True):
"""Attempts to find a plugin that can use this URL.
The default protocol (http) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
:param follow_redirect: follow redirects
"""
url = update_scheme("http://", url)
available_plugins = []
for name, plugin in self.plugins.items():
if plugin.can_handle_url(url):
available_plugins.append(plugin)
available_plugins.sort(key=lambda x: x.priority(url), reverse=True)
if available_plugins:
return available_plugins[0](url)
if follow_redirect:
# Attempt to handle a redirect URL
try:
res = self.http.head(url, allow_redirects=True, acceptable_status=[501])
# Fall back to GET request if server doesn't handle HEAD.
if res.status_code == 501:
res = self.http.get(url, stream=True)
if res.url != url:
return self.resolve_url(res.url, follow_redirect=follow_redirect)
except PluginError:
pass
raise NoPluginError | def function[resolve_url, parameter[self, url, follow_redirect]]:
constant[Attempts to find a plugin that can use this URL.
The default protocol (http) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
:param follow_redirect: follow redirects
]
variable[url] assign[=] call[name[update_scheme], parameter[constant[http://], name[url]]]
variable[available_plugins] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e6770>, <ast.Name object at 0x7da20c6e6290>]]] in starred[call[name[self].plugins.items, parameter[]]] begin[:]
if call[name[plugin].can_handle_url, parameter[name[url]]] begin[:]
call[name[available_plugins].append, parameter[name[plugin]]]
call[name[available_plugins].sort, parameter[]]
if name[available_plugins] begin[:]
return[call[call[name[available_plugins]][constant[0]], parameter[name[url]]]]
if name[follow_redirect] begin[:]
<ast.Try object at 0x7da20c6e7790>
<ast.Raise object at 0x7da18f09e500> | keyword[def] identifier[resolve_url] ( identifier[self] , identifier[url] , identifier[follow_redirect] = keyword[True] ):
literal[string]
identifier[url] = identifier[update_scheme] ( literal[string] , identifier[url] )
identifier[available_plugins] =[]
keyword[for] identifier[name] , identifier[plugin] keyword[in] identifier[self] . identifier[plugins] . identifier[items] ():
keyword[if] identifier[plugin] . identifier[can_handle_url] ( identifier[url] ):
identifier[available_plugins] . identifier[append] ( identifier[plugin] )
identifier[available_plugins] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[priority] ( identifier[url] ), identifier[reverse] = keyword[True] )
keyword[if] identifier[available_plugins] :
keyword[return] identifier[available_plugins] [ literal[int] ]( identifier[url] )
keyword[if] identifier[follow_redirect] :
keyword[try] :
identifier[res] = identifier[self] . identifier[http] . identifier[head] ( identifier[url] , identifier[allow_redirects] = keyword[True] , identifier[acceptable_status] =[ literal[int] ])
keyword[if] identifier[res] . identifier[status_code] == literal[int] :
identifier[res] = identifier[self] . identifier[http] . identifier[get] ( identifier[url] , identifier[stream] = keyword[True] )
keyword[if] identifier[res] . identifier[url] != identifier[url] :
keyword[return] identifier[self] . identifier[resolve_url] ( identifier[res] . identifier[url] , identifier[follow_redirect] = identifier[follow_redirect] )
keyword[except] identifier[PluginError] :
keyword[pass]
keyword[raise] identifier[NoPluginError] | def resolve_url(self, url, follow_redirect=True):
"""Attempts to find a plugin that can use this URL.
The default protocol (http) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
:param follow_redirect: follow redirects
"""
url = update_scheme('http://', url)
available_plugins = []
for (name, plugin) in self.plugins.items():
if plugin.can_handle_url(url):
available_plugins.append(plugin) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
available_plugins.sort(key=lambda x: x.priority(url), reverse=True)
if available_plugins:
return available_plugins[0](url) # depends on [control=['if'], data=[]]
if follow_redirect:
# Attempt to handle a redirect URL
try:
res = self.http.head(url, allow_redirects=True, acceptable_status=[501])
# Fall back to GET request if server doesn't handle HEAD.
if res.status_code == 501:
res = self.http.get(url, stream=True) # depends on [control=['if'], data=[]]
if res.url != url:
return self.resolve_url(res.url, follow_redirect=follow_redirect) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except PluginError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
raise NoPluginError |
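A minimal sketch against Streamlink's session API (resolve_url above is a session method); the return type and bundled plugins have changed across Streamlink releases, so treat this purely as an illustration:

from streamlink import Streamlink, NoPluginError

session = Streamlink()
try:
    plugin = session.resolve_url('https://www.youtube.com/watch?v=dQw4w9WgXcQ')
    print(type(plugin).__name__)  # the matched plugin (a tuple in newer versions)
except NoPluginError:
    print('no plugin can handle this URL')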
def get_creation_date(
self,
bucket: str,
key: str,
) -> datetime:
"""
Retrieves the creation date for a given key in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which the creation date is being retrieved.
:return: the creation date
"""
# An S3 object's creation date is stored in its LastModified field which stores the
# most recent value between the two.
return self.get_last_modified_date(bucket, key) | def function[get_creation_date, parameter[self, bucket, key]]:
constant[
Retrieves the creation date for a given key in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which the creation date is being retrieved.
:return: the creation date
]
return[call[name[self].get_last_modified_date, parameter[name[bucket], name[key]]]] | keyword[def] identifier[get_creation_date] (
identifier[self] ,
identifier[bucket] : identifier[str] ,
identifier[key] : identifier[str] ,
)-> identifier[datetime] :
literal[string]
keyword[return] identifier[self] . identifier[get_last_modified_date] ( identifier[bucket] , identifier[key] ) | def get_creation_date(self, bucket: str, key: str) -> datetime:
"""
Retrieves the creation date for a given key in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which the creation date is being retrieved.
:return: the creation date
"""
# An S3 object's creation date is stored in its LastModified field which stores the
# most recent value between the two.
return self.get_last_modified_date(bucket, key) |
def send_rpc_async(self, conn_id, address, rpc_id, payload, timeout, callback):
"""Asynchronously send an RPC to this IOTile device."""
future = self._loop.launch_coroutine(self._adapter.send_rpc(conn_id, address, rpc_id, payload, timeout))
def format_response(future):
payload = None
exception = future.exception()
rpc_status = None
rpc_response = b''
failure = None
success = True
if exception is None:
payload = future.result()
rpc_status, rpc_response = pack_rpc_response(payload, exception)
elif isinstance(exception, (RPCInvalidIDError, TileNotFoundError, RPCNotFoundError,
RPCErrorCode, BusyRPCResponse)):
rpc_status, rpc_response = pack_rpc_response(payload, exception)
else:
success = False
failure = str(exception)
callback(conn_id, self.id, success, failure, rpc_status, rpc_response)
future.add_done_callback(format_response) | def function[send_rpc_async, parameter[self, conn_id, address, rpc_id, payload, timeout, callback]]:
constant[Asynchronously send an RPC to this IOTile device.]
variable[future] assign[=] call[name[self]._loop.launch_coroutine, parameter[call[name[self]._adapter.send_rpc, parameter[name[conn_id], name[address], name[rpc_id], name[payload], name[timeout]]]]]
def function[format_response, parameter[future]]:
variable[payload] assign[=] constant[None]
variable[exception] assign[=] call[name[future].exception, parameter[]]
variable[rpc_status] assign[=] constant[None]
variable[rpc_response] assign[=] constant[b'']
variable[failure] assign[=] constant[None]
variable[success] assign[=] constant[True]
if compare[name[exception] is constant[None]] begin[:]
variable[payload] assign[=] call[name[future].result, parameter[]]
<ast.Tuple object at 0x7da204347460> assign[=] call[name[pack_rpc_response], parameter[name[payload], name[exception]]]
call[name[callback], parameter[name[conn_id], name[self].id, name[success], name[failure], name[rpc_status], name[rpc_response]]]
call[name[future].add_done_callback, parameter[name[format_response]]] | keyword[def] identifier[send_rpc_async] ( identifier[self] , identifier[conn_id] , identifier[address] , identifier[rpc_id] , identifier[payload] , identifier[timeout] , identifier[callback] ):
literal[string]
identifier[future] = identifier[self] . identifier[_loop] . identifier[launch_coroutine] ( identifier[self] . identifier[_adapter] . identifier[send_rpc] ( identifier[conn_id] , identifier[address] , identifier[rpc_id] , identifier[payload] , identifier[timeout] ))
keyword[def] identifier[format_response] ( identifier[future] ):
identifier[payload] = keyword[None]
identifier[exception] = identifier[future] . identifier[exception] ()
identifier[rpc_status] = keyword[None]
identifier[rpc_response] = literal[string]
identifier[failure] = keyword[None]
identifier[success] = keyword[True]
keyword[if] identifier[exception] keyword[is] keyword[None] :
identifier[payload] = identifier[future] . identifier[result] ()
identifier[rpc_status] , identifier[rpc_response] = identifier[pack_rpc_response] ( identifier[payload] , identifier[exception] )
keyword[elif] identifier[isinstance] ( identifier[exception] ,( identifier[RPCInvalidIDError] , identifier[TileNotFoundError] , identifier[RPCNotFoundError] ,
identifier[RPCErrorCode] , identifier[BusyRPCResponse] )):
identifier[rpc_status] , identifier[rpc_response] = identifier[pack_rpc_response] ( identifier[payload] , identifier[exception] )
keyword[else] :
identifier[success] = keyword[False]
identifier[failure] = identifier[str] ( identifier[exception] )
identifier[callback] ( identifier[conn_id] , identifier[self] . identifier[id] , identifier[success] , identifier[failure] , identifier[rpc_status] , identifier[rpc_response] )
identifier[future] . identifier[add_done_callback] ( identifier[format_response] ) | def send_rpc_async(self, conn_id, address, rpc_id, payload, timeout, callback):
"""Asynchronously send an RPC to this IOTile device."""
future = self._loop.launch_coroutine(self._adapter.send_rpc(conn_id, address, rpc_id, payload, timeout))
def format_response(future):
payload = None
exception = future.exception()
rpc_status = None
rpc_response = b''
failure = None
success = True
if exception is None:
payload = future.result()
(rpc_status, rpc_response) = pack_rpc_response(payload, exception) # depends on [control=['if'], data=['exception']]
elif isinstance(exception, (RPCInvalidIDError, TileNotFoundError, RPCNotFoundError, RPCErrorCode, BusyRPCResponse)):
(rpc_status, rpc_response) = pack_rpc_response(payload, exception) # depends on [control=['if'], data=[]]
else:
success = False
failure = str(exception)
callback(conn_id, self.id, success, failure, rpc_status, rpc_response)
future.add_done_callback(format_response) |
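A sketch of a callback compatible with the call made above; the parameter names are illustrative, and everything except conn_id is produced by the adapter or pack_rpc_response:

def on_rpc_done(conn_id, adapter_id, success, failure, rpc_status, rpc_response):
    # success/failure report transport-level problems; rpc_status and
    # rpc_response carry the packed result when the call went through.
    if not success:
        print('RPC failed at the transport level:', failure)
    else:
        print('rpc_status=%s payload=%r' % (rpc_status, rpc_response))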
def require_variable(df, variable, unit=None, year=None, exclude_on_fail=False,
**kwargs):
"""Check whether all scenarios have a required variable
Parameters
----------
df: IamDataFrame instance
args: see `IamDataFrame.require_variable()` for details
kwargs: passed to `df.filter()`
"""
fdf = df.filter(**kwargs)
if len(fdf.data) > 0:
vdf = fdf.require_variable(variable=variable, unit=unit, year=year,
exclude_on_fail=exclude_on_fail)
df.meta['exclude'] |= fdf.meta['exclude'] # update if any excluded
return vdf | def function[require_variable, parameter[df, variable, unit, year, exclude_on_fail]]:
constant[Check whether all scenarios have a required variable
Parameters
----------
df: IamDataFrame instance
args: see `IamDataFrame.require_variable()` for details
kwargs: passed to `df.filter()`
]
variable[fdf] assign[=] call[name[df].filter, parameter[]]
if compare[call[name[len], parameter[name[fdf].data]] greater[>] constant[0]] begin[:]
variable[vdf] assign[=] call[name[fdf].require_variable, parameter[]]
<ast.AugAssign object at 0x7da18bcc8700>
return[name[vdf]] | keyword[def] identifier[require_variable] ( identifier[df] , identifier[variable] , identifier[unit] = keyword[None] , identifier[year] = keyword[None] , identifier[exclude_on_fail] = keyword[False] ,
** identifier[kwargs] ):
literal[string]
identifier[fdf] = identifier[df] . identifier[filter] (** identifier[kwargs] )
keyword[if] identifier[len] ( identifier[fdf] . identifier[data] )> literal[int] :
identifier[vdf] = identifier[fdf] . identifier[require_variable] ( identifier[variable] = identifier[variable] , identifier[unit] = identifier[unit] , identifier[year] = identifier[year] ,
identifier[exclude_on_fail] = identifier[exclude_on_fail] )
identifier[df] . identifier[meta] [ literal[string] ]|= identifier[fdf] . identifier[meta] [ literal[string] ]
keyword[return] identifier[vdf] | def require_variable(df, variable, unit=None, year=None, exclude_on_fail=False, **kwargs):
"""Check whether all scenarios have a required variable
Parameters
----------
df: IamDataFrame instance
args: see `IamDataFrame.require_variable()` for details
kwargs: passed to `df.filter()`
"""
fdf = df.filter(**kwargs)
if len(fdf.data) > 0:
vdf = fdf.require_variable(variable=variable, unit=unit, year=year, exclude_on_fail=exclude_on_fail)
df.meta['exclude'] |= fdf.meta['exclude'] # update if any excluded
return vdf # depends on [control=['if'], data=[]] |
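A hedged sketch with a minimal pyam-style scenario set, assuming a pyam version where IamDataFrame.require_variable and meta['exclude'] are available; the helper above filters with the keyword arguments and folds any exclusions back into df.meta:

import pandas as pd
import pyam

data = pd.DataFrame(
    [['model_a', 'scen_a', 'World', 'Primary Energy', 'EJ/yr', 2030, 100.0]],
    columns=['model', 'scenario', 'region', 'variable', 'unit', 'year', 'value'])
df = pyam.IamDataFrame(data)
require_variable(df, variable='Final Energy', year=2030, exclude_on_fail=True)
print(df.meta['exclude'])  # scen_a is excluded: it reports no 'Final Energy'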
def _parse_proc_cgroups(self):
"""Parse /proc/cgroups"""
"""
#subsys_name hierarchy num_cgroups enabled
cpuset 0 1 1
ns 0 1 1
cpu 1 10 1
cpuacct 0 1 1
memory 0 1 1
devices 0 1 1
freezer 0 1 1
net_cls 0 1 1
"""
for line in fileops.readlines('/proc/cgroups'):
m = self._RE_CGROUPS.match(line)
if m is None:
continue
name = m.group('name')
hierarchy = int(m.group('hier'))
n_cgroups = int(m.group('n'))
if m.group('enabled') == '1':
enabled = True
else:
enabled = False
if name not in self:
self[name] = {}
self[name]['name'] = name
self[name]['hierarchy'] = hierarchy
self[name]['num_cgroups'] = n_cgroups
self[name]['enabled'] = enabled | def function[_parse_proc_cgroups, parameter[self]]:
constant[Parse /proc/cgroups]
constant[
#subsys_name hierarchy num_cgroups enabled
cpuset 0 1 1
ns 0 1 1
cpu 1 10 1
cpuacct 0 1 1
memory 0 1 1
devices 0 1 1
freezer 0 1 1
net_cls 0 1 1
]
for taget[name[line]] in starred[call[name[fileops].readlines, parameter[constant[/proc/cgroups]]]] begin[:]
variable[m] assign[=] call[name[self]._RE_CGROUPS.match, parameter[name[line]]]
if compare[name[m] is constant[None]] begin[:]
continue
variable[name] assign[=] call[name[m].group, parameter[constant[name]]]
variable[hierarchy] assign[=] call[name[int], parameter[call[name[m].group, parameter[constant[hier]]]]]
variable[n_cgroups] assign[=] call[name[int], parameter[call[name[m].group, parameter[constant[n]]]]]
if compare[call[name[m].group, parameter[constant[enabled]]] equal[==] constant[1]] begin[:]
variable[enabled] assign[=] constant[True]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]] begin[:]
call[name[self]][name[name]] assign[=] dictionary[[], []]
call[call[name[self]][name[name]]][constant[name]] assign[=] name[name]
call[call[name[self]][name[name]]][constant[hierarchy]] assign[=] name[hierarchy]
call[call[name[self]][name[name]]][constant[num_cgroups]] assign[=] name[n_cgroups]
call[call[name[self]][name[name]]][constant[enabled]] assign[=] name[enabled] | keyword[def] identifier[_parse_proc_cgroups] ( identifier[self] ):
literal[string]
literal[string]
keyword[for] identifier[line] keyword[in] identifier[fileops] . identifier[readlines] ( literal[string] ):
identifier[m] = identifier[self] . identifier[_RE_CGROUPS] . identifier[match] ( identifier[line] )
keyword[if] identifier[m] keyword[is] keyword[None] :
keyword[continue]
identifier[name] = identifier[m] . identifier[group] ( literal[string] )
identifier[hierarchy] = identifier[int] ( identifier[m] . identifier[group] ( literal[string] ))
identifier[n_cgroups] = identifier[int] ( identifier[m] . identifier[group] ( literal[string] ))
keyword[if] identifier[m] . identifier[group] ( literal[string] )== literal[string] :
identifier[enabled] = keyword[True]
keyword[else] :
identifier[enabled] = keyword[False]
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] :
identifier[self] [ identifier[name] ]={}
identifier[self] [ identifier[name] ][ literal[string] ]= identifier[name]
identifier[self] [ identifier[name] ][ literal[string] ]= identifier[hierarchy]
identifier[self] [ identifier[name] ][ literal[string] ]= identifier[n_cgroups]
identifier[self] [ identifier[name] ][ literal[string] ]= identifier[enabled] | def _parse_proc_cgroups(self):
"""Parse /proc/cgroups"""
'\n #subsys_name\thierarchy\tnum_cgroups\tenabled\n cpuset\t0\t1\t1\n ns\t0\t1\t1\n cpu\t1\t10\t1\n cpuacct\t0\t1\t1\n memory\t0\t1\t1\n devices\t0\t1\t1\n freezer\t0\t1\t1\n net_cls\t0\t1\t1\n '
for line in fileops.readlines('/proc/cgroups'):
m = self._RE_CGROUPS.match(line)
if m is None:
continue # depends on [control=['if'], data=[]]
name = m.group('name')
hierarchy = int(m.group('hier'))
n_cgroups = int(m.group('n'))
if m.group('enabled') == '1':
enabled = True # depends on [control=['if'], data=[]]
else:
enabled = False
if name not in self:
self[name] = {} # depends on [control=['if'], data=['name', 'self']]
self[name]['name'] = name
self[name]['hierarchy'] = hierarchy
self[name]['num_cgroups'] = n_cgroups
self[name]['enabled'] = enabled # depends on [control=['for'], data=['line']] |
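The parser above relies on a class attribute _RE_CGROUPS that is not shown in this row; a plausible pattern matching the sample lines in the docstring, using the same group names the code reads ('name', 'hier', 'n', 'enabled'), would be:

import re

_RE_CGROUPS = re.compile(
    r'^(?P<name>\w+)\s+(?P<hier>\d+)\s+(?P<n>\d+)\s+(?P<enabled>[01])')

The header line in /proc/cgroups starts with '#', fails the match, and is skipped by the `continue`.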
def init(name, storage_backend='dir', trust_password=None,
network_address=None, network_port=None, storage_create_device=None,
storage_create_loop=None, storage_pool=None,
done_file='%SALT_CONFIG_DIR%/lxd_initialized'):
'''
Initializes the LXD Daemon; as LXD doesn't tell if it's initialized,
we touch the done_file and check if it exists.
This can only be called once per host unless you remove the done_file.
name :
Ignore this. This is just here for salt.
storage_backend :
Storage backend to use (zfs or dir, default: dir)
trust_password :
Password required to add new clients
network_address : None
Address to bind LXD to (default: none)
network_port : None
Port to bind LXD to (Default: 8443)
storage_create_device : None
Setup device based storage using this DEVICE
storage_create_loop : None
Setup loop based storage with this SIZE in GB
storage_pool : None
Storage pool to use or create
done_file :
Path where we check that this method has been called,
as it can run only once and there's currently no way
to ask LXD if init has been called.
'''
ret = {
'name': name,
'storage_backend': storage_backend,
'trust_password': True if trust_password is not None else False,
'network_address': network_address,
'network_port': network_port,
'storage_create_device': storage_create_device,
'storage_create_loop': storage_create_loop,
'storage_pool': storage_pool,
'done_file': done_file,
}
# TODO: Get a better path and don't hardcode '/etc/salt'
done_file = done_file.replace('%SALT_CONFIG_DIR%', '/etc/salt')
if os.path.exists(done_file):
# Success we already did that.
return _success(ret, 'LXD is already initialized')
if __opts__['test']:
return _success(ret, 'Would initialize LXD')
# We always touch the done_file, so when LXD is already initialized
# we don't run this over and over.
__salt__['file.touch'](done_file)
try:
__salt__['lxd.init'](
storage_backend if storage_backend else None,
trust_password if trust_password else None,
network_address if network_address else None,
network_port if network_port else None,
storage_create_device if storage_create_device else None,
storage_create_loop if storage_create_loop else None,
storage_pool if storage_pool else None
)
except CommandExecutionError as e:
return _error(ret, six.text_type(e))
return _success(ret, 'Initialized the LXD Daemon') | def function[init, parameter[name, storage_backend, trust_password, network_address, network_port, storage_create_device, storage_create_loop, storage_pool, done_file]]:
constant[
Initializes the LXD Daemon; as LXD doesn't tell if it's initialized,
we touch the done_file and check if it exists.
This can only be called once per host unless you remove the done_file.
name :
Ignore this. This is just here for salt.
storage_backend :
Storage backend to use (zfs or dir, default: dir)
trust_password :
Password required to add new clients
network_address : None
Address to bind LXD to (default: none)
network_port : None
Port to bind LXD to (Default: 8443)
storage_create_device : None
Setup device based storage using this DEVICE
storage_create_loop : None
Setup loop based storage with this SIZE in GB
storage_pool : None
Storage pool to use or create
done_file :
Path where we check that this method has been called,
as it can run only once and there's currently no way
to ask LXD if init has been called.
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b20b8dc0>, <ast.Constant object at 0x7da1b20ba410>, <ast.Constant object at 0x7da1b20bb4c0>, <ast.Constant object at 0x7da1b20bb970>, <ast.Constant object at 0x7da1b20b9f00>, <ast.Constant object at 0x7da1b20b9240>, <ast.Constant object at 0x7da1b20b8250>, <ast.Constant object at 0x7da1b20b9c00>, <ast.Constant object at 0x7da1b20ba920>], [<ast.Name object at 0x7da1b20b8370>, <ast.Name object at 0x7da1b20b8eb0>, <ast.IfExp object at 0x7da1b20b9930>, <ast.Name object at 0x7da1b20ba950>, <ast.Name object at 0x7da1b20baa70>, <ast.Name object at 0x7da1b20b9360>, <ast.Name object at 0x7da1b20b94e0>, <ast.Name object at 0x7da1b20ba4a0>, <ast.Name object at 0x7da1b20bb040>]]
variable[done_file] assign[=] call[name[done_file].replace, parameter[constant[%SALT_CONFIG_DIR%], constant[/etc/salt]]]
if call[name[os].path.exists, parameter[name[done_file]]] begin[:]
return[call[name[_success], parameter[name[ret], constant[LXD is already initialized]]]]
if call[name[__opts__]][constant[test]] begin[:]
return[call[name[_success], parameter[name[ret], constant[Would initialize LXD]]]]
call[call[name[__salt__]][constant[file.touch]], parameter[name[done_file]]]
<ast.Try object at 0x7da1b20bafe0>
return[call[name[_success], parameter[name[ret], constant[Initialized the LXD Daemon]]]] | keyword[def] identifier[init] ( identifier[name] , identifier[storage_backend] = literal[string] , identifier[trust_password] = keyword[None] ,
identifier[network_address] = keyword[None] , identifier[network_port] = keyword[None] , identifier[storage_create_device] = keyword[None] ,
identifier[storage_create_loop] = keyword[None] , identifier[storage_pool] = keyword[None] ,
identifier[done_file] = literal[string] ):
literal[string]
identifier[ret] ={
literal[string] : identifier[name] ,
literal[string] : identifier[storage_backend] ,
literal[string] : keyword[True] keyword[if] identifier[trust_password] keyword[is] keyword[not] keyword[None] keyword[else] keyword[False] ,
literal[string] : identifier[network_address] ,
literal[string] : identifier[network_port] ,
literal[string] : identifier[storage_create_device] ,
literal[string] : identifier[storage_create_loop] ,
literal[string] : identifier[storage_pool] ,
literal[string] : identifier[done_file] ,
}
identifier[done_file] = identifier[done_file] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[done_file] ):
keyword[return] identifier[_success] ( identifier[ret] , literal[string] )
keyword[if] identifier[__opts__] [ literal[string] ]:
keyword[return] identifier[_success] ( identifier[ret] , literal[string] )
identifier[__salt__] [ literal[string] ]( identifier[done_file] )
keyword[try] :
identifier[__salt__] [ literal[string] ](
identifier[storage_backend] keyword[if] identifier[storage_backend] keyword[else] keyword[None] ,
identifier[trust_password] keyword[if] identifier[trust_password] keyword[else] keyword[None] ,
identifier[network_address] keyword[if] identifier[network_address] keyword[else] keyword[None] ,
identifier[network_port] keyword[if] identifier[network_port] keyword[else] keyword[None] ,
identifier[storage_create_device] keyword[if] identifier[storage_create_device] keyword[else] keyword[None] ,
identifier[storage_create_loop] keyword[if] identifier[storage_create_loop] keyword[else] keyword[None] ,
identifier[storage_pool] keyword[if] identifier[storage_pool] keyword[else] keyword[None]
)
keyword[except] identifier[CommandExecutionError] keyword[as] identifier[e] :
keyword[return] identifier[_error] ( identifier[ret] , identifier[six] . identifier[text_type] ( identifier[e] ))
keyword[return] identifier[_success] ( identifier[ret] , literal[string] ) | def init(name, storage_backend='dir', trust_password=None, network_address=None, network_port=None, storage_create_device=None, storage_create_loop=None, storage_pool=None, done_file='%SALT_CONFIG_DIR%/lxd_initialized'):
"""
Initializes the LXD Daemon; as LXD doesn't tell if it's initialized,
we touch the done_file and check if it exists.
This can only be called once per host unless you remove the done_file.
name :
Ignore this. This is just here for salt.
storage_backend :
Storage backend to use (zfs or dir, default: dir)
trust_password :
Password required to add new clients
network_address : None
Address to bind LXD to (default: none)
network_port : None
Port to bind LXD to (Default: 8443)
storage_create_device : None
Setup device based storage using this DEVICE
storage_create_loop : None
Setup loop based storage with this SIZE in GB
storage_pool : None
Storage pool to use or create
done_file :
Path where we check that this method has been called,
as it can run only once and theres currently no way
to ask LXD if init has been called.
"""
ret = {'name': name, 'storage_backend': storage_backend, 'trust_password': True if trust_password is not None else False, 'network_address': network_address, 'network_port': network_port, 'storage_create_device': storage_create_device, 'storage_create_loop': storage_create_loop, 'storage_pool': storage_pool, 'done_file': done_file}
# TODO: Get a better path and don't hardcode '/etc/salt'
done_file = done_file.replace('%SALT_CONFIG_DIR%', '/etc/salt')
if os.path.exists(done_file):
# Success we already did that.
return _success(ret, 'LXD is already initialized') # depends on [control=['if'], data=[]]
if __opts__['test']:
return _success(ret, 'Would initialize LXD') # depends on [control=['if'], data=[]]
# We always touch the done_file, so when LXD is already initialized
# we don't run this over and over.
__salt__['file.touch'](done_file)
try:
__salt__['lxd.init'](storage_backend if storage_backend else None, trust_password if trust_password else None, network_address if network_address else None, network_port if network_port else None, storage_create_device if storage_create_device else None, storage_create_loop if storage_create_loop else None, storage_pool if storage_pool else None) # depends on [control=['try'], data=[]]
except CommandExecutionError as e:
return _error(ret, six.text_type(e)) # depends on [control=['except'], data=['e']]
return _success(ret, 'Initialized the LXD Daemon') |
def list_storage(kwargs=None, conn=None, call=None):
'''
.. versionadded:: 2015.8.0
List storage accounts associated with the account
CLI Example:
.. code-block:: bash
salt-cloud -f list_storage my-azure
'''
if call != 'function':
raise SaltCloudSystemExit(
'The list_storage function must be called with -f or --function.'
)
if not conn:
conn = get_conn()
data = conn.list_storage_accounts()
pprint.pprint(dir(data))
ret = {}
for item in data.storage_services:
ret[item.service_name] = object_to_dict(item)
return ret | def function[list_storage, parameter[kwargs, conn, call]]:
constant[
.. versionadded:: 2015.8.0
List storage accounts associated with the account
CLI Example:
.. code-block:: bash
salt-cloud -f list_storage my-azure
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da18f00e440>
if <ast.UnaryOp object at 0x7da18f00f850> begin[:]
variable[conn] assign[=] call[name[get_conn], parameter[]]
variable[data] assign[=] call[name[conn].list_storage_accounts, parameter[]]
call[name[pprint].pprint, parameter[call[name[dir], parameter[name[data]]]]]
variable[ret] assign[=] dictionary[[], []]
for taget[name[item]] in starred[name[data].storage_services] begin[:]
call[name[ret]][name[item].service_name] assign[=] call[name[object_to_dict], parameter[name[item]]]
return[name[ret]] | keyword[def] identifier[list_storage] ( identifier[kwargs] = keyword[None] , identifier[conn] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] keyword[not] identifier[conn] :
identifier[conn] = identifier[get_conn] ()
identifier[data] = identifier[conn] . identifier[list_storage_accounts] ()
identifier[pprint] . identifier[pprint] ( identifier[dir] ( identifier[data] ))
identifier[ret] ={}
keyword[for] identifier[item] keyword[in] identifier[data] . identifier[storage_services] :
identifier[ret] [ identifier[item] . identifier[service_name] ]= identifier[object_to_dict] ( identifier[item] )
keyword[return] identifier[ret] | def list_storage(kwargs=None, conn=None, call=None):
"""
.. versionadded:: 2015.8.0
List storage accounts associated with the account
CLI Example:
.. code-block:: bash
salt-cloud -f list_storage my-azure
"""
if call != 'function':
raise SaltCloudSystemExit('The list_storage function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if not conn:
conn = get_conn() # depends on [control=['if'], data=[]]
data = conn.list_storage_accounts()
pprint.pprint(dir(data))
ret = {}
for item in data.storage_services:
ret[item.service_name] = object_to_dict(item) # depends on [control=['for'], data=['item']]
return ret |
def configure(log_file):
'''
Configure root logger to log INFO to stderr and DEBUG to log file.
The log file is appended to. Stderr uses a terse format, while the log file
uses a verbose unambiguous format.
Root level is set to INFO.
Parameters
----------
log_file : ~pathlib.Path
File to log to.
Returns
-------
~typing.Tuple[~logging.StreamHandler, ~logging.FileHandler]
Stderr and file handler respectively.
'''
# Note: do not use logging.basicConfig as it does not play along with caplog in testing
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
# log info to stderr in terse format
stderr_handler = logging.StreamHandler() # to stderr
stderr_handler.setLevel(logging.INFO)
stderr_handler.setFormatter(logging.Formatter('{levelname[0]}: {message}', style='{'))
root_logger.addHandler(stderr_handler)
# log debug to file in full format
file_handler = logging.FileHandler(str(log_file))
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter('{levelname[0]} {asctime} {name} ({module}:{lineno}):\n{message}\n', style='{'))
root_logger.addHandler(file_handler)
return stderr_handler, file_handler | def function[configure, parameter[log_file]]:
constant[
Configure root logger to log INFO to stderr and DEBUG to log file.
The log file is appended to. Stderr uses a terse format, while the log file
uses a verbose unambiguous format.
Root level is set to INFO.
Parameters
----------
log_file : ~pathlib.Path
File to log to.
Returns
-------
~typing.Tuple[~logging.StreamHandler, ~logging.FileHandler]
Stderr and file handler respectively.
]
variable[root_logger] assign[=] call[name[logging].getLogger, parameter[]]
call[name[root_logger].setLevel, parameter[name[logging].INFO]]
variable[stderr_handler] assign[=] call[name[logging].StreamHandler, parameter[]]
call[name[stderr_handler].setLevel, parameter[name[logging].INFO]]
call[name[stderr_handler].setFormatter, parameter[call[name[logging].Formatter, parameter[constant[{levelname[0]}: {message}]]]]]
call[name[root_logger].addHandler, parameter[name[stderr_handler]]]
variable[file_handler] assign[=] call[name[logging].FileHandler, parameter[call[name[str], parameter[name[log_file]]]]]
call[name[file_handler].setLevel, parameter[name[logging].DEBUG]]
call[name[file_handler].setFormatter, parameter[call[name[logging].Formatter, parameter[constant[{levelname[0]} {asctime} {name} ({module}:{lineno}):
{message}
]]]]]
call[name[root_logger].addHandler, parameter[name[file_handler]]]
return[tuple[[<ast.Name object at 0x7da20c76fd60>, <ast.Name object at 0x7da20c76c550>]]] | keyword[def] identifier[configure] ( identifier[log_file] ):
literal[string]
identifier[root_logger] = identifier[logging] . identifier[getLogger] ()
identifier[root_logger] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[stderr_handler] = identifier[logging] . identifier[StreamHandler] ()
identifier[stderr_handler] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[stderr_handler] . identifier[setFormatter] ( identifier[logging] . identifier[Formatter] ( literal[string] , identifier[style] = literal[string] ))
identifier[root_logger] . identifier[addHandler] ( identifier[stderr_handler] )
identifier[file_handler] = identifier[logging] . identifier[FileHandler] ( identifier[str] ( identifier[log_file] ))
identifier[file_handler] . identifier[setLevel] ( identifier[logging] . identifier[DEBUG] )
identifier[file_handler] . identifier[setFormatter] ( identifier[logging] . identifier[Formatter] ( literal[string] , identifier[style] = literal[string] ))
identifier[root_logger] . identifier[addHandler] ( identifier[file_handler] )
keyword[return] identifier[stderr_handler] , identifier[file_handler] | def configure(log_file):
"""
Configure root logger to log INFO to stderr and DEBUG to log file.
The log file is appended to. Stderr uses a terse format, while the log file
uses a verbose unambiguous format.
Root level is set to INFO.
Parameters
----------
log_file : ~pathlib.Path
File to log to.
Returns
-------
~typing.Tuple[~logging.StreamHandler, ~logging.FileHandler]
Stderr and file handler respectively.
"""
# Note: do not use logging.basicConfig as it does not play along with caplog in testing
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
# log info to stderr in terse format
stderr_handler = logging.StreamHandler() # to stderr
stderr_handler.setLevel(logging.INFO)
stderr_handler.setFormatter(logging.Formatter('{levelname[0]}: {message}', style='{'))
root_logger.addHandler(stderr_handler)
# log debug to file in full format
file_handler = logging.FileHandler(str(log_file))
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter('{levelname[0]} {asctime} {name} ({module}:{lineno}):\n{message}\n', style='{'))
root_logger.addHandler(file_handler)
return (stderr_handler, file_handler) |
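A minimal usage sketch of configure() (the log-file name is hypothetical). Because the root level is INFO, DEBUG records only reach the file handler once a specific logger is explicitly lowered:

import logging
from pathlib import Path

stderr_handler, file_handler = configure(Path('run.log'))
log = logging.getLogger('myapp')
log.info('terse on stderr, verbose in run.log')
log.setLevel(logging.DEBUG)  # lower this logger past the INFO root level
log.debug('reaches run.log only: the stderr handler filters below INFO')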
def description(self, description):
"""
Sets the description of this AdditionalRecipient.
The description of the additional recipient.
:param description: The description of this AdditionalRecipient.
:type: str
"""
if description is None:
raise ValueError("Invalid value for `description`, must not be `None`")
if len(description) > 100:
raise ValueError("Invalid value for `description`, length must be less than `100`")
if len(description) < 1:
raise ValueError("Invalid value for `description`, length must be greater than or equal to `1`")
self._description = description | def function[description, parameter[self, description]]:
constant[
Sets the description of this AdditionalRecipient.
The description of the additional recipient.
:param description: The description of this AdditionalRecipient.
:type: str
]
if compare[name[description] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1c1aaa0>
if compare[call[name[len], parameter[name[description]]] greater[>] constant[100]] begin[:]
<ast.Raise object at 0x7da1b1c18d60>
if compare[call[name[len], parameter[name[description]]] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1c1beb0>
name[self]._description assign[=] name[description] | keyword[def] identifier[description] ( identifier[self] , identifier[description] ):
literal[string]
keyword[if] identifier[description] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[description] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[description] )< literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_description] = identifier[description] | def description(self, description):
"""
Sets the description of this AdditionalRecipient.
The description of the additional recipient.
:param description: The description of this AdditionalRecipient.
:type: str
"""
if description is None:
raise ValueError('Invalid value for `description`, must not be `None`') # depends on [control=['if'], data=[]]
if len(description) > 100:
raise ValueError('Invalid value for `description`, length must be less than or equal to `100`') # depends on [control=['if'], data=[]]
if len(description) < 1:
raise ValueError('Invalid value for `description`, length must be greater than or equal to `1`') # depends on [control=['if'], data=[]]
self._description = description |
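A hedged sketch of the validation behaviour, assuming AdditionalRecipient can be instantiated with no arguments (its constructor is not shown here):

recipient = AdditionalRecipient()
recipient.description = 'Fee share'    # accepted: 1 to 100 characters
try:
    recipient.description = 'x' * 101  # one character too long
except ValueError as exc:
    print(exc)  # Invalid value for `description`, length must be less than or equal to `100`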
def _max_gain_split(self, examples):
"""
Returns an OnlineInformationGain of the attribute with
max gain based on `examples`.
"""
gains = self._new_set_of_gain_counters()
for example in examples:
for gain in gains:
gain.add(example)
winner = max(gains, key=lambda gain: gain.get_gain())
if not winner.get_target_class_counts():
raise ValueError("Dataset is empty")
return winner | def function[_max_gain_split, parameter[self, examples]]:
constant[
Returns an OnlineInformationGain of the attribute with
max gain based on `examples`.
]
variable[gains] assign[=] call[name[self]._new_set_of_gain_counters, parameter[]]
for taget[name[example]] in starred[name[examples]] begin[:]
for taget[name[gain]] in starred[name[gains]] begin[:]
call[name[gain].add, parameter[name[example]]]
variable[winner] assign[=] call[name[max], parameter[name[gains]]]
if <ast.UnaryOp object at 0x7da1b137bc10> begin[:]
<ast.Raise object at 0x7da1b137ab60>
return[name[winner]] | keyword[def] identifier[_max_gain_split] ( identifier[self] , identifier[examples] ):
literal[string]
identifier[gains] = identifier[self] . identifier[_new_set_of_gain_counters] ()
keyword[for] identifier[example] keyword[in] identifier[examples] :
keyword[for] identifier[gain] keyword[in] identifier[gains] :
identifier[gain] . identifier[add] ( identifier[example] )
identifier[winner] = identifier[max] ( identifier[gains] , identifier[key] = keyword[lambda] identifier[gain] : identifier[gain] . identifier[get_gain] ())
keyword[if] keyword[not] identifier[winner] . identifier[get_target_class_counts] ():
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[winner] | def _max_gain_split(self, examples):
"""
Returns an OnlineInformationGain of the attribute with
max gain based on `examples`.
"""
gains = self._new_set_of_gain_counters()
for example in examples:
for gain in gains:
gain.add(example) # depends on [control=['for'], data=['gain']] # depends on [control=['for'], data=['example']]
winner = max(gains, key=lambda gain: gain.get_gain())
if not winner.get_target_class_counts():
raise ValueError('Dataset is empty') # depends on [control=['if'], data=[]]
return winner |
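A self-contained sketch of the counter interface this method drives: each gain counter must expose add(), get_gain() and get_target_class_counts(). The real OnlineInformationGain class lives in the surrounding package; this toy version computes plain information gain for a single attribute.

import math
from collections import Counter, defaultdict

def _entropy(counts):
    total = sum(counts.values())
    return -sum(n / total * math.log2(n / total) for n in counts.values() if n)

class ToyGainCounter:
    def __init__(self, attribute, target):
        self.attribute, self.target = attribute, target
        self.target_counts = Counter()            # class distribution overall
        self.value_counts = defaultdict(Counter)  # class distribution per attribute value

    def add(self, example):
        self.target_counts[example[self.target]] += 1
        self.value_counts[example[self.attribute]][example[self.target]] += 1

    def get_target_class_counts(self):
        return self.target_counts

    def get_gain(self):
        total = sum(self.target_counts.values())
        if not total:
            return 0.0
        conditional = sum(sum(c.values()) / total * _entropy(c)
                          for c in self.value_counts.values())
        return _entropy(self.target_counts) - conditional

g = ToyGainCounter('outlook', 'play')
for e in [{'outlook': 'sunny', 'play': 'no'}, {'outlook': 'rain', 'play': 'yes'}]:
    g.add(e)
print(g.get_gain())  # 1.0 -- 'outlook' perfectly separates 'play' here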
def get(self, url, params=None, **kwargs):
""" Shorthand for self.oauth_request(url, 'get')
:param str url: url to send get oauth request to
:param dict params: request parameter to get the service data
:param kwargs: extra params to send to request api
:return: Response of the request
:rtype: requests.Response
"""
return self.oauth_request(url, 'get', params=params, **kwargs) | def function[get, parameter[self, url, params]]:
constant[ Shorthand for self.oauth_request(url, 'get')
:param str url: url to send get oauth request to
:param dict params: request parameter to get the service data
:param kwargs: extra params to send to request api
:return: Response of the request
:rtype: requests.Response
]
return[call[name[self].oauth_request, parameter[name[url], constant[get]]]] | keyword[def] identifier[get] ( identifier[self] , identifier[url] , identifier[params] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[oauth_request] ( identifier[url] , literal[string] , identifier[params] = identifier[params] ,** identifier[kwargs] ) | def get(self, url, params=None, **kwargs):
""" Shorthand for self.oauth_request(url, 'get')
:param str url: url to send get oauth request to
:param dict params: request parameter to get the service data
:param kwargs: extra params to send to request api
:return: Response of the request
:rtype: requests.Response
"""
return self.oauth_request(url, 'get', params=params, **kwargs) |
def _classify_load_memory(self, regs_init, regs_fini, mem_fini, written_regs, read_regs):
"""Classify load-memory gadgets.
"""
matches = []
regs_init_inv = self._invert_dictionary(regs_init)
# Check for "dst_reg <- mem[src_reg + offset]" pattern.
for dst_reg, dst_val in regs_fini.items():
# Make sure the *dst* register was written.
if dst_reg not in written_regs:
continue
dst_size = self._arch_regs_size[dst_reg]
# Look for memory addresses that contain *dst_val*.
for src_addr in mem_fini.read_inverse(dst_val, dst_size // 8):
# Look for registers whose values are used as memory
# addresses.
for src_reg, src_val in regs_init.items():
# Make sure the *src* register was read.
if src_reg not in read_regs:
continue
# Check restrictions.
if self._arch_regs_size[src_reg] != self._address_size:
continue
offset = (src_addr - src_val) & (2**self._address_size - 1)
src_reg_ir = ReilRegisterOperand(src_reg, self._arch_regs_size[src_reg])
src_off_ir = ReilImmediateOperand(offset, self._arch_regs_size[src_reg])
dst_reg_ir = ReilRegisterOperand(dst_reg, self._arch_regs_size[dst_reg])
matches.append({
"src": [src_reg_ir, src_off_ir],
"dst": [dst_reg_ir]
})
# Check for "dst_reg <- mem[offset]" pattern.
for dst_reg, dst_val in regs_fini.items():
# Make sure the *dst* register was written.
if dst_reg not in written_regs:
continue
dst_size = self._arch_regs_size[dst_reg]
for src_addr in mem_fini.read_inverse(dst_val, dst_size // 8):
src_reg_ir = ReilEmptyOperand()
src_off_ir = ReilImmediateOperand(src_addr, self._address_size)
dst_reg_ir = ReilRegisterOperand(dst_reg, self._arch_regs_size[dst_reg])
matches.append({
"src": [src_reg_ir, src_off_ir],
"dst": [dst_reg_ir]
})
return matches | def function[_classify_load_memory, parameter[self, regs_init, regs_fini, mem_fini, written_regs, read_regs]]:
constant[Classify load-memory gadgets.
]
variable[matches] assign[=] list[[]]
variable[regs_init_inv] assign[=] call[name[self]._invert_dictionary, parameter[name[regs_init]]]
for taget[tuple[[<ast.Name object at 0x7da1b0889360>, <ast.Name object at 0x7da1b088b0a0>]]] in starred[call[name[regs_fini].items, parameter[]]] begin[:]
if compare[name[dst_reg] <ast.NotIn object at 0x7da2590d7190> name[written_regs]] begin[:]
continue
variable[dst_size] assign[=] call[name[self]._arch_regs_size][name[dst_reg]]
for taget[name[src_addr]] in starred[call[name[mem_fini].read_inverse, parameter[name[dst_val], binary_operation[name[dst_size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b088aec0>, <ast.Name object at 0x7da1b0888130>]]] in starred[call[name[regs_init].items, parameter[]]] begin[:]
if compare[name[src_reg] <ast.NotIn object at 0x7da2590d7190> name[read_regs]] begin[:]
continue
if compare[call[name[self]._arch_regs_size][name[src_reg]] not_equal[!=] name[self]._address_size] begin[:]
continue
variable[offset] assign[=] binary_operation[binary_operation[name[src_addr] - name[src_val]] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[binary_operation[constant[2] ** name[self]._address_size] - constant[1]]]
variable[src_reg_ir] assign[=] call[name[ReilRegisterOperand], parameter[name[src_reg], call[name[self]._arch_regs_size][name[src_reg]]]]
variable[src_off_ir] assign[=] call[name[ReilImmediateOperand], parameter[name[offset], call[name[self]._arch_regs_size][name[src_reg]]]]
variable[dst_reg_ir] assign[=] call[name[ReilRegisterOperand], parameter[name[dst_reg], call[name[self]._arch_regs_size][name[dst_reg]]]]
call[name[matches].append, parameter[dictionary[[<ast.Constant object at 0x7da1b088beb0>, <ast.Constant object at 0x7da1b08897b0>], [<ast.List object at 0x7da20c7cad10>, <ast.List object at 0x7da20c7c99c0>]]]]
for taget[tuple[[<ast.Name object at 0x7da20c7caaa0>, <ast.Name object at 0x7da20c7ca3b0>]]] in starred[call[name[regs_fini].items, parameter[]]] begin[:]
if compare[name[dst_reg] <ast.NotIn object at 0x7da2590d7190> name[written_regs]] begin[:]
continue
variable[dst_size] assign[=] call[name[self]._arch_regs_size][name[dst_reg]]
for taget[name[src_addr]] in starred[call[name[mem_fini].read_inverse, parameter[name[dst_val], binary_operation[name[dst_size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]]] begin[:]
variable[src_reg_ir] assign[=] call[name[ReilEmptyOperand], parameter[]]
variable[src_off_ir] assign[=] call[name[ReilImmediateOperand], parameter[name[src_addr], name[self]._address_size]]
variable[dst_reg_ir] assign[=] call[name[ReilRegisterOperand], parameter[name[dst_reg], call[name[self]._arch_regs_size][name[dst_reg]]]]
call[name[matches].append, parameter[dictionary[[<ast.Constant object at 0x7da18ede6410>, <ast.Constant object at 0x7da18ede4250>], [<ast.List object at 0x7da18ede5c60>, <ast.List object at 0x7da18ede7c10>]]]]
return[name[matches]] | keyword[def] identifier[_classify_load_memory] ( identifier[self] , identifier[regs_init] , identifier[regs_fini] , identifier[mem_fini] , identifier[written_regs] , identifier[read_regs] ):
literal[string]
identifier[matches] =[]
identifier[regs_init_inv] = identifier[self] . identifier[_invert_dictionary] ( identifier[regs_init] )
keyword[for] identifier[dst_reg] , identifier[dst_val] keyword[in] identifier[regs_fini] . identifier[items] ():
keyword[if] identifier[dst_reg] keyword[not] keyword[in] identifier[written_regs] :
keyword[continue]
identifier[dst_size] = identifier[self] . identifier[_arch_regs_size] [ identifier[dst_reg] ]
keyword[for] identifier[src_addr] keyword[in] identifier[mem_fini] . identifier[read_inverse] ( identifier[dst_val] , identifier[dst_size] // literal[int] ):
keyword[for] identifier[src_reg] , identifier[src_val] keyword[in] identifier[regs_init] . identifier[items] ():
keyword[if] identifier[src_reg] keyword[not] keyword[in] identifier[read_regs] :
keyword[continue]
keyword[if] identifier[self] . identifier[_arch_regs_size] [ identifier[src_reg] ]!= identifier[self] . identifier[_address_size] :
keyword[continue]
identifier[offset] =( identifier[src_addr] - identifier[src_val] )&( literal[int] ** identifier[self] . identifier[_address_size] - literal[int] )
identifier[src_reg_ir] = identifier[ReilRegisterOperand] ( identifier[src_reg] , identifier[self] . identifier[_arch_regs_size] [ identifier[src_reg] ])
identifier[src_off_ir] = identifier[ReilImmediateOperand] ( identifier[offset] , identifier[self] . identifier[_arch_regs_size] [ identifier[src_reg] ])
identifier[dst_reg_ir] = identifier[ReilRegisterOperand] ( identifier[dst_reg] , identifier[self] . identifier[_arch_regs_size] [ identifier[dst_reg] ])
identifier[matches] . identifier[append] ({
literal[string] :[ identifier[src_reg_ir] , identifier[src_off_ir] ],
literal[string] :[ identifier[dst_reg_ir] ]
})
keyword[for] identifier[dst_reg] , identifier[dst_val] keyword[in] identifier[regs_fini] . identifier[items] ():
keyword[if] identifier[dst_reg] keyword[not] keyword[in] identifier[written_regs] :
keyword[continue]
identifier[dst_size] = identifier[self] . identifier[_arch_regs_size] [ identifier[dst_reg] ]
keyword[for] identifier[src_addr] keyword[in] identifier[mem_fini] . identifier[read_inverse] ( identifier[dst_val] , identifier[dst_size] // literal[int] ):
identifier[src_reg_ir] = identifier[ReilEmptyOperand] ()
identifier[src_off_ir] = identifier[ReilImmediateOperand] ( identifier[src_addr] , identifier[self] . identifier[_address_size] )
identifier[dst_reg_ir] = identifier[ReilRegisterOperand] ( identifier[dst_reg] , identifier[self] . identifier[_arch_regs_size] [ identifier[dst_reg] ])
identifier[matches] . identifier[append] ({
literal[string] :[ identifier[src_reg_ir] , identifier[src_off_ir] ],
literal[string] :[ identifier[dst_reg_ir] ]
})
keyword[return] identifier[matches] | def _classify_load_memory(self, regs_init, regs_fini, mem_fini, written_regs, read_regs):
"""Classify load-memory gadgets.
"""
matches = []
regs_init_inv = self._invert_dictionary(regs_init)
# Check for "dst_reg <- mem[src_reg + offset]" pattern.
for (dst_reg, dst_val) in regs_fini.items():
# Make sure the *dst* register was written.
if dst_reg not in written_regs:
continue # depends on [control=['if'], data=[]]
dst_size = self._arch_regs_size[dst_reg]
# Look for memory addresses that contain *dst_val*.
for src_addr in mem_fini.read_inverse(dst_val, dst_size // 8):
# Look for registers whose values are used as memory
# addresses.
for (src_reg, src_val) in regs_init.items():
# Make sure the *src* register was read.
if src_reg not in read_regs:
continue # depends on [control=['if'], data=[]]
# Check restrictions.
if self._arch_regs_size[src_reg] != self._address_size:
continue # depends on [control=['if'], data=[]]
offset = src_addr - src_val & 2 ** self._address_size - 1
src_reg_ir = ReilRegisterOperand(src_reg, self._arch_regs_size[src_reg])
src_off_ir = ReilImmediateOperand(offset, self._arch_regs_size[src_reg])
dst_reg_ir = ReilRegisterOperand(dst_reg, self._arch_regs_size[dst_reg])
matches.append({'src': [src_reg_ir, src_off_ir], 'dst': [dst_reg_ir]}) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['src_addr']] # depends on [control=['for'], data=[]]
# Check for "dst_reg <- mem[offset]" pattern.
for (dst_reg, dst_val) in regs_fini.items():
# Make sure the *dst* register was written.
if dst_reg not in written_regs:
continue # depends on [control=['if'], data=[]]
dst_size = self._arch_regs_size[dst_reg]
for src_addr in mem_fini.read_inverse(dst_val, dst_size // 8):
src_reg_ir = ReilEmptyOperand()
src_off_ir = ReilImmediateOperand(src_addr, self._address_size)
dst_reg_ir = ReilRegisterOperand(dst_reg, self._arch_regs_size[dst_reg])
matches.append({'src': [src_reg_ir, src_off_ir], 'dst': [dst_reg_ir]}) # depends on [control=['for'], data=['src_addr']] # depends on [control=['for'], data=[]]
return matches |
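The masking on the offset computation is the subtle step: displacements are taken modulo 2**address_size, so a negative displacement wraps to its two's-complement encoding. A standalone check with a 32-bit address space:

address_size = 32
src_val = 0x1000    # register value used as the base address
src_addr = 0x0FF8   # memory address where the target value was found
offset = (src_addr - src_val) & (2**address_size - 1)
print(hex(offset))  # 0xfffffff8, i.e. -8 encoded in 32 bits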
def create(self, resource, data):
'''
A base function that performs a default create POST request for a given object
'''
service_def, resource_def, path = self._get_service_information(
resource)
self._validate(resource, data)
return self.call(path=path, data=data, method='post') | def function[create, parameter[self, resource, data]]:
constant[
A base function that performs a default create POST request for a given object
]
<ast.Tuple object at 0x7da204565540> assign[=] call[name[self]._get_service_information, parameter[name[resource]]]
call[name[self]._validate, parameter[name[resource], name[data]]]
return[call[name[self].call, parameter[]]] | keyword[def] identifier[create] ( identifier[self] , identifier[resource] , identifier[data] ):
literal[string]
identifier[service_def] , identifier[resource_def] , identifier[path] = identifier[self] . identifier[_get_service_information] (
identifier[resource] )
identifier[self] . identifier[_validate] ( identifier[resource] , identifier[data] )
keyword[return] identifier[self] . identifier[call] ( identifier[path] = identifier[path] , identifier[data] = identifier[data] , identifier[method] = literal[string] ) | def create(self, resource, data):
"""
A base function that performs a default create POST request for a given object
"""
(service_def, resource_def, path) = self._get_service_information(resource)
self._validate(resource, data)
return self.call(path=path, data=data, method='post') |
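A hedged usage sketch; the client object and the 'ticket' resource name are hypothetical, and the payload must satisfy whatever schema _validate enforces for that resource:

new_ticket = client.create('ticket', {'subject': 'Printer on fire'})
print(new_ticket)  # whatever the underlying POST returns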
def order_transforms(transforms):
"""Orders transforms to ensure proper chaining.
For example, if `transforms = [B, A, C]`, and `A` produces outputs needed
by `B`, the transforms will be re-ordered to `[A, B, C]`.
Parameters
----------
transforms : list
List of transform instances to order.
Outputs
-------
list :
List of transforms ordered such that forward transforms can be carried
out without error.
"""
# get a set of all inputs and all outputs
outputs = set().union(*[t.outputs for t in transforms])
out = []
remaining = [t for t in transforms]
while remaining:
# pull out transforms that have no inputs in the set of outputs
leftover = []
for t in remaining:
if t.inputs.isdisjoint(outputs):
out.append(t)
outputs -= t.outputs
else:
leftover.append(t)
remaining = leftover
return out | def function[order_transforms, parameter[transforms]]:
constant[Orders transforms to ensure proper chaining.
For example, if `transforms = [B, A, C]`, and `A` produces outputs needed
by `B`, the transforms will be re-ordered to `[A, B, C]`.
Parameters
----------
transforms : list
List of transform instances to order.
Outputs
-------
list :
List of transforms ordered such that forward transforms can be carried
out without error.
]
variable[outputs] assign[=] call[call[name[set], parameter[]].union, parameter[<ast.Starred object at 0x7da207f01d50>]]
variable[out] assign[=] list[[]]
variable[remaining] assign[=] <ast.ListComp object at 0x7da207f009a0>
while name[remaining] begin[:]
variable[leftover] assign[=] list[[]]
for taget[name[t]] in starred[name[remaining]] begin[:]
if call[name[t].inputs.isdisjoint, parameter[name[outputs]]] begin[:]
call[name[out].append, parameter[name[t]]]
<ast.AugAssign object at 0x7da207f001f0>
variable[remaining] assign[=] name[leftover]
return[name[out]] | keyword[def] identifier[order_transforms] ( identifier[transforms] ):
literal[string]
identifier[outputs] = identifier[set] (). identifier[union] (*[ identifier[t] . identifier[outputs] keyword[for] identifier[t] keyword[in] identifier[transforms] ])
identifier[out] =[]
identifier[remaining] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[transforms] ]
keyword[while] identifier[remaining] :
identifier[leftover] =[]
keyword[for] identifier[t] keyword[in] identifier[remaining] :
keyword[if] identifier[t] . identifier[inputs] . identifier[isdisjoint] ( identifier[outputs] ):
identifier[out] . identifier[append] ( identifier[t] )
identifier[outputs] -= identifier[t] . identifier[outputs]
keyword[else] :
identifier[leftover] . identifier[append] ( identifier[t] )
identifier[remaining] = identifier[leftover]
keyword[return] identifier[out] | def order_transforms(transforms):
"""Orders transforms to ensure proper chaining.
For example, if `transforms = [B, A, C]`, and `A` produces outputs needed
by `B`, the transforms will be re-ordered to `[A, B, C]`.
Parameters
----------
transforms : list
List of transform instances to order.
Outputs
-------
list :
List of transformed ordered such that forward transforms can be carried
out without error.
"""
# get a set of all inputs and all outputs
outputs = set().union(*[t.outputs for t in transforms])
out = []
remaining = [t for t in transforms]
while remaining:
# pull out transforms that have no inputs in the set of outputs
leftover = []
for t in remaining:
if t.inputs.isdisjoint(outputs):
out.append(t)
outputs -= t.outputs # depends on [control=['if'], data=[]]
else:
leftover.append(t) # depends on [control=['for'], data=['t']]
remaining = leftover # depends on [control=['while'], data=[]]
return out |
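A quick check of the reordering with stand-in transform objects that only carry the inputs/outputs sets the function inspects. Note that C has no dependencies, so [A, C, B] is also a valid ordering and is what this pass actually produces:

class T:
    def __init__(self, name, inputs, outputs):
        self.name, self.inputs, self.outputs = name, set(inputs), set(outputs)
    def __repr__(self):
        return self.name

A = T('A', inputs={'x'}, outputs={'a'})
B = T('B', inputs={'a'}, outputs={'b'})  # B consumes A's output
C = T('C', inputs={'y'}, outputs={'c'})
print(order_transforms([B, A, C]))       # [A, C, B] -- A ordered before B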
def reports(self, **kwargs):
"""Get all reports for this node. Additional arguments may also be
specified that will be passed to the query function.
"""
return self.__api.reports(
query=EqualsOperator("certname", self.name),
**kwargs) | def function[reports, parameter[self]]:
constant[Get all reports for this node. Additional arguments may also be
specified that will be passed to the query function.
]
return[call[name[self].__api.reports, parameter[]]] | keyword[def] identifier[reports] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[__api] . identifier[reports] (
identifier[query] = identifier[EqualsOperator] ( literal[string] , identifier[self] . identifier[name] ),
** identifier[kwargs] ) | def reports(self, **kwargs):
"""Get all reports for this node. Additional arguments may also be
specified that will be passed to the query function.
"""
return self.__api.reports(query=EqualsOperator('certname', self.name), **kwargs) |
def sample(self, label):
"""
generate random padding boxes according to parameters
if satisfactory padding is generated, apply it to the ground-truth as well
Parameters:
----------
label : numpy.array (n x 5 matrix)
ground-truths
Returns:
----------
list of (crop_box, label) tuples, if failed, return empty list []
"""
samples = []
count = 0
for trial in range(self.max_trials):
if count >= self.max_sample:
return samples
scale = np.random.uniform(self.min_scale, self.max_scale)
min_ratio = max(self.min_aspect_ratio, scale * scale)
max_ratio = min(self.max_aspect_ratio, 1. / scale / scale)
ratio = math.sqrt(np.random.uniform(min_ratio, max_ratio))
width = scale * ratio
if width < 1:
continue
height = scale / ratio
if height < 1:
continue
left = np.random.uniform(0., 1 - width)
top = np.random.uniform(0., 1 - height)
right = left + width
bot = top + height
rand_box = (left, top, right, bot)
valid_mask = np.where(label[:, 0] > -1)[0]
gt = label[valid_mask, :]
new_gt_boxes = []
for i in range(gt.shape[0]):
xmin = (gt[i, 1] - left) / width
ymin = (gt[i, 2] - top) / height
xmax = (gt[i, 3] - left) / width
ymax = (gt[i, 4] - top) / height
new_size = min(xmax - xmin, ymax - ymin)
if new_size < self.min_gt_scale:
new_gt_boxes = []
break
new_gt_boxes.append([gt[i, 0], xmin, ymin, xmax, ymax])
if not new_gt_boxes:
continue
new_gt_boxes = np.array(new_gt_boxes)
label = np.lib.pad(new_gt_boxes,
((0, label.shape[0]-new_gt_boxes.shape[0]), (0,0)), \
'constant', constant_values=(-1, -1))
samples.append((rand_box, label))
count += 1
return samples | def function[sample, parameter[self, label]]:
constant[
generate random padding boxes according to parameters
if satisfactory padding is generated, apply it to the ground-truth as well
Parameters:
----------
label : numpy.array (n x 5 matrix)
ground-truths
Returns:
----------
list of (crop_box, label) tuples, if failed, return empty list []
]
variable[samples] assign[=] list[[]]
variable[count] assign[=] constant[0]
for taget[name[trial]] in starred[call[name[range], parameter[name[self].max_trials]]] begin[:]
if compare[name[count] greater_or_equal[>=] name[self].max_sample] begin[:]
return[name[samples]]
variable[scale] assign[=] call[name[np].random.uniform, parameter[name[self].min_scale, name[self].max_scale]]
variable[min_ratio] assign[=] call[name[max], parameter[name[self].min_aspect_ratio, binary_operation[name[scale] * name[scale]]]]
variable[max_ratio] assign[=] call[name[min], parameter[name[self].max_aspect_ratio, binary_operation[binary_operation[constant[1.0] / name[scale]] / name[scale]]]]
variable[ratio] assign[=] call[name[math].sqrt, parameter[call[name[np].random.uniform, parameter[name[min_ratio], name[max_ratio]]]]]
variable[width] assign[=] binary_operation[name[scale] * name[ratio]]
if compare[name[width] less[<] constant[1]] begin[:]
continue
variable[height] assign[=] binary_operation[name[scale] / name[ratio]]
if compare[name[height] less[<] constant[1]] begin[:]
continue
variable[left] assign[=] call[name[np].random.uniform, parameter[constant[0.0], binary_operation[constant[1] - name[width]]]]
variable[top] assign[=] call[name[np].random.uniform, parameter[constant[0.0], binary_operation[constant[1] - name[height]]]]
variable[right] assign[=] binary_operation[name[left] + name[width]]
variable[bot] assign[=] binary_operation[name[top] + name[height]]
variable[rand_box] assign[=] tuple[[<ast.Name object at 0x7da1b1e14820>, <ast.Name object at 0x7da1b1e156c0>, <ast.Name object at 0x7da1b1e17550>, <ast.Name object at 0x7da1b1e150f0>]]
variable[valid_mask] assign[=] call[call[name[np].where, parameter[compare[call[name[label]][tuple[[<ast.Slice object at 0x7da1b1e17f10>, <ast.Constant object at 0x7da1b1e14700>]]] greater[>] <ast.UnaryOp object at 0x7da1b1e16da0>]]]][constant[0]]
variable[gt] assign[=] call[name[label]][tuple[[<ast.Name object at 0x7da1b1e15240>, <ast.Slice object at 0x7da1b1e14370>]]]
variable[new_gt_boxes] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[gt].shape][constant[0]]]]] begin[:]
variable[xmin] assign[=] binary_operation[binary_operation[call[name[gt]][tuple[[<ast.Name object at 0x7da18f8111e0>, <ast.Constant object at 0x7da18f813d00>]]] - name[left]] / name[width]]
variable[ymin] assign[=] binary_operation[binary_operation[call[name[gt]][tuple[[<ast.Name object at 0x7da18f8122c0>, <ast.Constant object at 0x7da18f8129e0>]]] - name[top]] / name[height]]
variable[xmax] assign[=] binary_operation[binary_operation[call[name[gt]][tuple[[<ast.Name object at 0x7da18f8130a0>, <ast.Constant object at 0x7da18f810820>]]] - name[left]] / name[width]]
variable[ymax] assign[=] binary_operation[binary_operation[call[name[gt]][tuple[[<ast.Name object at 0x7da18f810700>, <ast.Constant object at 0x7da18f812a70>]]] - name[top]] / name[height]]
variable[new_size] assign[=] call[name[min], parameter[binary_operation[name[xmax] - name[xmin]], binary_operation[name[ymax] - name[ymin]]]]
if compare[name[new_size] less[<] name[self].min_gt_scale] begin[:]
variable[new_gt_boxes] assign[=] list[[]]
break
call[name[new_gt_boxes].append, parameter[list[[<ast.Subscript object at 0x7da1b1ef1210>, <ast.Name object at 0x7da1b1ef3280>, <ast.Name object at 0x7da1b1ef0ac0>, <ast.Name object at 0x7da1b1ef1f60>, <ast.Name object at 0x7da1b1ef01f0>]]]]
if <ast.UnaryOp object at 0x7da1b1ef0d30> begin[:]
continue
variable[new_gt_boxes] assign[=] call[name[np].array, parameter[name[new_gt_boxes]]]
variable[label] assign[=] call[name[np].lib.pad, parameter[name[new_gt_boxes], tuple[[<ast.Tuple object at 0x7da1b1ef11e0>, <ast.Tuple object at 0x7da1b1ef05e0>]], constant[constant]]]
call[name[samples].append, parameter[tuple[[<ast.Name object at 0x7da1b1ef19c0>, <ast.Name object at 0x7da1b1ef1b10>]]]]
<ast.AugAssign object at 0x7da1b1ef24a0>
return[name[samples]] | keyword[def] identifier[sample] ( identifier[self] , identifier[label] ):
literal[string]
identifier[samples] =[]
identifier[count] = literal[int]
keyword[for] identifier[trial] keyword[in] identifier[range] ( identifier[self] . identifier[max_trials] ):
keyword[if] identifier[count] >= identifier[self] . identifier[max_sample] :
keyword[return] identifier[samples]
identifier[scale] = identifier[np] . identifier[random] . identifier[uniform] ( identifier[self] . identifier[min_scale] , identifier[self] . identifier[max_scale] )
identifier[min_ratio] = identifier[max] ( identifier[self] . identifier[min_aspect_ratio] , identifier[scale] * identifier[scale] )
identifier[max_ratio] = identifier[min] ( identifier[self] . identifier[max_aspect_ratio] , literal[int] / identifier[scale] / identifier[scale] )
identifier[ratio] = identifier[math] . identifier[sqrt] ( identifier[np] . identifier[random] . identifier[uniform] ( identifier[min_ratio] , identifier[max_ratio] ))
identifier[width] = identifier[scale] * identifier[ratio]
keyword[if] identifier[width] < literal[int] :
keyword[continue]
identifier[height] = identifier[scale] / identifier[ratio]
keyword[if] identifier[height] < literal[int] :
keyword[continue]
identifier[left] = identifier[np] . identifier[random] . identifier[uniform] ( literal[int] , literal[int] - identifier[width] )
identifier[top] = identifier[np] . identifier[random] . identifier[uniform] ( literal[int] , literal[int] - identifier[height] )
identifier[right] = identifier[left] + identifier[width]
identifier[bot] = identifier[top] + identifier[height]
identifier[rand_box] =( identifier[left] , identifier[top] , identifier[right] , identifier[bot] )
identifier[valid_mask] = identifier[np] . identifier[where] ( identifier[label] [:, literal[int] ]>- literal[int] )[ literal[int] ]
identifier[gt] = identifier[label] [ identifier[valid_mask] ,:]
identifier[new_gt_boxes] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[gt] . identifier[shape] [ literal[int] ]):
identifier[xmin] =( identifier[gt] [ identifier[i] , literal[int] ]- identifier[left] )/ identifier[width]
identifier[ymin] =( identifier[gt] [ identifier[i] , literal[int] ]- identifier[top] )/ identifier[height]
identifier[xmax] =( identifier[gt] [ identifier[i] , literal[int] ]- identifier[left] )/ identifier[width]
identifier[ymax] =( identifier[gt] [ identifier[i] , literal[int] ]- identifier[top] )/ identifier[height]
identifier[new_size] = identifier[min] ( identifier[xmax] - identifier[xmin] , identifier[ymax] - identifier[ymin] )
keyword[if] identifier[new_size] < identifier[self] . identifier[min_gt_scale] :
identifier[new_gt_boxes] =[]
keyword[break]
identifier[new_gt_boxes] . identifier[append] ([ identifier[gt] [ identifier[i] , literal[int] ], identifier[xmin] , identifier[ymin] , identifier[xmax] , identifier[ymax] ])
keyword[if] keyword[not] identifier[new_gt_boxes] :
keyword[continue]
identifier[new_gt_boxes] = identifier[np] . identifier[array] ( identifier[new_gt_boxes] )
identifier[label] = identifier[np] . identifier[lib] . identifier[pad] ( identifier[new_gt_boxes] ,
(( literal[int] , identifier[label] . identifier[shape] [ literal[int] ]- identifier[new_gt_boxes] . identifier[shape] [ literal[int] ]),( literal[int] , literal[int] )), literal[string] , identifier[constant_values] =(- literal[int] ,- literal[int] ))
identifier[samples] . identifier[append] (( identifier[rand_box] , identifier[label] ))
identifier[count] += literal[int]
keyword[return] identifier[samples] | def sample(self, label):
"""
generate random padding boxes according to parameters
if satisfactory padding is generated, apply it to the ground-truth as well
Parameters:
----------
label : numpy.array (n x 5 matrix)
ground-truths
Returns:
----------
list of (crop_box, label) tuples, if failed, return empty list []
"""
samples = []
count = 0
for trial in range(self.max_trials):
if count >= self.max_sample:
return samples # depends on [control=['if'], data=[]]
scale = np.random.uniform(self.min_scale, self.max_scale)
min_ratio = max(self.min_aspect_ratio, scale * scale)
max_ratio = min(self.max_aspect_ratio, 1.0 / scale / scale)
ratio = math.sqrt(np.random.uniform(min_ratio, max_ratio))
width = scale * ratio
if width < 1:
continue # depends on [control=['if'], data=[]]
height = scale / ratio
if height < 1:
continue # depends on [control=['if'], data=[]]
left = np.random.uniform(0.0, 1 - width)
top = np.random.uniform(0.0, 1 - height)
right = left + width
bot = top + height
rand_box = (left, top, right, bot)
valid_mask = np.where(label[:, 0] > -1)[0]
gt = label[valid_mask, :]
new_gt_boxes = []
for i in range(gt.shape[0]):
xmin = (gt[i, 1] - left) / width
ymin = (gt[i, 2] - top) / height
xmax = (gt[i, 3] - left) / width
ymax = (gt[i, 4] - top) / height
new_size = min(xmax - xmin, ymax - ymin)
if new_size < self.min_gt_scale:
new_gt_boxes = []
break # depends on [control=['if'], data=[]]
new_gt_boxes.append([gt[i, 0], xmin, ymin, xmax, ymax]) # depends on [control=['for'], data=['i']]
if not new_gt_boxes:
continue # depends on [control=['if'], data=[]]
new_gt_boxes = np.array(new_gt_boxes)
label = np.lib.pad(new_gt_boxes, ((0, label.shape[0] - new_gt_boxes.shape[0]), (0, 0)), 'constant', constant_values=(-1, -1))
samples.append((rand_box, label))
count += 1 # depends on [control=['for'], data=[]]
return samples |
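The coordinate arithmetic at the heart of sample(), shown standalone: a ground-truth box given in unit image coordinates is re-expressed relative to a padding box that is larger than the image (so left/top may be negative):

left, top, width, height = -0.25, -0.25, 1.5, 1.5  # padding box around the image
xmin, ymin, xmax, ymax = 0.3, 0.3, 0.6, 0.6        # ground-truth box, image coords
new_box = ((xmin - left) / width, (ymin - top) / height,
           (xmax - left) / width, (ymax - top) / height)
print(new_box)  # roughly (0.3667, 0.3667, 0.5667, 0.5667)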
def satellite(isochrone, kernel, stellar_mass, distance_modulus,**kwargs):
"""
Wrapping the isochrone and kernel simulate functions.
"""
mag_1, mag_2 = isochrone.simulate(stellar_mass, distance_modulus)
lon, lat = kernel.simulate(len(mag_1))
return mag_1, mag_2, lon, lat | def function[satellite, parameter[isochrone, kernel, stellar_mass, distance_modulus]]:
constant[
Wrapping the isochrone and kernel simulate functions.
]
<ast.Tuple object at 0x7da1b2346fe0> assign[=] call[name[isochrone].simulate, parameter[name[stellar_mass], name[distance_modulus]]]
<ast.Tuple object at 0x7da1b23460e0> assign[=] call[name[kernel].simulate, parameter[call[name[len], parameter[name[mag_1]]]]]
return[tuple[[<ast.Name object at 0x7da1b2345f30>, <ast.Name object at 0x7da1b2345bd0>, <ast.Name object at 0x7da1b2345de0>, <ast.Name object at 0x7da1b2345cc0>]]] | keyword[def] identifier[satellite] ( identifier[isochrone] , identifier[kernel] , identifier[stellar_mass] , identifier[distance_modulus] ,** identifier[kwargs] ):
literal[string]
identifier[mag_1] , identifier[mag_2] = identifier[isochrone] . identifier[simulate] ( identifier[stellar_mass] , identifier[distance_modulus] )
identifier[lon] , identifier[lat] = identifier[kernel] . identifier[simulate] ( identifier[len] ( identifier[mag_1] ))
keyword[return] identifier[mag_1] , identifier[mag_2] , identifier[lon] , identifier[lat] | def satellite(isochrone, kernel, stellar_mass, distance_modulus, **kwargs):
"""
Wrapping the isochrone and kernel simulate functions.
"""
(mag_1, mag_2) = isochrone.simulate(stellar_mass, distance_modulus)
(lon, lat) = kernel.simulate(len(mag_1))
return (mag_1, mag_2, lon, lat) |
def set_boot_arch(arch='default'):
'''
Set the kernel to boot in 32 or 64 bit mode on next boot.
.. note::
When this function fails with the error ``changes to kernel
architecture failed to save!``, then the boot arch is not updated.
This is either an Apple bug, not available on the test system, or a
result of system files being locked down in macOS (SIP Protection).
:param str arch: A string representing the desired architecture. If no
value is passed, default is assumed. Valid values include:
- i386
- x86_64
- default
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' system.set_boot_arch i386
'''
if arch not in ['i386', 'x86_64', 'default']:
msg = 'Invalid value passed for arch.\n' \
'Must be i386, x86_64, or default.\n' \
'Passed: {0}'.format(arch)
raise SaltInvocationError(msg)
cmd = 'systemsetup -setkernelbootarchitecture {0}'.format(arch)
__utils__['mac_utils.execute_return_success'](cmd)
return __utils__['mac_utils.confirm_updated'](
arch,
get_boot_arch,
) | def function[set_boot_arch, parameter[arch]]:
constant[
Set the kernel to boot in 32 or 64 bit mode on next boot.
.. note::
When this function fails with the error ``changes to kernel
architecture failed to save!``, then the boot arch is not updated.
This is either an Apple bug, not available on the test system, or a
result of system files being locked down in macOS (SIP Protection).
:param str arch: A string representing the desired architecture. If no
value is passed, default is assumed. Valid values include:
- i386
- x86_64
- default
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' system.set_boot_arch i386
]
if compare[name[arch] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da18c4ce020>, <ast.Constant object at 0x7da18c4cf4c0>, <ast.Constant object at 0x7da18c4cd1b0>]]] begin[:]
variable[msg] assign[=] call[constant[Invalid value passed for arch.
Must be i386, x86_64, or default.
Passed: {0}].format, parameter[name[arch]]]
<ast.Raise object at 0x7da18c4cc850>
variable[cmd] assign[=] call[constant[systemsetup -setkernelbootarchitecture {0}].format, parameter[name[arch]]]
call[call[name[__utils__]][constant[mac_utils.execute_return_success]], parameter[name[cmd]]]
return[call[call[name[__utils__]][constant[mac_utils.confirm_updated]], parameter[name[arch], name[get_boot_arch]]]] | keyword[def] identifier[set_boot_arch] ( identifier[arch] = literal[string] ):
literal[string]
keyword[if] identifier[arch] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[msg] = literal[string] literal[string] literal[string] . identifier[format] ( identifier[arch] )
keyword[raise] identifier[SaltInvocationError] ( identifier[msg] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[arch] )
identifier[__utils__] [ literal[string] ]( identifier[cmd] )
keyword[return] identifier[__utils__] [ literal[string] ](
identifier[arch] ,
identifier[get_boot_arch] ,
) | def set_boot_arch(arch='default'):
"""
Set the kernel to boot in 32 or 64 bit mode on next boot.
.. note::
When this function fails with the error ``changes to kernel
architecture failed to save!``, then the boot arch is not updated.
This is either an Apple bug, not available on the test system, or a
result of system files being locked down in macOS (SIP Protection).
:param str arch: A string representing the desired architecture. If no
value is passed, default is assumed. Valid values include:
- i386
- x86_64
- default
:return: True if successful, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' system.set_boot_arch i386
"""
if arch not in ['i386', 'x86_64', 'default']:
msg = 'Invalid value passed for arch.\nMust be i386, x86_64, or default.\nPassed: {0}'.format(arch)
raise SaltInvocationError(msg) # depends on [control=['if'], data=['arch']]
cmd = 'systemsetup -setkernelbootarchitecture {0}'.format(arch)
__utils__['mac_utils.execute_return_success'](cmd)
return __utils__['mac_utils.confirm_updated'](arch, get_boot_arch) |
def without(self, *keys):
"""
Get all items except for those with the specified keys.
:param keys: The keys to remove
:type keys: tuple
:rtype: Collection
"""
items = copy(self.items)
keys = reversed(sorted(keys))
for key in keys:
del items[key]
return self.__class__(items) | def function[without, parameter[self]]:
constant[
Get all items except for those with the specified keys.
:param keys: The keys to remove
:type keys: tuple
:rtype: Collection
]
variable[items] assign[=] call[name[copy], parameter[name[self].items]]
variable[keys] assign[=] call[name[reversed], parameter[call[name[sorted], parameter[name[keys]]]]]
for taget[name[key]] in starred[name[keys]] begin[:]
<ast.Delete object at 0x7da1b055e710>
return[call[name[self].__class__, parameter[name[items]]]] | keyword[def] identifier[without] ( identifier[self] ,* identifier[keys] ):
literal[string]
identifier[items] = identifier[copy] ( identifier[self] . identifier[items] )
identifier[keys] = identifier[reversed] ( identifier[sorted] ( identifier[keys] ))
keyword[for] identifier[key] keyword[in] identifier[keys] :
keyword[del] identifier[items] [ identifier[key] ]
keyword[return] identifier[self] . identifier[__class__] ( identifier[items] ) | def without(self, *keys):
"""
Get all items except for those with the specified keys.
:param keys: The keys to remove
:type keys: tuple
:rtype: Collection
"""
items = copy(self.items)
keys = reversed(sorted(keys))
for key in keys:
del items[key] # depends on [control=['for'], data=['key']]
return self.__class__(items) |
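A usage sketch, assuming Collection wraps a plain list in its items attribute. Deleting the keys in descending order is the design choice that keeps earlier indices valid while items are removed:

c = Collection(['a', 'b', 'c', 'd'])
print(c.without(0, 2).items)  # ['b', 'd'] -- index 2 removed before index 0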
def writeANVLString(ANVLDict, ordering=UNTL_XML_ORDER):
"""Take a dictionary and write out the key/value pairs
in ANVL format.
"""
lines = []
# Loop through the ordering for the data.
for key in ordering:
# Make sure the element exists in the data set.
if key in ANVLDict:
# Get the list of elements.
element_list = ANVLDict[key]
# Loop through the element contents.
for element in element_list:
value = element.get('content', '')
offset = len(key) + 1
line = '%s: %s' % (key, breakString(value, 79, offset))
lines.append(line)
return '\n'.join(lines) | def function[writeANVLString, parameter[ANVLDict, ordering]]:
constant[Take a dictionary and write out the key/value pairs
in ANVL format.
]
variable[lines] assign[=] list[[]]
for taget[name[key]] in starred[name[ordering]] begin[:]
if compare[name[key] in name[ANVLDict]] begin[:]
variable[element_list] assign[=] call[name[ANVLDict]][name[key]]
for taget[name[element]] in starred[name[element_list]] begin[:]
variable[value] assign[=] call[name[element].get, parameter[constant[content], constant[]]]
variable[offset] assign[=] binary_operation[call[name[len], parameter[name[key]]] + constant[1]]
variable[line] assign[=] binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b225d270>, <ast.Call object at 0x7da1b225d750>]]]
call[name[lines].append, parameter[name[line]]]
return[call[constant[
].join, parameter[name[lines]]]] | keyword[def] identifier[writeANVLString] ( identifier[ANVLDict] , identifier[ordering] = identifier[UNTL_XML_ORDER] ):
literal[string]
identifier[lines] =[]
keyword[for] identifier[key] keyword[in] identifier[ordering] :
keyword[if] identifier[key] keyword[in] identifier[ANVLDict] :
identifier[element_list] = identifier[ANVLDict] [ identifier[key] ]
keyword[for] identifier[element] keyword[in] identifier[element_list] :
identifier[value] = identifier[element] . identifier[get] ( literal[string] , literal[string] )
identifier[offset] = identifier[len] ( identifier[key] )+ literal[int]
identifier[line] = literal[string] %( identifier[key] , identifier[breakString] ( identifier[value] , literal[int] , identifier[offset] ))
identifier[lines] . identifier[append] ( identifier[line] )
keyword[return] literal[string] . identifier[join] ( identifier[lines] ) | def writeANVLString(ANVLDict, ordering=UNTL_XML_ORDER):
"""Take a dictionary and write out the key/value pairs
in ANVL format.
"""
lines = []
# Loop through the ordering for the data.
for key in ordering:
# Make sure the element exists in the data set.
if key in ANVLDict:
# Get the list of elements.
element_list = ANVLDict[key]
# Loop through the element contents.
for element in element_list:
value = element.get('content', '')
offset = len(key) + 1
line = '%s: %s' % (key, breakString(value, 79, offset))
lines.append(line) # depends on [control=['for'], data=['element']] # depends on [control=['if'], data=['key', 'ANVLDict']] # depends on [control=['for'], data=['key']]
return '\n'.join(lines) |
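A hedged usage sketch: each key maps to a list of element dicts carrying a 'content' entry, and breakString is assumed to return short values unchanged (it wraps long lines at 79 columns with a hanging indent of len(key) + 1):

record = {
    'title': [{'content': 'An example record'}],
    'creator': [{'content': 'Doe, Jane'}, {'content': 'Roe, Richard'}],
}
print(writeANVLString(record, ordering=['title', 'creator']))
# title: An example record
# creator: Doe, Jane
# creator: Roe, Richard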
def guid_to_squid(guid):
'''
Converts a GUID to a compressed guid (SQUID)
Each Guid has 5 parts separated by '-'. For the first three each one will be
totally reversed, and for the remaining two each one will be reversed by
every other character. Then the final compressed Guid will be constructed by
concatenating all the reversed parts without '-'.
.. Example::
Input: 2BE0FA87-5B36-43CF-95C8-C68D6673FB94
Reversed: 78AF0EB2-63B5-FC34-598C-6CD86637BF49
Final Compressed Guid: 78AF0EB263B5FC34598C6CD86637BF49
Args:
guid (str): A valid GUID
Returns:
str: A valid compressed GUID (SQUID)
'''
guid_pattern = re.compile(r'^\{(\w{8})-(\w{4})-(\w{4})-(\w\w)(\w\w)-(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)\}$')
guid_match = guid_pattern.match(guid)
squid = ''
if guid_match is not None:
for index in range(1, 12):
squid += guid_match.group(index)[::-1]
return squid | def function[guid_to_squid, parameter[guid]]:
constant[
Converts a GUID to a compressed guid (SQUID)
Each Guid has 5 parts separated by '-'. For the first three each one will be
totally reversed, and for the remaining two each two-character (byte) group
will be reversed in place. Then the final compressed Guid will be constructed by
concatenating all the reversed parts without '-'.
.. Example::
Input: {2BE0FA87-5B36-43CF-95C8-C68D6673FB94}
Reversed: 78AF0EB2-63B5-FC34-598C-6CD86637BF49
Final Compressed Guid: 78AF0EB263B5FC34598C6CD86637BF49
Args:
guid (str): A valid GUID wrapped in curly braces
Returns:
str: A valid compressed GUID (SQUID)
]
variable[guid_pattern] assign[=] call[name[re].compile, parameter[constant[^\{(\w{8})-(\w{4})-(\w{4})-(\w\w)(\w\w)-(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)\}$]]]
variable[guid_match] assign[=] call[name[guid_pattern].match, parameter[name[guid]]]
variable[squid] assign[=] constant[]
if compare[name[guid_match] is_not constant[None]] begin[:]
for taget[name[index]] in starred[call[name[range], parameter[constant[1], constant[12]]]] begin[:]
<ast.AugAssign object at 0x7da1b1f94e50>
return[name[squid]] | keyword[def] identifier[guid_to_squid] ( identifier[guid] ):
literal[string]
identifier[guid_pattern] = identifier[re] . identifier[compile] ( literal[string] )
identifier[guid_match] = identifier[guid_pattern] . identifier[match] ( identifier[guid] )
identifier[squid] = literal[string]
keyword[if] identifier[guid_match] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[index] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[squid] += identifier[guid_match] . identifier[group] ( identifier[index] )[::- literal[int] ]
keyword[return] identifier[squid] | def guid_to_squid(guid):
"""
Converts a GUID to a compressed guid (SQUID)
Each Guid has 5 parts separated by '-'. For the first three each one will be
totally reversed, and for the remaining two each two-character (byte) group
will be reversed in place. Then the final compressed Guid will be constructed by
concatenating all the reversed parts without '-'.
.. Example::
Input: {2BE0FA87-5B36-43CF-95C8-C68D6673FB94}
Reversed: 78AF0EB2-63B5-FC34-598C-6CD86637BF49
Final Compressed Guid: 78AF0EB263B5FC34598C6CD86637BF49
Args:
guid (str): A valid GUID wrapped in curly braces
Returns:
str: A valid compressed GUID (SQUID)
"""
guid_pattern = re.compile('^\\{(\\w{8})-(\\w{4})-(\\w{4})-(\\w\\w)(\\w\\w)-(\\w\\w)(\\w\\w)(\\w\\w)(\\w\\w)(\\w\\w)(\\w\\w)\\}$')
guid_match = guid_pattern.match(guid)
squid = ''
if guid_match is not None:
for index in range(1, 12):
squid += guid_match.group(index)[::-1] # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=['guid_match']]
return squid |
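The docstring's example run end to end; note the braces the regex requires, and that a non-matching input yields an empty squid rather than an error:

import re  # needed by guid_to_squid itself

print(guid_to_squid('{2BE0FA87-5B36-43CF-95C8-C68D6673FB94}'))
# 78AF0EB263B5FC34598C6CD86637BF49
print(repr(guid_to_squid('not-a-guid')))  # '' -- no match, empty result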
def flow_rate(vol_per_rev, rpm):
"""Return the flow rate from a pump given the volume of fluid pumped per
revolution and the desired pump speed.
:param vol_per_rev: Volume of fluid output per revolution (dependent on pump and tubing)
:type vol_per_rev: float
:param rpm: Desired pump speed in revolutions per minute
:type rpm: float
:return: Flow rate of the pump (mL/s)
:rtype: float
:Examples:
>>> from aguaclara.research.peristaltic_pump import flow_rate
>>> from aguaclara.core.units import unit_registry as u
>>> flow_rate(3*u.mL/u.rev, 5*u.rev/u.min)
<Quantity(0.25, 'milliliter / second')>
"""
return (vol_per_rev * rpm).to(u.mL/u.s) | def function[flow_rate, parameter[vol_per_rev, rpm]]:
constant[Return the flow rate from a pump given the volume of fluid pumped per
revolution and the desired pump speed.
:param vol_per_rev: Volume of fluid output per revolution (dependent on pump and tubing)
:type vol_per_rev: float
:param rpm: Desired pump speed in revolutions per minute
:type rpm: float
:return: Flow rate of the pump (mL/s)
:rtype: float
:Examples:
>>> from aguaclara.research.peristaltic_pump import flow_rate
>>> from aguaclara.core.units import unit_registry as u
>>> flow_rate(3*u.mL/u.rev, 5*u.rev/u.min)
<Quantity(0.25, 'milliliter / second')>
]
return[call[binary_operation[name[vol_per_rev] * name[rpm]].to, parameter[binary_operation[name[u].mL / name[u].s]]]] | keyword[def] identifier[flow_rate] ( identifier[vol_per_rev] , identifier[rpm] ):
literal[string]
keyword[return] ( identifier[vol_per_rev] * identifier[rpm] ). identifier[to] ( identifier[u] . identifier[mL] / identifier[u] . identifier[s] ) | def flow_rate(vol_per_rev, rpm):
"""Return the flow rate from a pump given the volume of fluid pumped per
revolution and the desired pump speed.
:param vol_per_rev: Volume of fluid output per revolution (dependent on pump and tubing)
:type vol_per_rev: float
:param rpm: Desired pump speed in revolutions per minute
:type rpm: float
:return: Flow rate of the pump (mL/s)
:rtype: float
:Examples:
>>> from aguaclara.research.peristaltic_pump import flow_rate
>>> from aguaclara.core.units import unit_registry as u
>>> flow_rate(3*u.mL/u.rev, 5*u.rev/u.min)
<Quantity(0.25, 'milliliter / second')>
"""
return (vol_per_rev * rpm).to(u.mL / u.s) |
def create_graph_from_data(self, data, **kwargs):
"""Run the PC algorithm.
Args:
data (pandas.DataFrame): DataFrame containing the data
Returns:
networkx.DiGraph: Solution given by PC on the given data.
"""
# Building setup w/ arguments.
self.arguments['{CITEST}'] = self.dir_CI_test[self.CI_test]
self.arguments['{METHOD_INDEP}'] = self.dir_method_indep[self.method_indep]
self.arguments['{DIRECTED}'] = 'TRUE'
self.arguments['{ALPHA}'] = str(self.alpha)
self.arguments['{NJOBS}'] = str(self.nb_jobs)
self.arguments['{VERBOSE}'] = str(self.verbose).upper()
results = self._run_pc(data, verbose=self.verbose)
return nx.relabel_nodes(nx.DiGraph(results),
{idx: i for idx, i in enumerate(data.columns)}) | def function[create_graph_from_data, parameter[self, data]]:
constant[Run the PC algorithm.
Args:
data (pandas.DataFrame): DataFrame containing the data
Returns:
networkx.DiGraph: Solution given by PC on the given data.
]
call[name[self].arguments][constant[{CITEST}]] assign[=] call[name[self].dir_CI_test][name[self].CI_test]
call[name[self].arguments][constant[{METHOD_INDEP}]] assign[=] call[name[self].dir_method_indep][name[self].method_indep]
call[name[self].arguments][constant[{DIRECTED}]] assign[=] constant[TRUE]
call[name[self].arguments][constant[{ALPHA}]] assign[=] call[name[str], parameter[name[self].alpha]]
call[name[self].arguments][constant[{NJOBS}]] assign[=] call[name[str], parameter[name[self].nb_jobs]]
call[name[self].arguments][constant[{VERBOSE}]] assign[=] call[call[name[str], parameter[name[self].verbose]].upper, parameter[]]
variable[results] assign[=] call[name[self]._run_pc, parameter[name[data]]]
return[call[name[nx].relabel_nodes, parameter[call[name[nx].DiGraph, parameter[name[results]]], <ast.DictComp object at 0x7da1b01fdf30>]]] | keyword[def] identifier[create_graph_from_data] ( identifier[self] , identifier[data] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[arguments] [ literal[string] ]= identifier[self] . identifier[dir_CI_test] [ identifier[self] . identifier[CI_test] ]
identifier[self] . identifier[arguments] [ literal[string] ]= identifier[self] . identifier[dir_method_indep] [ identifier[self] . identifier[method_indep] ]
identifier[self] . identifier[arguments] [ literal[string] ]= literal[string]
identifier[self] . identifier[arguments] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[alpha] )
identifier[self] . identifier[arguments] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[nb_jobs] )
identifier[self] . identifier[arguments] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[verbose] ). identifier[upper] ()
identifier[results] = identifier[self] . identifier[_run_pc] ( identifier[data] , identifier[verbose] = identifier[self] . identifier[verbose] )
keyword[return] identifier[nx] . identifier[relabel_nodes] ( identifier[nx] . identifier[DiGraph] ( identifier[results] ),
{ identifier[idx] : identifier[i] keyword[for] identifier[idx] , identifier[i] keyword[in] identifier[enumerate] ( identifier[data] . identifier[columns] )}) | def create_graph_from_data(self, data, **kwargs):
"""Run the PC algorithm.
Args:
data (pandas.DataFrame): DataFrame containing the data
Returns:
networkx.DiGraph: Solution given by PC on the given data.
"""
# Building setup w/ arguments.
self.arguments['{CITEST}'] = self.dir_CI_test[self.CI_test]
self.arguments['{METHOD_INDEP}'] = self.dir_method_indep[self.method_indep]
self.arguments['{DIRECTED}'] = 'TRUE'
self.arguments['{ALPHA}'] = str(self.alpha)
self.arguments['{NJOBS}'] = str(self.nb_jobs)
self.arguments['{VERBOSE}'] = str(self.verbose).upper()
results = self._run_pc(data, verbose=self.verbose)
return nx.relabel_nodes(nx.DiGraph(results), {idx: i for (idx, i) in enumerate(data.columns)}) |
def get_location_from_taobao(ip):
"""
{
"code":0,
"data":{
"country":"\u65e5\u672c",
"country_id":"JP",
"area":"",
"area_id":"",
"region":"",
"region_id":"",
"city":"",
"city_id":"",
"county":"",
"county_id":"",
"isp":"",
"isp_id":"",
"ip":"58.12.23.23"
}
}
"""
global taobao
response = requests.get(taobao % ip)
if not response.status_code == 200:
return
l = json.loads(response.content)
if not l['code'] == 0:
return
l = l['data']
return ("%s,%s,%s,%s,%s" % (l['country'], l['area'], l['region'], l['city'], l['isp'])).encode('utf8') | def function[get_location_from_taobao, parameter[ip]]:
constant[
{
"code":0,
"data":{
"country":"日本",
"country_id":"JP",
"area":"",
"area_id":"",
"region":"",
"region_id":"",
"city":"",
"city_id":"",
"county":"",
"county_id":"",
"isp":"",
"isp_id":"",
"ip":"58.12.23.23"
}
}
]
<ast.Global object at 0x7da1b09bd060>
variable[response] assign[=] call[name[requests].get, parameter[binary_operation[name[taobao] <ast.Mod object at 0x7da2590d6920> name[ip]]]]
if <ast.UnaryOp object at 0x7da1b09bc070> begin[:]
return[None]
variable[l] assign[=] call[name[json].loads, parameter[name[response].content]]
if <ast.UnaryOp object at 0x7da1b09bd450> begin[:]
return[None]
variable[l] assign[=] call[name[l]][constant[data]]
return[call[binary_operation[constant[%s,%s,%s,%s,%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b09bdbd0>, <ast.Subscript object at 0x7da1b09bf0a0>, <ast.Subscript object at 0x7da1b09bd300>, <ast.Subscript object at 0x7da1b09bf670>, <ast.Subscript object at 0x7da1b09bee30>]]].encode, parameter[constant[utf8]]]] | keyword[def] identifier[get_location_from_taobao] ( identifier[ip] ):
literal[string]
keyword[global] identifier[taobao]
identifier[response] = identifier[requests] . identifier[get] ( identifier[taobao] % identifier[ip] )
keyword[if] keyword[not] identifier[response] . identifier[status_code] == literal[int] :
keyword[return]
identifier[l] = identifier[json] . identifier[loads] ( identifier[response] . identifier[content] )
keyword[if] keyword[not] identifier[l] [ literal[string] ]== literal[int] :
keyword[return]
identifier[l] = identifier[l] [ literal[string] ]
keyword[return] ( literal[string] %( identifier[l] [ literal[string] ], identifier[l] [ literal[string] ], identifier[l] [ literal[string] ], identifier[l] [ literal[string] ], identifier[l] [ literal[string] ])). identifier[encode] ( literal[string] ) | def get_location_from_taobao(ip):
"""
{
"code":0,
"data":{
"country":"日本",
"country_id":"JP",
"area":"",
"area_id":"",
"region":"",
"region_id":"",
"city":"",
"city_id":"",
"county":"",
"county_id":"",
"isp":"",
"isp_id":"",
"ip":"58.12.23.23"
}
}
"""
global taobao
response = requests.get(taobao % ip)
if not response.status_code == 200:
return # depends on [control=['if'], data=[]]
l = json.loads(response.content)
if not l['code'] == 0:
return # depends on [control=['if'], data=[]]
l = l['data']
return ('%s,%s,%s,%s,%s' % (l['country'], l['area'], l['region'], l['city'], l['isp'])).encode('utf8') |
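The snippet above depends on a module-level `taobao` format string and module-level imports that are not part of the row. A self-contained, runnable variant is sketched below; the endpoint URL is an assumption based on Taobao's public IP API, and `!=` comparisons replace the less idiomatic `not ... == 200` checks.

# Self-contained sketch of get_location_from_taobao.
# TAOBAO_URL is an assumed endpoint; the original module defines `taobao` elsewhere.
import json
import requests

TAOBAO_URL = "http://ip.taobao.com/service/getIpInfo.php?ip=%s"

def lookup_ip(ip):
    response = requests.get(TAOBAO_URL % ip, timeout=5)
    if response.status_code != 200:
        return None
    payload = json.loads(response.content)
    if payload.get("code") != 0:
        return None
    d = payload["data"]
    return "%s,%s,%s,%s,%s" % (d["country"], d["area"], d["region"], d["city"], d["isp"])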
def setup_random_indices_local_geometry(self, coordination):
"""
Sets up random indices for the local geometry, for testing purposes
:param coordination: coordination of the local geometry
"""
self.icentral_site = 0
self.indices = list(range(1, coordination + 1))
np.random.shuffle(self.indices) | def function[setup_random_indices_local_geometry, parameter[self, coordination]]:
constant[
Sets up random indices for the local geometry, for testing purposes
:param coordination: coordination of the local geometry
]
name[self].icentral_site assign[=] constant[0]
name[self].indices assign[=] call[name[list], parameter[call[name[range], parameter[constant[1], binary_operation[name[coordination] + constant[1]]]]]]
call[name[np].random.shuffle, parameter[name[self].indices]] | keyword[def] identifier[setup_random_indices_local_geometry] ( identifier[self] , identifier[coordination] ):
literal[string]
identifier[self] . identifier[icentral_site] = literal[int]
identifier[self] . identifier[indices] = identifier[list] ( identifier[range] ( literal[int] , identifier[coordination] + literal[int] ))
identifier[np] . identifier[random] . identifier[shuffle] ( identifier[self] . identifier[indices] ) | def setup_random_indices_local_geometry(self, coordination):
"""
Sets up random indices for the local geometry, for testing purposes
:param coordination: coordination of the local geometry
"""
self.icentral_site = 0
self.indices = list(range(1, coordination + 1))
np.random.shuffle(self.indices) |
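A short usage sketch for the method above; the host class name is a hypothetical stand-in, since the row only shows the method body.

# Hypothetical host class, for illustration only.
import numpy as np

class LocalGeometry:
    def setup_random_indices_local_geometry(self, coordination):
        self.icentral_site = 0
        self.indices = list(range(1, coordination + 1))
        np.random.shuffle(self.indices)  # permute neighbor indices in place

geom = LocalGeometry()
geom.setup_random_indices_local_geometry(4)
print(geom.icentral_site)     # 0
print(sorted(geom.indices))   # [1, 2, 3, 4]; actual order is randomized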
def get(self):
"""Return the number of seconds elapsed since object creation,
or since last call to this function, whichever is more recent."""
elapsed = datetime.now() - self._previous
self._previous += elapsed
return elapsed.total_seconds() | def function[get, parameter[self]]:
constant[Return the number of seconds elapsed since object creation,
or since last call to this function, whichever is more recent.]
variable[elapsed] assign[=] binary_operation[call[name[datetime].now, parameter[]] - name[self]._previous]
<ast.AugAssign object at 0x7da1b0a23e20>
return[call[name[elapsed].total_seconds, parameter[]]] | keyword[def] identifier[get] ( identifier[self] ):
literal[string]
identifier[elapsed] = identifier[datetime] . identifier[now] ()- identifier[self] . identifier[_previous]
identifier[self] . identifier[_previous] += identifier[elapsed]
keyword[return] identifier[elapsed] . identifier[total_seconds] () | def get(self):
"""Return the number of seconds elapsed since object creation,
or since last call to this function, whichever is more recent."""
elapsed = datetime.now() - self._previous
self._previous += elapsed
return elapsed.total_seconds() |
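The `get` method above implements a lap timer: each call returns the time since the previous call and resets the reference point by advancing `_previous`. A minimal self-contained sketch follows; the class name and `__init__` are assumptions, only `get()` comes from the row above.

# Minimal lap-timer sketch around the get() method shown above.
from datetime import datetime
import time

class LapTimer:
    def __init__(self):
        self._previous = datetime.now()

    def get(self):
        elapsed = datetime.now() - self._previous
        self._previous += elapsed  # advance the reference point to "now"
        return elapsed.total_seconds()

t = LapTimer()
time.sleep(0.1)
print(round(t.get(), 1))  # ~0.1
print(t.get() < 0.01)     # True: the previous call reset the clock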
def duplicate(self, fullname, shortname, categoryid,
visible=True, **kwargs):
"""
Duplicates an existing course with options.
Note: Can be very slow running.
:param string fullname: The new course's full name
:param string shortname: The new course's short name
:param string categoryid: Category new course should be created under
:keyword bool visible: Defaults to True. The new course's visiblity
:keyword bool activities: (optional) Defaults to True. \
Include course activites
:keyword bool blocks: (optional) Defaults to True. \
Include course blocks
:keyword bool filters: (optional) Defaults to True. \
Include course filters
:keyword bool users: (optional) Defaults to False. Include users
:keyword bool role_assignments: (optional) Defaults to False. \
Include role assignments
:keyword bool comments: (optional) Defaults to False. \
Include user comments
:keyword bool usercompletion: (optional) Defaults to False. \
Include user course completion information
:keyword bool logs: (optional) Defaults to False. Include course logs
:keyword bool grade_histories: (optional) Defaults to False. \
Include histories
:returns: response object
Example Usage::
>>> import muddle
>>> muddle.course(10).duplicate('new-fullname', 'new-shortname', 20)
"""
# TODO
# Ideally categoryid should be optional here and
# should default to catid of course being duplicated.
allowed_options = ['activities', 'blocks',
'filters', 'users',
'role_assignments', 'comments',
'usercompletion', 'logs',
'grade_histories']
if valid_options(kwargs, allowed_options):
option_params = {}
for index, key in enumerate(kwargs):
option_params.update(
{'options[' + str(index) + '][name]': key,
'options[' + str(index) + '][value]':
int(kwargs.get(key))})
params = {'wsfunction': 'core_course_duplicate_course',
'courseid': self.course_id,
'fullname': fullname,
'shortname': shortname,
'categoryid': categoryid,
'visible': int(visible)}
params.update(option_params)
params.update(self.request_params)
return requests.post(self.api_url, params=params, verify=False) | def function[duplicate, parameter[self, fullname, shortname, categoryid, visible]]:
constant[
Duplicates an existing course with options.
Note: Can be very slow running.
:param string fullname: The new course's full name
:param string shortname: The new course's short name
:param string categoryid: Category new course should be created under
:keyword bool visible: Defaults to True. The new course's visiblity
:keyword bool activities: (optional) Defaults to True. Include course activites
:keyword bool blocks: (optional) Defaults to True. Include course blocks
:keyword bool filters: (optional) Defaults to True. Include course filters
:keyword bool users: (optional) Defaults to False. Include users
:keyword bool role_assignments: (optional) Defaults to False. Include role assignments
:keyword bool comments: (optional) Defaults to False. Include user comments
:keyword bool usercompletion: (optional) Defaults to False. Include user course completion information
:keyword bool logs: (optional) Defaults to False. Include course logs
:keyword bool grade_histories: (optional) Defaults to False. Include histories
:returns: response object
Example Usage::
>>> import muddle
>>> muddle.course(10).duplicate('new-fullname', 'new-shortname', 20)
]
variable[allowed_options] assign[=] list[[<ast.Constant object at 0x7da1b0911f90>, <ast.Constant object at 0x7da1b0913640>, <ast.Constant object at 0x7da1b09128f0>, <ast.Constant object at 0x7da1b09102e0>, <ast.Constant object at 0x7da1b0913820>, <ast.Constant object at 0x7da1b0913430>, <ast.Constant object at 0x7da1b0910bb0>, <ast.Constant object at 0x7da1b0910a60>, <ast.Constant object at 0x7da1b09100d0>]]
if call[name[valid_options], parameter[name[kwargs], name[allowed_options]]] begin[:]
variable[option_params] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0910b80>, <ast.Name object at 0x7da1b0911a50>]]] in starred[call[name[enumerate], parameter[name[kwargs]]]] begin[:]
call[name[option_params].update, parameter[dictionary[[<ast.BinOp object at 0x7da1b0912500>, <ast.BinOp object at 0x7da1b0913610>], [<ast.Name object at 0x7da1b0912b60>, <ast.Call object at 0x7da1b09128c0>]]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0baa290>, <ast.Constant object at 0x7da1b0ba8b80>, <ast.Constant object at 0x7da1b0ba8af0>, <ast.Constant object at 0x7da1b0ba84f0>, <ast.Constant object at 0x7da1b0baa260>, <ast.Constant object at 0x7da1b0bab790>], [<ast.Constant object at 0x7da1b0baac50>, <ast.Attribute object at 0x7da1b0bab160>, <ast.Name object at 0x7da1b0a72cb0>, <ast.Name object at 0x7da1b0a70220>, <ast.Name object at 0x7da1b0a72b30>, <ast.Call object at 0x7da1b0a70cd0>]]
call[name[params].update, parameter[name[option_params]]]
call[name[params].update, parameter[name[self].request_params]]
return[call[name[requests].post, parameter[name[self].api_url]]] | keyword[def] identifier[duplicate] ( identifier[self] , identifier[fullname] , identifier[shortname] , identifier[categoryid] ,
identifier[visible] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[allowed_options] =[ literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] ]
keyword[if] identifier[valid_options] ( identifier[kwargs] , identifier[allowed_options] ):
identifier[option_params] ={}
keyword[for] identifier[index] , identifier[key] keyword[in] identifier[enumerate] ( identifier[kwargs] ):
identifier[option_params] . identifier[update] (
{ literal[string] + identifier[str] ( identifier[index] )+ literal[string] : identifier[key] ,
literal[string] + identifier[str] ( identifier[index] )+ literal[string] :
identifier[int] ( identifier[kwargs] . identifier[get] ( identifier[key] ))})
identifier[params] ={ literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[course_id] ,
literal[string] : identifier[fullname] ,
literal[string] : identifier[shortname] ,
literal[string] : identifier[categoryid] ,
literal[string] : identifier[int] ( identifier[visible] )}
identifier[params] . identifier[update] ( identifier[option_params] )
identifier[params] . identifier[update] ( identifier[self] . identifier[request_params] )
keyword[return] identifier[requests] . identifier[post] ( identifier[self] . identifier[api_url] , identifier[params] = identifier[params] , identifier[verify] = keyword[False] ) | def duplicate(self, fullname, shortname, categoryid, visible=True, **kwargs):
"""
Duplicates an existing course with options.
Note: Can be very slow running.
:param string fullname: The new course's full name
:param string shortname: The new course's short name
:param string categoryid: Category new course should be created under
:keyword bool visible: Defaults to True. The new course's visiblity
:keyword bool activities: (optional) Defaults to True. Include course activites
:keyword bool blocks: (optional) Defaults to True. Include course blocks
:keyword bool filters: (optional) Defaults to True. Include course filters
:keyword bool users: (optional) Defaults to False. Include users
:keyword bool role_assignments: (optional) Defaults to False. Include role assignments
:keyword bool comments: (optional) Defaults to False. Include user comments
:keyword bool usercompletion: (optional) Defaults to False. Include user course completion information
:keyword bool logs: (optional) Defaults to False. Include course logs
:keyword bool grade_histories: (optional) Defaults to False. Include histories
:returns: response object
Example Usage::
>>> import muddle
>>> muddle.course(10).duplicate('new-fullname', 'new-shortname', 20)
"""
# TODO
# Ideally categoryid should be optional here and
# should default to catid of course being duplicated.
allowed_options = ['activities', 'blocks', 'filters', 'users', 'role_assignments', 'comments', 'usercompletion', 'logs', 'grade_histories']
if valid_options(kwargs, allowed_options):
option_params = {}
for (index, key) in enumerate(kwargs):
option_params.update({'options[' + str(index) + '][name]': key, 'options[' + str(index) + '][value]': int(kwargs.get(key))}) # depends on [control=['for'], data=[]]
params = {'wsfunction': 'core_course_duplicate_course', 'courseid': self.course_id, 'fullname': fullname, 'shortname': shortname, 'categoryid': categoryid, 'visible': int(visible)}
params.update(option_params)
params.update(self.request_params)
return requests.post(self.api_url, params=params, verify=False) # depends on [control=['if'], data=[]] |
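The loop in `duplicate` flattens keyword options into Moodle's indexed `options[i][name]` / `options[i][value]` request parameters. A standalone sketch of that encoding, with illustrative option values:

# Sketch of the options[i][name]/options[i][value] encoding built above.
def encode_options(options):
    params = {}
    for index, (key, value) in enumerate(options.items()):
        params["options[%d][name]" % index] = key
        params["options[%d][value]" % index] = int(value)  # booleans become 0/1
    return params

print(encode_options({"users": False, "logs": True}))
# {'options[0][name]': 'users', 'options[0][value]': 0,
#  'options[1][name]': 'logs', 'options[1][value]': 1}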