code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def _load_config(robot_path):
"""
Used internally by pyfrc, don't call this directly.
Loads a json file from sim/config.json and makes the information available
to simulation/testing code.
"""
# NOTE(review): indentation in this listing was stripped by the dataset
# export; nesting must be recovered from the sibling annotated column.
from . import config
# config.config_obj is a shared dict that is mutated in place below, so
# every module importing it observes the loaded/derived settings.
config_obj = config.config_obj
sim_path = join(robot_path, "sim")
config_file = join(sim_path, "config.json")
# Merge the user's sim/config.json on top of whatever is already present;
# missing file is non-fatal and only produces a warning.
if exists(config_file):
with open(config_file, "r") as fp:
config_obj.update(json.load(fp))
else:
logger.warning("sim/config.json not found, using default simulation parameters")
config_obj["simpath"] = sim_path
# setup defaults
config_obj.setdefault("pyfrc", {})
config_obj["pyfrc"].setdefault("robot", {})
config_obj["pyfrc"]["robot"].setdefault("w", 2)
# switched from 'h' to 'l' in 2018, but keeping it there for legacy reasons
l = config_obj["pyfrc"]["robot"].get("h", 3)
config_obj["pyfrc"]["robot"].setdefault("l", l)
config_obj["pyfrc"]["robot"].setdefault("starting_x", 0)
config_obj["pyfrc"]["robot"].setdefault("starting_y", 0)
config_obj["pyfrc"]["robot"].setdefault("starting_angle", 0)
# list of dictionaries of x=, y=, angle=, name=
config_obj["pyfrc"]["robot"].setdefault("start_positions", [])
# 'field' alias shares the same nested dict, so writes through either
# name are visible to both.
field = config_obj["pyfrc"].setdefault("field", {})
force_defaults = False
# The rules here are complex because of backwards compat
# -> if you specify a particular season, then it will override w/h/px
# -> if you specify objects then you will get your own stuff
# -> if you don't specify anything then it override w/h/px
# -> if you add your own, it will warn you unless you specify an image
# backwards compat
if "season" in config_obj["pyfrc"]["field"]:
season = config_obj["pyfrc"]["field"]["season"]
# Unknown seasons fall back to the generic defaults entry.
defaults = _field_defaults.get(str(season), _field_defaults["default"])
force_defaults = True
elif "objects" in config_obj["pyfrc"]["field"]:
defaults = _field_defaults["default"]
else:
if "image" not in field:
force_defaults = True
defaults = _field_defaults[_default_year]
if force_defaults:
# User-supplied geometry is discarded (with a warning) when a season
# or the no-config path forces the packaged field dimensions.
if "w" in field or "h" in field or "px_per_ft" in field:
logger.warning("Ignoring field w/h/px_per_ft settings")
field["w"] = defaults["w"]
field["h"] = defaults["h"]
field["px_per_ft"] = defaults["px_per_ft"]
config_obj["pyfrc"]["field"].setdefault("objects", [])
config_obj["pyfrc"]["field"].setdefault("w", defaults["w"])
config_obj["pyfrc"]["field"].setdefault("h", defaults["h"])
config_obj["pyfrc"]["field"].setdefault("px_per_ft", defaults["px_per_ft"])
# setdefault returns the stored value, so img is either the user's image
# or the packaged default.
img = config_obj["pyfrc"]["field"].setdefault("image", defaults["image"])
config_obj["pyfrc"].setdefault(
"game_specific_messages", defaults.get("game_specific_messages", [])
)
config_obj["pyfrc"]["field"].setdefault(
"auto_joysticks", defaults.get("auto_joysticks", False)
)
assert isinstance(config_obj["pyfrc"]["game_specific_messages"], (list, type(None)))
# Relative image paths are resolved against the sim/ directory.
if img and not isabs(config_obj["pyfrc"]["field"]["image"]):
config_obj["pyfrc"]["field"]["image"] = abspath(join(sim_path, img))
# Ensure the per-subsystem label maps exist even when the JSON omits them.
config_obj["pyfrc"].setdefault("analog", {})
config_obj["pyfrc"].setdefault("CAN", {})
config_obj["pyfrc"].setdefault("dio", {})
config_obj["pyfrc"].setdefault("pwm", {})
config_obj["pyfrc"].setdefault("relay", {})
config_obj["pyfrc"].setdefault("solenoid", {})
config_obj["pyfrc"].setdefault("joysticks", {})
# Pre-populate six joysticks ("0".."5"), each with axis/button maps and
# default names for buttons 1 and 2.
for i in range(6):
config_obj["pyfrc"]["joysticks"].setdefault(str(i), {})
config_obj["pyfrc"]["joysticks"][str(i)].setdefault("axes", {})
config_obj["pyfrc"]["joysticks"][str(i)].setdefault("buttons", {})
config_obj["pyfrc"]["joysticks"][str(i)]["buttons"].setdefault("1", "Trigger")
config_obj["pyfrc"]["joysticks"][str(i)]["buttons"].setdefault("2", "Top") | def function[_load_config, parameter[robot_path]]:
constant[
Used internally by pyfrc, don't call this directly.
Loads a json file from sim/config.json and makes the information available
to simulation/testing code.
]
from relative_module[None] import module[config]
variable[config_obj] assign[=] name[config].config_obj
variable[sim_path] assign[=] call[name[join], parameter[name[robot_path], constant[sim]]]
variable[config_file] assign[=] call[name[join], parameter[name[sim_path], constant[config.json]]]
if call[name[exists], parameter[name[config_file]]] begin[:]
with call[name[open], parameter[name[config_file], constant[r]]] begin[:]
call[name[config_obj].update, parameter[call[name[json].load, parameter[name[fp]]]]]
call[name[config_obj]][constant[simpath]] assign[=] name[sim_path]
call[name[config_obj].setdefault, parameter[constant[pyfrc], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[robot], dictionary[[], []]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].setdefault, parameter[constant[w], constant[2]]]
variable[l] assign[=] call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].get, parameter[constant[h], constant[3]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].setdefault, parameter[constant[l], name[l]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].setdefault, parameter[constant[starting_x], constant[0]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].setdefault, parameter[constant[starting_y], constant[0]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].setdefault, parameter[constant[starting_angle], constant[0]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[robot]].setdefault, parameter[constant[start_positions], list[[]]]]
variable[field] assign[=] call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[field], dictionary[[], []]]]
variable[force_defaults] assign[=] constant[False]
if compare[constant[season] in call[call[name[config_obj]][constant[pyfrc]]][constant[field]]] begin[:]
variable[season] assign[=] call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]]][constant[season]]
variable[defaults] assign[=] call[name[_field_defaults].get, parameter[call[name[str], parameter[name[season]]], call[name[_field_defaults]][constant[default]]]]
variable[force_defaults] assign[=] constant[True]
if name[force_defaults] begin[:]
if <ast.BoolOp object at 0x7da1b19b64a0> begin[:]
call[name[logger].warning, parameter[constant[Ignoring field w/h/px_per_ft settings]]]
call[name[field]][constant[w]] assign[=] call[name[defaults]][constant[w]]
call[name[field]][constant[h]] assign[=] call[name[defaults]][constant[h]]
call[name[field]][constant[px_per_ft]] assign[=] call[name[defaults]][constant[px_per_ft]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]].setdefault, parameter[constant[objects], list[[]]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]].setdefault, parameter[constant[w], call[name[defaults]][constant[w]]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]].setdefault, parameter[constant[h], call[name[defaults]][constant[h]]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]].setdefault, parameter[constant[px_per_ft], call[name[defaults]][constant[px_per_ft]]]]
variable[img] assign[=] call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]].setdefault, parameter[constant[image], call[name[defaults]][constant[image]]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[game_specific_messages], call[name[defaults].get, parameter[constant[game_specific_messages], list[[]]]]]]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]].setdefault, parameter[constant[auto_joysticks], call[name[defaults].get, parameter[constant[auto_joysticks], constant[False]]]]]
assert[call[name[isinstance], parameter[call[call[name[config_obj]][constant[pyfrc]]][constant[game_specific_messages]], tuple[[<ast.Name object at 0x7da1b18005e0>, <ast.Call object at 0x7da1b1801120>]]]]]
if <ast.BoolOp object at 0x7da1b1800070> begin[:]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[field]]][constant[image]] assign[=] call[name[abspath], parameter[call[name[join], parameter[name[sim_path], name[img]]]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[analog], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[CAN], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[dio], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[pwm], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[relay], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[solenoid], dictionary[[], []]]]
call[call[name[config_obj]][constant[pyfrc]].setdefault, parameter[constant[joysticks], dictionary[[], []]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[6]]]] begin[:]
call[call[call[name[config_obj]][constant[pyfrc]]][constant[joysticks]].setdefault, parameter[call[name[str], parameter[name[i]]], dictionary[[], []]]]
call[call[call[call[name[config_obj]][constant[pyfrc]]][constant[joysticks]]][call[name[str], parameter[name[i]]]].setdefault, parameter[constant[axes], dictionary[[], []]]]
call[call[call[call[name[config_obj]][constant[pyfrc]]][constant[joysticks]]][call[name[str], parameter[name[i]]]].setdefault, parameter[constant[buttons], dictionary[[], []]]]
call[call[call[call[call[name[config_obj]][constant[pyfrc]]][constant[joysticks]]][call[name[str], parameter[name[i]]]]][constant[buttons]].setdefault, parameter[constant[1], constant[Trigger]]]
call[call[call[call[call[name[config_obj]][constant[pyfrc]]][constant[joysticks]]][call[name[str], parameter[name[i]]]]][constant[buttons]].setdefault, parameter[constant[2], constant[Top]]] | keyword[def] identifier[_load_config] ( identifier[robot_path] ):
literal[string]
keyword[from] . keyword[import] identifier[config]
identifier[config_obj] = identifier[config] . identifier[config_obj]
identifier[sim_path] = identifier[join] ( identifier[robot_path] , literal[string] )
identifier[config_file] = identifier[join] ( identifier[sim_path] , literal[string] )
keyword[if] identifier[exists] ( identifier[config_file] ):
keyword[with] identifier[open] ( identifier[config_file] , literal[string] ) keyword[as] identifier[fp] :
identifier[config_obj] . identifier[update] ( identifier[json] . identifier[load] ( identifier[fp] ))
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[config_obj] [ literal[string] ]= identifier[sim_path]
identifier[config_obj] . identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , literal[int] )
identifier[l] = identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] , literal[int] )
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , identifier[l] )
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , literal[int] )
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , literal[int] )
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , literal[int] )
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] ,[])
identifier[field] = identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[force_defaults] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[config_obj] [ literal[string] ][ literal[string] ]:
identifier[season] = identifier[config_obj] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[defaults] = identifier[_field_defaults] . identifier[get] ( identifier[str] ( identifier[season] ), identifier[_field_defaults] [ literal[string] ])
identifier[force_defaults] = keyword[True]
keyword[elif] literal[string] keyword[in] identifier[config_obj] [ literal[string] ][ literal[string] ]:
identifier[defaults] = identifier[_field_defaults] [ literal[string] ]
keyword[else] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[field] :
identifier[force_defaults] = keyword[True]
identifier[defaults] = identifier[_field_defaults] [ identifier[_default_year] ]
keyword[if] identifier[force_defaults] :
keyword[if] literal[string] keyword[in] identifier[field] keyword[or] literal[string] keyword[in] identifier[field] keyword[or] literal[string] keyword[in] identifier[field] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[field] [ literal[string] ]= identifier[defaults] [ literal[string] ]
identifier[field] [ literal[string] ]= identifier[defaults] [ literal[string] ]
identifier[field] [ literal[string] ]= identifier[defaults] [ literal[string] ]
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] ,[])
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , identifier[defaults] [ literal[string] ])
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , identifier[defaults] [ literal[string] ])
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , identifier[defaults] [ literal[string] ])
identifier[img] = identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( literal[string] , identifier[defaults] [ literal[string] ])
identifier[config_obj] [ literal[string] ]. identifier[setdefault] (
literal[string] , identifier[defaults] . identifier[get] ( literal[string] ,[])
)
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] (
literal[string] , identifier[defaults] . identifier[get] ( literal[string] , keyword[False] )
)
keyword[assert] identifier[isinstance] ( identifier[config_obj] [ literal[string] ][ literal[string] ],( identifier[list] , identifier[type] ( keyword[None] )))
keyword[if] identifier[img] keyword[and] keyword[not] identifier[isabs] ( identifier[config_obj] [ literal[string] ][ literal[string] ][ literal[string] ]):
identifier[config_obj] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[abspath] ( identifier[join] ( identifier[sim_path] , identifier[img] ))
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ]. identifier[setdefault] ( literal[string] ,{})
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ):
identifier[config_obj] [ literal[string] ][ literal[string] ]. identifier[setdefault] ( identifier[str] ( identifier[i] ),{})
identifier[config_obj] [ literal[string] ][ literal[string] ][ identifier[str] ( identifier[i] )]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ][ literal[string] ][ identifier[str] ( identifier[i] )]. identifier[setdefault] ( literal[string] ,{})
identifier[config_obj] [ literal[string] ][ literal[string] ][ identifier[str] ( identifier[i] )][ literal[string] ]. identifier[setdefault] ( literal[string] , literal[string] )
identifier[config_obj] [ literal[string] ][ literal[string] ][ identifier[str] ( identifier[i] )][ literal[string] ]. identifier[setdefault] ( literal[string] , literal[string] ) | def _load_config(robot_path):
"""
Used internally by pyfrc, don't call this directly.
Loads a json file from sim/config.json and makes the information available
to simulation/testing code.
"""
from . import config
config_obj = config.config_obj
sim_path = join(robot_path, 'sim')
config_file = join(sim_path, 'config.json')
if exists(config_file):
with open(config_file, 'r') as fp:
config_obj.update(json.load(fp)) # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]]
else:
logger.warning('sim/config.json not found, using default simulation parameters')
config_obj['simpath'] = sim_path
# setup defaults
config_obj.setdefault('pyfrc', {})
config_obj['pyfrc'].setdefault('robot', {})
config_obj['pyfrc']['robot'].setdefault('w', 2)
# switched from 'h' to 'l' in 2018, but keeping it there for legacy reasons
l = config_obj['pyfrc']['robot'].get('h', 3)
config_obj['pyfrc']['robot'].setdefault('l', l)
config_obj['pyfrc']['robot'].setdefault('starting_x', 0)
config_obj['pyfrc']['robot'].setdefault('starting_y', 0)
config_obj['pyfrc']['robot'].setdefault('starting_angle', 0)
# list of dictionaries of x=, y=, angle=, name=
config_obj['pyfrc']['robot'].setdefault('start_positions', [])
field = config_obj['pyfrc'].setdefault('field', {})
force_defaults = False
# The rules here are complex because of backwards compat
# -> if you specify a particular season, then it will override w/h/px
# -> if you specify objects then you will get your own stuff
# -> if you don't specify anything then it override w/h/px
# -> if you add your own, it will warn you unless you specify an image
# backwards compat
if 'season' in config_obj['pyfrc']['field']:
season = config_obj['pyfrc']['field']['season']
defaults = _field_defaults.get(str(season), _field_defaults['default'])
force_defaults = True # depends on [control=['if'], data=[]]
elif 'objects' in config_obj['pyfrc']['field']:
defaults = _field_defaults['default'] # depends on [control=['if'], data=[]]
else:
if 'image' not in field:
force_defaults = True # depends on [control=['if'], data=[]]
defaults = _field_defaults[_default_year]
if force_defaults:
if 'w' in field or 'h' in field or 'px_per_ft' in field:
logger.warning('Ignoring field w/h/px_per_ft settings') # depends on [control=['if'], data=[]]
field['w'] = defaults['w']
field['h'] = defaults['h']
field['px_per_ft'] = defaults['px_per_ft'] # depends on [control=['if'], data=[]]
config_obj['pyfrc']['field'].setdefault('objects', [])
config_obj['pyfrc']['field'].setdefault('w', defaults['w'])
config_obj['pyfrc']['field'].setdefault('h', defaults['h'])
config_obj['pyfrc']['field'].setdefault('px_per_ft', defaults['px_per_ft'])
img = config_obj['pyfrc']['field'].setdefault('image', defaults['image'])
config_obj['pyfrc'].setdefault('game_specific_messages', defaults.get('game_specific_messages', []))
config_obj['pyfrc']['field'].setdefault('auto_joysticks', defaults.get('auto_joysticks', False))
assert isinstance(config_obj['pyfrc']['game_specific_messages'], (list, type(None)))
if img and (not isabs(config_obj['pyfrc']['field']['image'])):
config_obj['pyfrc']['field']['image'] = abspath(join(sim_path, img)) # depends on [control=['if'], data=[]]
config_obj['pyfrc'].setdefault('analog', {})
config_obj['pyfrc'].setdefault('CAN', {})
config_obj['pyfrc'].setdefault('dio', {})
config_obj['pyfrc'].setdefault('pwm', {})
config_obj['pyfrc'].setdefault('relay', {})
config_obj['pyfrc'].setdefault('solenoid', {})
config_obj['pyfrc'].setdefault('joysticks', {})
for i in range(6):
config_obj['pyfrc']['joysticks'].setdefault(str(i), {})
config_obj['pyfrc']['joysticks'][str(i)].setdefault('axes', {})
config_obj['pyfrc']['joysticks'][str(i)].setdefault('buttons', {})
config_obj['pyfrc']['joysticks'][str(i)]['buttons'].setdefault('1', 'Trigger')
config_obj['pyfrc']['joysticks'][str(i)]['buttons'].setdefault('2', 'Top') # depends on [control=['for'], data=['i']] |
def is_date(property_name, *, format=None, present_optional=False, message=None):
"""Returns a Validation that checks a value as a date."""
# NOTE: Not currently using format param
def check(val):
"""Checks that a value can be parsed as a date."""
# None means "value absent": valid only when present_optional is True.
if val is None:
return present_optional
else:
# try_parse_date appears to return (ok, parsed); only the flag is
# used here — TODO(review): confirm against util.try_parse_date.
is_date, _ = util.try_parse_date(val)
return is_date
return Validation(check, property_name, message) | def function[is_date, parameter[property_name]]:
constant[Returns a Validation that checks a value as a date.]
def function[check, parameter[val]]:
constant[Checks that a value can be parsed as a date.]
if compare[name[val] is constant[None]] begin[:]
return[name[present_optional]]
return[call[name[Validation], parameter[name[check], name[property_name], name[message]]]] | keyword[def] identifier[is_date] ( identifier[property_name] ,*, identifier[format] = keyword[None] , identifier[present_optional] = keyword[False] , identifier[message] = keyword[None] ):
literal[string]
keyword[def] identifier[check] ( identifier[val] ):
literal[string]
keyword[if] identifier[val] keyword[is] keyword[None] :
keyword[return] identifier[present_optional]
keyword[else] :
identifier[is_date] , identifier[_] = identifier[util] . identifier[try_parse_date] ( identifier[val] )
keyword[return] identifier[is_date]
keyword[return] identifier[Validation] ( identifier[check] , identifier[property_name] , identifier[message] ) | def is_date(property_name, *, format=None, present_optional=False, message=None):
"""Returns a Validation that checks a value as a date."""
# NOTE: Not currently using format param
def check(val):
"""Checks that a value can be parsed as a date."""
if val is None:
return present_optional # depends on [control=['if'], data=[]]
else:
(is_date, _) = util.try_parse_date(val)
return is_date
return Validation(check, property_name, message) |
def get_variant(variant_handle, context=None):
"""Create a variant given its handle (or serialized dict equivalent)
Args:
variant_handle (`ResourceHandle` or dict): Resource handle, or
equivalent serialized dict representation from
ResourceHandle.to_dict
context (`ResolvedContext`): The context this variant is associated
with, if any.
Returns:
`Variant`.
"""
# Accept the serialized dict form by rehydrating it into a
# ResourceHandle before the repository lookup.
if isinstance(variant_handle, dict):
variant_handle = ResourceHandle.from_dict(variant_handle)
variant_resource = package_repository_manager.get_resource_from_handle(variant_handle)
# Wrap the raw resource; the optional context is only carried along,
# not validated here.
variant = Variant(variant_resource, context=context)
return variant | def function[get_variant, parameter[variant_handle, context]]:
constant[Create a variant given its handle (or serialized dict equivalent)
Args:
variant_handle (`ResourceHandle` or dict): Resource handle, or
equivalent serialized dict representation from
ResourceHandle.to_dict
context (`ResolvedContext`): The context this variant is associated
with, if any.
Returns:
`Variant`.
]
if call[name[isinstance], parameter[name[variant_handle], name[dict]]] begin[:]
variable[variant_handle] assign[=] call[name[ResourceHandle].from_dict, parameter[name[variant_handle]]]
variable[variant_resource] assign[=] call[name[package_repository_manager].get_resource_from_handle, parameter[name[variant_handle]]]
variable[variant] assign[=] call[name[Variant], parameter[name[variant_resource]]]
return[name[variant]] | keyword[def] identifier[get_variant] ( identifier[variant_handle] , identifier[context] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[variant_handle] , identifier[dict] ):
identifier[variant_handle] = identifier[ResourceHandle] . identifier[from_dict] ( identifier[variant_handle] )
identifier[variant_resource] = identifier[package_repository_manager] . identifier[get_resource_from_handle] ( identifier[variant_handle] )
identifier[variant] = identifier[Variant] ( identifier[variant_resource] , identifier[context] = identifier[context] )
keyword[return] identifier[variant] | def get_variant(variant_handle, context=None):
"""Create a variant given its handle (or serialized dict equivalent)
Args:
variant_handle (`ResourceHandle` or dict): Resource handle, or
equivalent serialized dict representation from
ResourceHandle.to_dict
context (`ResolvedContext`): The context this variant is associated
with, if any.
Returns:
`Variant`.
"""
if isinstance(variant_handle, dict):
variant_handle = ResourceHandle.from_dict(variant_handle) # depends on [control=['if'], data=[]]
variant_resource = package_repository_manager.get_resource_from_handle(variant_handle)
variant = Variant(variant_resource, context=context)
return variant |
def ident():
"""
This routine returns the 3x3 identity matrix.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ident_c.html
:return: The 3x3 identity matrix.
:rtype: 3x3-Element Array of floats
"""
# Allocate an empty ctypes double matrix; ident_c fills it in place.
matrix = stypes.emptyDoubleMatrix()
libspice.ident_c(matrix)
# Convert the C-level matrix to a NumPy array for the caller.
return stypes.cMatrixToNumpy(matrix) | def function[ident, parameter[]]:
constant[
This routine returns the 3x3 identity matrix.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ident_c.html
:return: The 3x3 identity matrix.
:rtype: 3x3-Element Array of floats
]
variable[matrix] assign[=] call[name[stypes].emptyDoubleMatrix, parameter[]]
call[name[libspice].ident_c, parameter[name[matrix]]]
return[call[name[stypes].cMatrixToNumpy, parameter[name[matrix]]]] | keyword[def] identifier[ident] ():
literal[string]
identifier[matrix] = identifier[stypes] . identifier[emptyDoubleMatrix] ()
identifier[libspice] . identifier[ident_c] ( identifier[matrix] )
keyword[return] identifier[stypes] . identifier[cMatrixToNumpy] ( identifier[matrix] ) | def ident():
"""
This routine returns the 3x3 identity matrix.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ident_c.html
:return: The 3x3 identity matrix.
:rtype: 3x3-Element Array of floats
"""
matrix = stypes.emptyDoubleMatrix()
libspice.ident_c(matrix)
return stypes.cMatrixToNumpy(matrix) |
def unhumanize_bandwidth(bitsstr):
'''
Take a string representing a link capacity, e.g., 10 Mb/s, and
return an integer representing the number of bits/sec.
Recognizes:
- 'bits/sec' or 'b/s' are treated as plain bits per second
- 'Kb' or 'kb' as thousand bits/sec
- 'Mb' or 'mb' as million bits/sec
- 'Gb' or 'gb' as billion bits/sec
- 'Tb' or 'tb' as trillion bits/sec
- if second character is 'B', quantity is interpreted as bytes/sec
- any subsequent characters after the first two are ignored, so
Kb/s Kb/sec Kbps are interpreted identically.
Returns None if the string doesn't contain anything parseable.
'''
# Ints are already in bits/sec; pass them through unchanged.
if isinstance(bitsstr, int):
return bitsstr
# NOTE(review): pattern should be a raw string (r'...') — '\s'/'\d' in a
# plain literal is an invalid escape sequence in modern Python.
mobj = re.match('^\s*([\d\.]+)\s*(.*)\s*$', bitsstr)
if not mobj:
return None
value, units = mobj.groups()
value = float(value)
multipliers = { 'b':1, 'k':1e3, 'm':1e6, 'g':1e9, 't':1e12 }
# Bare numbers are taken as plain bits.
if not units:
units = 'bits'
# NOTE(review): an unrecognized unit prefix maps to multiplier 0, so the
# function returns 0 rather than None — confirm this is intended.
mult = multipliers.get(units[0].lower(), 0)
bits = 1
# Capital 'B' as the second character means bytes: multiply by 8.
if len(units) > 1:
if units[1] == 'B': bits = 8
# print (bitsstr, value, mult, bits)
return int(value * mult * bits) | def function[unhumanize_bandwidth, parameter[bitsstr]]:
constant[
Take a string representing a link capacity, e.g., 10 Mb/s, and
return an integer representing the number of bits/sec.
Recognizes:
- 'bits/sec' or 'b/s' are treated as plain bits per second
- 'Kb' or 'kb' as thousand bits/sec
- 'Mb' or 'mb' as million bits/sec
- 'Gb' or 'gb' as billion bits/sec
- 'Tb' or 'tb' as trillion bits/sec
- if second character is 'B', quantity is interpreted as bytes/sec
- any subsequent characters after the first two are ignored, so
Kb/s Kb/sec Kbps are interpreted identically.
Returns None if the string doesn't contain anything parseable.
]
if call[name[isinstance], parameter[name[bitsstr], name[int]]] begin[:]
return[name[bitsstr]]
variable[mobj] assign[=] call[name[re].match, parameter[constant[^\s*([\d\.]+)\s*(.*)\s*$], name[bitsstr]]]
if <ast.UnaryOp object at 0x7da2044c1e10> begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da2044c0100> assign[=] call[name[mobj].groups, parameter[]]
variable[value] assign[=] call[name[float], parameter[name[value]]]
variable[multipliers] assign[=] dictionary[[<ast.Constant object at 0x7da2044c07c0>, <ast.Constant object at 0x7da2044c23e0>, <ast.Constant object at 0x7da2044c3670>, <ast.Constant object at 0x7da2044c2ec0>, <ast.Constant object at 0x7da20c6c6a40>], [<ast.Constant object at 0x7da20c6c7ca0>, <ast.Constant object at 0x7da20c6c4490>, <ast.Constant object at 0x7da20c6c4c40>, <ast.Constant object at 0x7da20c6c7c40>, <ast.Constant object at 0x7da20c6c5d50>]]
if <ast.UnaryOp object at 0x7da20c6c47f0> begin[:]
variable[units] assign[=] constant[bits]
variable[mult] assign[=] call[name[multipliers].get, parameter[call[call[name[units]][constant[0]].lower, parameter[]], constant[0]]]
variable[bits] assign[=] constant[1]
if compare[call[name[len], parameter[name[units]]] greater[>] constant[1]] begin[:]
if compare[call[name[units]][constant[1]] equal[==] constant[B]] begin[:]
variable[bits] assign[=] constant[8]
return[call[name[int], parameter[binary_operation[binary_operation[name[value] * name[mult]] * name[bits]]]]] | keyword[def] identifier[unhumanize_bandwidth] ( identifier[bitsstr] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[bitsstr] , identifier[int] ):
keyword[return] identifier[bitsstr]
identifier[mobj] = identifier[re] . identifier[match] ( literal[string] , identifier[bitsstr] )
keyword[if] keyword[not] identifier[mobj] :
keyword[return] keyword[None]
identifier[value] , identifier[units] = identifier[mobj] . identifier[groups] ()
identifier[value] = identifier[float] ( identifier[value] )
identifier[multipliers] ={ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] }
keyword[if] keyword[not] identifier[units] :
identifier[units] = literal[string]
identifier[mult] = identifier[multipliers] . identifier[get] ( identifier[units] [ literal[int] ]. identifier[lower] (), literal[int] )
identifier[bits] = literal[int]
keyword[if] identifier[len] ( identifier[units] )> literal[int] :
keyword[if] identifier[units] [ literal[int] ]== literal[string] : identifier[bits] = literal[int]
keyword[return] identifier[int] ( identifier[value] * identifier[mult] * identifier[bits] ) | def unhumanize_bandwidth(bitsstr):
"""
Take a string representing a link capacity, e.g., 10 Mb/s, and
return an integer representing the number of bits/sec.
Recognizes:
- 'bits/sec' or 'b/s' are treated as plain bits per second
- 'Kb' or 'kb' as thousand bits/sec
- 'Mb' or 'mb' as million bits/sec
- 'Gb' or 'gb' as billion bits/sec
- 'Tb' or 'tb' as trillion bits/sec
- if second character is 'B', quantity is interpreted as bytes/sec
- any subsequent characters after the first two are ignored, so
Kb/s Kb/sec Kbps are interpreted identically.
Returns None if the string doesn't contain anything parseable.
"""
if isinstance(bitsstr, int):
return bitsstr # depends on [control=['if'], data=[]]
mobj = re.match('^\\s*([\\d\\.]+)\\s*(.*)\\s*$', bitsstr)
if not mobj:
return None # depends on [control=['if'], data=[]]
(value, units) = mobj.groups()
value = float(value)
multipliers = {'b': 1, 'k': 1000.0, 'm': 1000000.0, 'g': 1000000000.0, 't': 1000000000000.0}
if not units:
units = 'bits' # depends on [control=['if'], data=[]]
mult = multipliers.get(units[0].lower(), 0)
bits = 1
if len(units) > 1:
if units[1] == 'B':
bits = 8 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# print (bitsstr, value, mult, bits)
return int(value * mult * bits) |
def make_ring_dicts(**kwargs):
    """Build and return the information about the Galprop rings.

    Keyword Arguments
    -----------------
    library : str
        Path to the YAML library file listing diffuse components
        (default 'models/library.yaml').  If None, or the string 'None',
        no ring dictionaries are built.
    GalpropMapManager : GalpropMapManager
        Manager object to populate; a new one is constructed from the
        remaining **kwargs when not supplied.

    Returns
    -------
    GalpropMapManager
        The manager, with one ring dict made per version of every
        'galprop_rings' component in the library.
    """
    library_yamlfile = kwargs.get('library', 'models/library.yaml')
    gmm = kwargs.get('GalpropMapManager', GalpropMapManager(**kwargs))
    # Nothing to do without a library file ('None' may arrive as a
    # string from a configuration file).
    if library_yamlfile is None or library_yamlfile == 'None':
        return gmm
    diffuse_comps = DiffuseModelManager.read_diffuse_component_yaml(library_yamlfile)
    for diffuse_value in diffuse_comps.values():
        if diffuse_value is None:
            continue
        # Only 'galprop_rings' components carry ring information.
        if diffuse_value['model_type'] != 'galprop_rings':
            continue
        versions = diffuse_value['versions']
        for version in versions:
            gmm.make_ring_dict(version)
return gmm | def function[make_ring_dicts, parameter[]]:
constant[Build and return the information about the Galprop rings
]
variable[library_yamlfile] assign[=] call[name[kwargs].get, parameter[constant[library], constant[models/library.yaml]]]
variable[gmm] assign[=] call[name[kwargs].get, parameter[constant[GalpropMapManager], call[name[GalpropMapManager], parameter[]]]]
if <ast.BoolOp object at 0x7da207f9b820> begin[:]
return[name[gmm]]
variable[diffuse_comps] assign[=] call[name[DiffuseModelManager].read_diffuse_component_yaml, parameter[name[library_yamlfile]]]
for taget[name[diffuse_value]] in starred[call[name[diffuse_comps].values, parameter[]]] begin[:]
if compare[name[diffuse_value] is constant[None]] begin[:]
continue
if compare[call[name[diffuse_value]][constant[model_type]] not_equal[!=] constant[galprop_rings]] begin[:]
continue
variable[versions] assign[=] call[name[diffuse_value]][constant[versions]]
for taget[name[version]] in starred[name[versions]] begin[:]
call[name[gmm].make_ring_dict, parameter[name[version]]]
return[name[gmm]] | keyword[def] identifier[make_ring_dicts] (** identifier[kwargs] ):
literal[string]
identifier[library_yamlfile] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[gmm] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[GalpropMapManager] (** identifier[kwargs] ))
keyword[if] identifier[library_yamlfile] keyword[is] keyword[None] keyword[or] identifier[library_yamlfile] == literal[string] :
keyword[return] identifier[gmm]
identifier[diffuse_comps] = identifier[DiffuseModelManager] . identifier[read_diffuse_component_yaml] ( identifier[library_yamlfile] )
keyword[for] identifier[diffuse_value] keyword[in] identifier[diffuse_comps] . identifier[values] ():
keyword[if] identifier[diffuse_value] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[diffuse_value] [ literal[string] ]!= literal[string] :
keyword[continue]
identifier[versions] = identifier[diffuse_value] [ literal[string] ]
keyword[for] identifier[version] keyword[in] identifier[versions] :
identifier[gmm] . identifier[make_ring_dict] ( identifier[version] )
keyword[return] identifier[gmm] | def make_ring_dicts(**kwargs):
"""Build and return the information about the Galprop rings
"""
library_yamlfile = kwargs.get('library', 'models/library.yaml')
gmm = kwargs.get('GalpropMapManager', GalpropMapManager(**kwargs))
if library_yamlfile is None or library_yamlfile == 'None':
return gmm # depends on [control=['if'], data=[]]
diffuse_comps = DiffuseModelManager.read_diffuse_component_yaml(library_yamlfile)
for diffuse_value in diffuse_comps.values():
if diffuse_value is None:
continue # depends on [control=['if'], data=[]]
if diffuse_value['model_type'] != 'galprop_rings':
continue # depends on [control=['if'], data=[]]
versions = diffuse_value['versions']
for version in versions:
gmm.make_ring_dict(version) # depends on [control=['for'], data=['version']] # depends on [control=['for'], data=['diffuse_value']]
return gmm |
def v2_runner_on_skipped(self, result, **kwargs):
    """Run when a task is skipped.

    Prints the host name, a colorized 'skipped' status and the skip
    reason, padded with dashes to a fixed line length.
    """
    # At higher verbosity also show which task this result belongs to.
    if self._display.verbosity > 1:
        self._print_task()
        self.last_skipped = False
    line_length = 120  # total width the dashed rule is padded to
    # Pad so the dash after the host name lands at a fixed column
    # regardless of host-name length.
    spaces = " " * (31 - len(result._host.name) - 4)
    line = " * {}{}- {}".format(
        colorize(result._host.name, "not_so_bold"),
        spaces,
        colorize("skipped", "skipped"),
    )
    # The skip reason has been stored under both keys historically;
    # accept either, preferring 'skipped_reason'.
    reason = result._result.get("skipped_reason", "") or result._result.get(
        "skip_reason", ""
    )
    if len(reason) < 50:
        # Short reason: keep it on the same line as the status.
        line += " -- {}".format(reason)
        print("{} {}---------".format(line, "-" * (line_length - len(line))))
    else:
        # Long reason: print it indented on its own line(s) below.
        print("{} {}".format(line, "-" * (line_length - len(line))))
        print(self._indent_text(reason, 8))
print(reason) | def function[v2_runner_on_skipped, parameter[self, result]]:
constant[Run when a task is skipped.]
if compare[name[self]._display.verbosity greater[>] constant[1]] begin[:]
call[name[self]._print_task, parameter[]]
name[self].last_skipped assign[=] constant[False]
variable[line_length] assign[=] constant[120]
variable[spaces] assign[=] binary_operation[constant[ ] * binary_operation[binary_operation[constant[31] - call[name[len], parameter[name[result]._host.name]]] - constant[4]]]
variable[line] assign[=] call[constant[ * {}{}- {}].format, parameter[call[name[colorize], parameter[name[result]._host.name, constant[not_so_bold]]], name[spaces], call[name[colorize], parameter[constant[skipped], constant[skipped]]]]]
variable[reason] assign[=] <ast.BoolOp object at 0x7da1b03bae30>
if compare[call[name[len], parameter[name[reason]]] less[<] constant[50]] begin[:]
<ast.AugAssign object at 0x7da1b03285e0>
call[name[print], parameter[call[constant[{} {}---------].format, parameter[name[line], binary_operation[constant[-] * binary_operation[name[line_length] - call[name[len], parameter[name[line]]]]]]]]] | keyword[def] identifier[v2_runner_on_skipped] ( identifier[self] , identifier[result] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[_display] . identifier[verbosity] > literal[int] :
identifier[self] . identifier[_print_task] ()
identifier[self] . identifier[last_skipped] = keyword[False]
identifier[line_length] = literal[int]
identifier[spaces] = literal[string] *( literal[int] - identifier[len] ( identifier[result] . identifier[_host] . identifier[name] )- literal[int] )
identifier[line] = literal[string] . identifier[format] (
identifier[colorize] ( identifier[result] . identifier[_host] . identifier[name] , literal[string] ),
identifier[spaces] ,
identifier[colorize] ( literal[string] , literal[string] ),
)
identifier[reason] = identifier[result] . identifier[_result] . identifier[get] ( literal[string] , literal[string] ) keyword[or] identifier[result] . identifier[_result] . identifier[get] (
literal[string] , literal[string]
)
keyword[if] identifier[len] ( identifier[reason] )< literal[int] :
identifier[line] += literal[string] . identifier[format] ( identifier[reason] )
identifier[print] ( literal[string] . identifier[format] ( identifier[line] , literal[string] *( identifier[line_length] - identifier[len] ( identifier[line] ))))
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[line] , literal[string] *( identifier[line_length] - identifier[len] ( identifier[line] ))))
identifier[print] ( identifier[self] . identifier[_indent_text] ( identifier[reason] , literal[int] ))
identifier[print] ( identifier[reason] ) | def v2_runner_on_skipped(self, result, **kwargs):
"""Run when a task is skipped."""
if self._display.verbosity > 1:
self._print_task()
self.last_skipped = False
line_length = 120
spaces = ' ' * (31 - len(result._host.name) - 4)
line = ' * {}{}- {}'.format(colorize(result._host.name, 'not_so_bold'), spaces, colorize('skipped', 'skipped'))
reason = result._result.get('skipped_reason', '') or result._result.get('skip_reason', '')
if len(reason) < 50:
line += ' -- {}'.format(reason)
print('{} {}---------'.format(line, '-' * (line_length - len(line)))) # depends on [control=['if'], data=[]]
else:
print('{} {}'.format(line, '-' * (line_length - len(line))))
print(self._indent_text(reason, 8))
print(reason) # depends on [control=['if'], data=[]] |
def convert(source_las, *, point_format_id=None, file_version=None):
    """ Converts a Las from one point format to another
    Automatically upgrades the file version if source file version is not compatible with
    the new point_format_id
    convert to point format 0
    >>> las = read_las('pylastests/simple.las')
    >>> las.header.version
    '1.2'
    >>> las = convert(las, point_format_id=0)
    >>> las.header.point_format_id
    0
    >>> las.header.version
    '1.2'
    convert to point format 6, which need version >= 1.4
    then convert back to point format 0, version is not downgraded
    >>> las = read_las('pylastests/simple.las')
    >>> las.header.version
    '1.2'
    >>> las = convert(las, point_format_id=6)
    >>> las.header.point_format_id
    6
    >>> las.header.version
    '1.4'
    >>> las = convert(las, point_format_id=0)
    >>> las.header.version
    '1.4'
    an exception is raised if the requested point format is not compatible
    with the file version
    >>> las = read_las('pylastests/simple.las')
    >>> convert(las, point_format_id=6, file_version='1.2')
    Traceback (most recent call last):
    ...
    pylas.errors.PylasError: Point format 6 is not compatible with file version 1.2
    Parameters
    ----------
    source_las : pylas.lasdatas.base.LasBase
        The source data to be converted
    point_format_id : int, optional
        The new point format id (the default is None, which won't change the source format id)
    file_version : str, optional,
        The new file version. None by default which means that the file_version
        may be upgraded for compatibility with the new point_format. The file version will not
        be downgraded.
    Returns
    -------
    pylas.lasdatas.base.LasBase
    """
    # Default to the source's current point format.
    if point_format_id is None:
        point_format_id = source_las.points_data.point_format.id
    if file_version is None:
        # Upgrade, never downgrade: take the max of the source version and
        # the minimum version supporting the target format (lexicographic
        # max on strings; fine for '1.x'-style version numbers).
        file_version = max(
            source_las.header.version,
            dims.min_file_version_for_point_format(point_format_id),
        )
    else:
        # An explicitly requested version is validated rather than adjusted.
        file_version = str(file_version)
        dims.raise_if_version_not_compatible_with_fmt(point_format_id, file_version)
    header = headers.HeaderFactory.convert_header(source_las.header, file_version)
    header.point_format_id = point_format_id
    # Preserve any extra (user-defined) dimensions of the source points.
    point_format = PointFormat(
        point_format_id, source_las.points_data.point_format.extra_dims
    )
    points = record.PackedPointRecord.from_point_record(
        source_las.points_data, point_format
    )
    # Accessing evlrs can raise ValueError (e.g. when the source cannot
    # carry EVLRs); treat that as "no EVLRs".
    try:
        evlrs = source_las.evlrs
    except ValueError:
        evlrs = []
    if file_version >= "1.4":
        las = las14.LasData(
            header=header, vlrs=source_las.vlrs, points=points, evlrs=evlrs
        )
    else:
        # Pre-1.4 files cannot store EVLRs; warn that they are dropped.
        if evlrs:
            logger.warning(
                "The source contained {} EVLRs,"
                " they will be lost as version {} doest not support them".format(
                    len(evlrs), file_version
                )
            )
        las = las12.LasData(header=header, vlrs=source_las.vlrs, points=points)
return las | def function[convert, parameter[source_las]]:
constant[ Converts a Las from one point format to another
Automatically upgrades the file version if source file version is not compatible with
the new point_format_id
convert to point format 0
>>> las = read_las('pylastests/simple.las')
>>> las.header.version
'1.2'
>>> las = convert(las, point_format_id=0)
>>> las.header.point_format_id
0
>>> las.header.version
'1.2'
convert to point format 6, which need version >= 1.4
then convert back to point format 0, version is not downgraded
>>> las = read_las('pylastests/simple.las')
>>> las.header.version
'1.2'
>>> las = convert(las, point_format_id=6)
>>> las.header.point_format_id
6
>>> las.header.version
'1.4'
>>> las = convert(las, point_format_id=0)
>>> las.header.version
'1.4'
an exception is raised if the requested point format is not compatible
with the file version
>>> las = read_las('pylastests/simple.las')
>>> convert(las, point_format_id=6, file_version='1.2')
Traceback (most recent call last):
...
pylas.errors.PylasError: Point format 6 is not compatible with file version 1.2
Parameters
----------
source_las : pylas.lasdatas.base.LasBase
The source data to be converted
point_format_id : int, optional
The new point format id (the default is None, which won't change the source format id)
file_version : str, optional,
The new file version. None by default which means that the file_version
may be upgraded for compatibility with the new point_format. The file version will not
be downgraded.
Returns
-------
pylas.lasdatas.base.LasBase
]
if compare[name[point_format_id] is constant[None]] begin[:]
variable[point_format_id] assign[=] name[source_las].points_data.point_format.id
if compare[name[file_version] is constant[None]] begin[:]
variable[file_version] assign[=] call[name[max], parameter[name[source_las].header.version, call[name[dims].min_file_version_for_point_format, parameter[name[point_format_id]]]]]
variable[header] assign[=] call[name[headers].HeaderFactory.convert_header, parameter[name[source_las].header, name[file_version]]]
name[header].point_format_id assign[=] name[point_format_id]
variable[point_format] assign[=] call[name[PointFormat], parameter[name[point_format_id], name[source_las].points_data.point_format.extra_dims]]
variable[points] assign[=] call[name[record].PackedPointRecord.from_point_record, parameter[name[source_las].points_data, name[point_format]]]
<ast.Try object at 0x7da18c4ce920>
if compare[name[file_version] greater_or_equal[>=] constant[1.4]] begin[:]
variable[las] assign[=] call[name[las14].LasData, parameter[]]
return[name[las]] | keyword[def] identifier[convert] ( identifier[source_las] ,*, identifier[point_format_id] = keyword[None] , identifier[file_version] = keyword[None] ):
literal[string]
keyword[if] identifier[point_format_id] keyword[is] keyword[None] :
identifier[point_format_id] = identifier[source_las] . identifier[points_data] . identifier[point_format] . identifier[id]
keyword[if] identifier[file_version] keyword[is] keyword[None] :
identifier[file_version] = identifier[max] (
identifier[source_las] . identifier[header] . identifier[version] ,
identifier[dims] . identifier[min_file_version_for_point_format] ( identifier[point_format_id] ),
)
keyword[else] :
identifier[file_version] = identifier[str] ( identifier[file_version] )
identifier[dims] . identifier[raise_if_version_not_compatible_with_fmt] ( identifier[point_format_id] , identifier[file_version] )
identifier[header] = identifier[headers] . identifier[HeaderFactory] . identifier[convert_header] ( identifier[source_las] . identifier[header] , identifier[file_version] )
identifier[header] . identifier[point_format_id] = identifier[point_format_id]
identifier[point_format] = identifier[PointFormat] (
identifier[point_format_id] , identifier[source_las] . identifier[points_data] . identifier[point_format] . identifier[extra_dims]
)
identifier[points] = identifier[record] . identifier[PackedPointRecord] . identifier[from_point_record] (
identifier[source_las] . identifier[points_data] , identifier[point_format]
)
keyword[try] :
identifier[evlrs] = identifier[source_las] . identifier[evlrs]
keyword[except] identifier[ValueError] :
identifier[evlrs] =[]
keyword[if] identifier[file_version] >= literal[string] :
identifier[las] = identifier[las14] . identifier[LasData] (
identifier[header] = identifier[header] , identifier[vlrs] = identifier[source_las] . identifier[vlrs] , identifier[points] = identifier[points] , identifier[evlrs] = identifier[evlrs]
)
keyword[else] :
keyword[if] identifier[evlrs] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string] . identifier[format] (
identifier[len] ( identifier[evlrs] ), identifier[file_version]
)
)
identifier[las] = identifier[las12] . identifier[LasData] ( identifier[header] = identifier[header] , identifier[vlrs] = identifier[source_las] . identifier[vlrs] , identifier[points] = identifier[points] )
keyword[return] identifier[las] | def convert(source_las, *, point_format_id=None, file_version=None):
""" Converts a Las from one point format to another
Automatically upgrades the file version if source file version is not compatible with
the new point_format_id
convert to point format 0
>>> las = read_las('pylastests/simple.las')
>>> las.header.version
'1.2'
>>> las = convert(las, point_format_id=0)
>>> las.header.point_format_id
0
>>> las.header.version
'1.2'
convert to point format 6, which need version >= 1.4
then convert back to point format 0, version is not downgraded
>>> las = read_las('pylastests/simple.las')
>>> las.header.version
'1.2'
>>> las = convert(las, point_format_id=6)
>>> las.header.point_format_id
6
>>> las.header.version
'1.4'
>>> las = convert(las, point_format_id=0)
>>> las.header.version
'1.4'
an exception is raised if the requested point format is not compatible
with the file version
>>> las = read_las('pylastests/simple.las')
>>> convert(las, point_format_id=6, file_version='1.2')
Traceback (most recent call last):
...
pylas.errors.PylasError: Point format 6 is not compatible with file version 1.2
Parameters
----------
source_las : pylas.lasdatas.base.LasBase
The source data to be converted
point_format_id : int, optional
The new point format id (the default is None, which won't change the source format id)
file_version : str, optional,
The new file version. None by default which means that the file_version
may be upgraded for compatibility with the new point_format. The file version will not
be downgraded.
Returns
-------
pylas.lasdatas.base.LasBase
"""
if point_format_id is None:
point_format_id = source_las.points_data.point_format.id # depends on [control=['if'], data=['point_format_id']]
if file_version is None:
file_version = max(source_las.header.version, dims.min_file_version_for_point_format(point_format_id)) # depends on [control=['if'], data=['file_version']]
else:
file_version = str(file_version)
dims.raise_if_version_not_compatible_with_fmt(point_format_id, file_version)
header = headers.HeaderFactory.convert_header(source_las.header, file_version)
header.point_format_id = point_format_id
point_format = PointFormat(point_format_id, source_las.points_data.point_format.extra_dims)
points = record.PackedPointRecord.from_point_record(source_las.points_data, point_format)
try:
evlrs = source_las.evlrs # depends on [control=['try'], data=[]]
except ValueError:
evlrs = [] # depends on [control=['except'], data=[]]
if file_version >= '1.4':
las = las14.LasData(header=header, vlrs=source_las.vlrs, points=points, evlrs=evlrs) # depends on [control=['if'], data=[]]
else:
if evlrs:
logger.warning('The source contained {} EVLRs, they will be lost as version {} doest not support them'.format(len(evlrs), file_version)) # depends on [control=['if'], data=[]]
las = las12.LasData(header=header, vlrs=source_las.vlrs, points=points)
return las |
def adapt_array(arr):
    """
    Adapts a Numpy array into an ARRAY string to put into the database.

    The array is serialized with ``np.save`` into an in-memory buffer
    whose raw bytes are handed to the database driver.

    Parameters
    ----------
    arr: array
        The Numpy array to be adapted into an ARRAY type that can be inserted into a SQL file.

    Returns
    -------
    ARRAY
        The adapted array object
    """
    out = io.BytesIO()
    # Serialize into the buffer, then rewind so the bytes can be read
    # back from the start (the tuple just runs both calls on one line).
    np.save(out, arr), out.seek(0)
return buffer(out.read()) | def function[adapt_array, parameter[arr]]:
constant[
Adapts a Numpy array into an ARRAY string to put into the database.
Parameters
----------
arr: array
The Numpy array to be adapted into an ARRAY type that can be inserted into a SQL file.
Returns
-------
ARRAY
The adapted array object
]
variable[out] assign[=] call[name[io].BytesIO, parameter[]]
tuple[[<ast.Call object at 0x7da1b0a706d0>, <ast.Call object at 0x7da1b0a71780>]]
return[call[name[buffer], parameter[call[name[out].read, parameter[]]]]] | keyword[def] identifier[adapt_array] ( identifier[arr] ):
literal[string]
identifier[out] = identifier[io] . identifier[BytesIO] ()
identifier[np] . identifier[save] ( identifier[out] , identifier[arr] ), identifier[out] . identifier[seek] ( literal[int] )
keyword[return] identifier[buffer] ( identifier[out] . identifier[read] ()) | def adapt_array(arr):
"""
Adapts a Numpy array into an ARRAY string to put into the database.
Parameters
----------
arr: array
The Numpy array to be adapted into an ARRAY type that can be inserted into a SQL file.
Returns
-------
ARRAY
The adapted array object
"""
out = io.BytesIO()
(np.save(out, arr), out.seek(0))
return buffer(out.read()) |
def sg_input(shape=None, dtype=sg_floatx, name=None):
    r"""Creates a placeholder.
    Args:
      shape: A tuple/list of integers. If an integers is given, it will turn to a list.
        If None, the placeholder has a completely unspecified shape.
      dtype: A data type. Default is float32.
      name: A name for the placeholder.
    Returns:
      A wrapped placeholder `Tensor` whose leading (batch) dimension is None.
    """
    if shape is None:
        # No shape given: placeholder with fully unspecified shape.
        return tf.placeholder(dtype, shape=None, name=name)
    else:
        # Normalize a bare integer into a one-element list.
        if not isinstance(shape, (list, tuple)):
            shape = [shape]
return tf.placeholder(dtype, shape=[None] + list(shape), name=name) | def function[sg_input, parameter[shape, dtype, name]]:
constant[Creates a placeholder.
Args:
shape: A tuple/list of integers. If an integers is given, it will turn to a list.
dtype: A data type. Default is float32.
name: A name for the placeholder.
Returns:
A wrapped placeholder `Tensor`.
]
if compare[name[shape] is constant[None]] begin[:]
return[call[name[tf].placeholder, parameter[name[dtype]]]] | keyword[def] identifier[sg_input] ( identifier[shape] = keyword[None] , identifier[dtype] = identifier[sg_floatx] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] identifier[shape] keyword[is] keyword[None] :
keyword[return] identifier[tf] . identifier[placeholder] ( identifier[dtype] , identifier[shape] = keyword[None] , identifier[name] = identifier[name] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[shape] ,( identifier[list] , identifier[tuple] )):
identifier[shape] =[ identifier[shape] ]
keyword[return] identifier[tf] . identifier[placeholder] ( identifier[dtype] , identifier[shape] =[ keyword[None] ]+ identifier[list] ( identifier[shape] ), identifier[name] = identifier[name] ) | def sg_input(shape=None, dtype=sg_floatx, name=None):
"""Creates a placeholder.
Args:
shape: A tuple/list of integers. If an integers is given, it will turn to a list.
dtype: A data type. Default is float32.
name: A name for the placeholder.
Returns:
A wrapped placeholder `Tensor`.
"""
if shape is None:
return tf.placeholder(dtype, shape=None, name=name) # depends on [control=['if'], data=[]]
else:
if not isinstance(shape, (list, tuple)):
shape = [shape] # depends on [control=['if'], data=[]]
return tf.placeholder(dtype, shape=[None] + list(shape), name=name) |
def send(self, alf):
    ''' Non-blocking send.

    Hands the ALF payload off to a background SendThread (configured
    with this client's url, connection timeout and retry count) so the
    caller does not block on delivery.
    '''
    # The thread owns the actual delivery, including retries.
    send_alf = SendThread(self.url, alf, self.connection_timeout, self.retry_count)
send_alf.start() | def function[send, parameter[self, alf]]:
constant[ Non-blocking send ]
variable[send_alf] assign[=] call[name[SendThread], parameter[name[self].url, name[alf], name[self].connection_timeout, name[self].retry_count]]
call[name[send_alf].start, parameter[]] | keyword[def] identifier[send] ( identifier[self] , identifier[alf] ):
literal[string]
identifier[send_alf] = identifier[SendThread] ( identifier[self] . identifier[url] , identifier[alf] , identifier[self] . identifier[connection_timeout] , identifier[self] . identifier[retry_count] )
identifier[send_alf] . identifier[start] () | def send(self, alf):
""" Non-blocking send """
send_alf = SendThread(self.url, alf, self.connection_timeout, self.retry_count)
send_alf.start() |
def _create_file(self, filename):
    """Ensure a new file is created and opened for writing.

    O_EXCL together with O_CREAT makes os.open fail (FileExistsError)
    if the file already exists, so an existing file is never truncated
    or overwritten.
    """
    file_descriptor = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
return os.fdopen(file_descriptor, 'w') | def function[_create_file, parameter[self, filename]]:
constant[Ensure a new file is created and opened for writing.]
variable[file_descriptor] assign[=] call[name[os].open, parameter[name[filename], binary_operation[binary_operation[name[os].O_WRONLY <ast.BitOr object at 0x7da2590d6aa0> name[os].O_CREAT] <ast.BitOr object at 0x7da2590d6aa0> name[os].O_EXCL]]]
return[call[name[os].fdopen, parameter[name[file_descriptor], constant[w]]]] | keyword[def] identifier[_create_file] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[file_descriptor] = identifier[os] . identifier[open] ( identifier[filename] , identifier[os] . identifier[O_WRONLY] | identifier[os] . identifier[O_CREAT] | identifier[os] . identifier[O_EXCL] )
keyword[return] identifier[os] . identifier[fdopen] ( identifier[file_descriptor] , literal[string] ) | def _create_file(self, filename):
"""Ensure a new file is created and opened for writing."""
file_descriptor = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
return os.fdopen(file_descriptor, 'w') |
def update(self, muted=values.unset, hold=values.unset, hold_url=values.unset,
           hold_method=values.unset, announce_url=values.unset,
           announce_method=values.unset, wait_url=values.unset,
           wait_method=values.unset, beep_on_exit=values.unset,
           end_conference_on_exit=values.unset, coaching=values.unset,
           call_sid_to_coach=values.unset):
    """
    Update the ParticipantInstance
    :param bool muted: Whether the participant should be muted
    :param bool hold: Whether the participant should be on hold
    :param unicode hold_url: The URL we call using the `hold_method` for music that plays when the participant is on hold
    :param unicode hold_method: The HTTP method we should use to call hold_url
    :param unicode announce_url: The URL we call using the `announce_method` for an announcement to the participant
    :param unicode announce_method: The HTTP method we should use to call announce_url
    :param unicode wait_url: URL that hosts pre-conference hold music
    :param unicode wait_method: The HTTP method we should use to call `wait_url`
    :param bool beep_on_exit: Whether to play a notification beep to the conference when the participant exit
    :param bool end_conference_on_exit: Whether to end the conference when the participant leaves
    :param bool coaching: Indicates if the participant changed to coach
    :param unicode call_sid_to_coach: The SID of the participant who is being `coached`
    :returns: Updated ParticipantInstance
    :rtype: twilio.rest.api.v2010.account.conference.participant.ParticipantInstance
    """
    # Build the POST payload from the keyword arguments.
    # NOTE(review): values.of presumably drops `unset` sentinels so only
    # explicitly supplied fields reach the API -- confirm against the
    # twilio helper library.
    data = values.of({
        'Muted': muted,
        'Hold': hold,
        'HoldUrl': hold_url,
        'HoldMethod': hold_method,
        'AnnounceUrl': announce_url,
        'AnnounceMethod': announce_method,
        'WaitUrl': wait_url,
        'WaitMethod': wait_method,
        'BeepOnExit': beep_on_exit,
        'EndConferenceOnExit': end_conference_on_exit,
        'Coaching': coaching,
        'CallSidToCoach': call_sid_to_coach,
    })
    payload = self._version.update(
        'POST',
        self._uri,
        data=data,
    )
    # Wrap the raw response in a fresh instance bound to the same
    # account/conference/call identifiers as this context.
    return ParticipantInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        conference_sid=self._solution['conference_sid'],
        call_sid=self._solution['call_sid'],
) | def function[update, parameter[self, muted, hold, hold_url, hold_method, announce_url, announce_method, wait_url, wait_method, beep_on_exit, end_conference_on_exit, coaching, call_sid_to_coach]]:
constant[
Update the ParticipantInstance
:param bool muted: Whether the participant should be muted
:param bool hold: Whether the participant should be on hold
:param unicode hold_url: The URL we call using the `hold_method` for music that plays when the participant is on hold
:param unicode hold_method: The HTTP method we should use to call hold_url
:param unicode announce_url: The URL we call using the `announce_method` for an announcement to the participant
:param unicode announce_method: The HTTP method we should use to call announce_url
:param unicode wait_url: URL that hosts pre-conference hold music
:param unicode wait_method: The HTTP method we should use to call `wait_url`
:param bool beep_on_exit: Whether to play a notification beep to the conference when the participant exit
:param bool end_conference_on_exit: Whether to end the conference when the participant leaves
:param bool coaching: Indicates if the participant changed to coach
:param unicode call_sid_to_coach: The SID of the participant who is being `coached`
:returns: Updated ParticipantInstance
:rtype: twilio.rest.api.v2010.account.conference.participant.ParticipantInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da2054a57b0>, <ast.Constant object at 0x7da2054a4eb0>, <ast.Constant object at 0x7da2054a6890>, <ast.Constant object at 0x7da2054a5210>, <ast.Constant object at 0x7da2054a7760>, <ast.Constant object at 0x7da2054a6500>, <ast.Constant object at 0x7da2054a5840>, <ast.Constant object at 0x7da2054a6b90>, <ast.Constant object at 0x7da2054a5ea0>, <ast.Constant object at 0x7da2054a5ae0>, <ast.Constant object at 0x7da2054a6230>, <ast.Constant object at 0x7da2054a7280>], [<ast.Name object at 0x7da2054a41f0>, <ast.Name object at 0x7da2054a73a0>, <ast.Name object at 0x7da2054a4be0>, <ast.Name object at 0x7da2054a60e0>, <ast.Name object at 0x7da2054a7310>, <ast.Name object at 0x7da2054a4df0>, <ast.Name object at 0x7da2054a6cb0>, <ast.Name object at 0x7da2054a7b50>, <ast.Name object at 0x7da2054a5c90>, <ast.Name object at 0x7da2054a4370>, <ast.Name object at 0x7da2054a5fc0>, <ast.Name object at 0x7da2054a56f0>]]]]
variable[payload] assign[=] call[name[self]._version.update, parameter[constant[POST], name[self]._uri]]
return[call[name[ParticipantInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[update] ( identifier[self] , identifier[muted] = identifier[values] . identifier[unset] , identifier[hold] = identifier[values] . identifier[unset] , identifier[hold_url] = identifier[values] . identifier[unset] ,
identifier[hold_method] = identifier[values] . identifier[unset] , identifier[announce_url] = identifier[values] . identifier[unset] ,
identifier[announce_method] = identifier[values] . identifier[unset] , identifier[wait_url] = identifier[values] . identifier[unset] ,
identifier[wait_method] = identifier[values] . identifier[unset] , identifier[beep_on_exit] = identifier[values] . identifier[unset] ,
identifier[end_conference_on_exit] = identifier[values] . identifier[unset] , identifier[coaching] = identifier[values] . identifier[unset] ,
identifier[call_sid_to_coach] = identifier[values] . identifier[unset] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({
literal[string] : identifier[muted] ,
literal[string] : identifier[hold] ,
literal[string] : identifier[hold_url] ,
literal[string] : identifier[hold_method] ,
literal[string] : identifier[announce_url] ,
literal[string] : identifier[announce_method] ,
literal[string] : identifier[wait_url] ,
literal[string] : identifier[wait_method] ,
literal[string] : identifier[beep_on_exit] ,
literal[string] : identifier[end_conference_on_exit] ,
literal[string] : identifier[coaching] ,
literal[string] : identifier[call_sid_to_coach] ,
})
identifier[payload] = identifier[self] . identifier[_version] . identifier[update] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[ParticipantInstance] (
identifier[self] . identifier[_version] ,
identifier[payload] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[conference_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[call_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
) | def update(self, muted=values.unset, hold=values.unset, hold_url=values.unset, hold_method=values.unset, announce_url=values.unset, announce_method=values.unset, wait_url=values.unset, wait_method=values.unset, beep_on_exit=values.unset, end_conference_on_exit=values.unset, coaching=values.unset, call_sid_to_coach=values.unset):
"""
Update the ParticipantInstance
:param bool muted: Whether the participant should be muted
:param bool hold: Whether the participant should be on hold
:param unicode hold_url: The URL we call using the `hold_method` for music that plays when the participant is on hold
:param unicode hold_method: The HTTP method we should use to call hold_url
:param unicode announce_url: The URL we call using the `announce_method` for an announcement to the participant
:param unicode announce_method: The HTTP method we should use to call announce_url
:param unicode wait_url: URL that hosts pre-conference hold music
:param unicode wait_method: The HTTP method we should use to call `wait_url`
:param bool beep_on_exit: Whether to play a notification beep to the conference when the participant exit
:param bool end_conference_on_exit: Whether to end the conference when the participant leaves
:param bool coaching: Indicates if the participant changed to coach
:param unicode call_sid_to_coach: The SID of the participant who is being `coached`
:returns: Updated ParticipantInstance
:rtype: twilio.rest.api.v2010.account.conference.participant.ParticipantInstance
"""
data = values.of({'Muted': muted, 'Hold': hold, 'HoldUrl': hold_url, 'HoldMethod': hold_method, 'AnnounceUrl': announce_url, 'AnnounceMethod': announce_method, 'WaitUrl': wait_url, 'WaitMethod': wait_method, 'BeepOnExit': beep_on_exit, 'EndConferenceOnExit': end_conference_on_exit, 'Coaching': coaching, 'CallSidToCoach': call_sid_to_coach})
payload = self._version.update('POST', self._uri, data=data)
return ParticipantInstance(self._version, payload, account_sid=self._solution['account_sid'], conference_sid=self._solution['conference_sid'], call_sid=self._solution['call_sid']) |
def _get_filename_path(self, path):
    """ Helper function for creating filename without file extension
    """
    # Base path component comes from the feature type's enum value.
    feature_filename = os.path.join(path, self.feature_type.value)
    # When a specific feature name is set, nest one directory level deeper.
    if self.feature_name is not None:
        feature_filename = os.path.join(feature_filename, self.feature_name)
    return feature_filename | def function[_get_filename_path, parameter[self, path]]:
constant[ Helper function for creating filename without file extension
]
variable[feature_filename] assign[=] call[name[os].path.join, parameter[name[path], name[self].feature_type.value]]
if compare[name[self].feature_name is_not constant[None]] begin[:]
variable[feature_filename] assign[=] call[name[os].path.join, parameter[name[feature_filename], name[self].feature_name]]
return[name[feature_filename]] | keyword[def] identifier[_get_filename_path] ( identifier[self] , identifier[path] ):
literal[string]
identifier[feature_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[self] . identifier[feature_type] . identifier[value] )
keyword[if] identifier[self] . identifier[feature_name] keyword[is] keyword[not] keyword[None] :
identifier[feature_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[feature_filename] , identifier[self] . identifier[feature_name] )
keyword[return] identifier[feature_filename] | def _get_filename_path(self, path):
""" Helper function for creating filename without file extension
"""
feature_filename = os.path.join(path, self.feature_type.value)
if self.feature_name is not None:
feature_filename = os.path.join(feature_filename, self.feature_name) # depends on [control=['if'], data=[]]
return feature_filename |
def job(request):
    """View for a single job."""
    job_id = request.GET.get("job_id")
    # Sidebar data: the 100 most recently started jobs.
    recent_jobs = JobRecord.objects.order_by("-start_time")[0:100]
    # All trials belonging to the requested job, newest first.
    recent_trials = TrialRecord.objects \
        .filter(job_id=job_id) \
        .order_by("-start_time")
    trial_records = []
    for recent_trial in recent_trials:
        trial_records.append(get_trial_info(recent_trial))
    # NOTE(review): raises IndexError when no JobRecord matches job_id —
    # TODO confirm callers always supply a valid id, or guard this lookup.
    current_job = JobRecord.objects \
        .filter(job_id=job_id) \
        .order_by("-start_time")[0]
    # Hyperparameter column headers come from the first trial's params dict.
    if len(trial_records) > 0:
        param_keys = trial_records[0]["params"].keys()
    else:
        param_keys = []
    # TODO: support custom metrics here
    metric_keys = ["episode_reward", "accuracy", "loss"]
    context = {
        "current_job": get_job_info(current_job),
        "recent_jobs": recent_jobs,
        "recent_trials": trial_records,
        "param_keys": param_keys,
        "param_num": len(param_keys),
        "metric_keys": metric_keys,
        "metric_num": len(metric_keys)
    }
    return render(request, "job.html", context) | def function[job, parameter[request]]:
constant[View for a single job.]
variable[job_id] assign[=] call[name[request].GET.get, parameter[constant[job_id]]]
variable[recent_jobs] assign[=] call[call[name[JobRecord].objects.order_by, parameter[constant[-start_time]]]][<ast.Slice object at 0x7da18f09c490>]
variable[recent_trials] assign[=] call[call[name[TrialRecord].objects.filter, parameter[]].order_by, parameter[constant[-start_time]]]
variable[trial_records] assign[=] list[[]]
for taget[name[recent_trial]] in starred[name[recent_trials]] begin[:]
call[name[trial_records].append, parameter[call[name[get_trial_info], parameter[name[recent_trial]]]]]
variable[current_job] assign[=] call[call[call[name[JobRecord].objects.filter, parameter[]].order_by, parameter[constant[-start_time]]]][constant[0]]
if compare[call[name[len], parameter[name[trial_records]]] greater[>] constant[0]] begin[:]
variable[param_keys] assign[=] call[call[call[name[trial_records]][constant[0]]][constant[params]].keys, parameter[]]
variable[metric_keys] assign[=] list[[<ast.Constant object at 0x7da18fe91000>, <ast.Constant object at 0x7da18fe918d0>, <ast.Constant object at 0x7da18fe91840>]]
variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da18fe93400>, <ast.Constant object at 0x7da18fe92c20>, <ast.Constant object at 0x7da18fe91b10>, <ast.Constant object at 0x7da18fe903a0>, <ast.Constant object at 0x7da18fe90f10>, <ast.Constant object at 0x7da18fe93c10>, <ast.Constant object at 0x7da18fe92020>], [<ast.Call object at 0x7da18fe91ba0>, <ast.Name object at 0x7da18fe91a80>, <ast.Name object at 0x7da18fe91600>, <ast.Name object at 0x7da18fe90e80>, <ast.Call object at 0x7da18fe93d30>, <ast.Name object at 0x7da18fe92da0>, <ast.Call object at 0x7da18fe92710>]]
return[call[name[render], parameter[name[request], constant[job.html], name[context]]]] | keyword[def] identifier[job] ( identifier[request] ):
literal[string]
identifier[job_id] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] )
identifier[recent_jobs] = identifier[JobRecord] . identifier[objects] . identifier[order_by] ( literal[string] )[ literal[int] : literal[int] ]
identifier[recent_trials] = identifier[TrialRecord] . identifier[objects] . identifier[filter] ( identifier[job_id] = identifier[job_id] ). identifier[order_by] ( literal[string] )
identifier[trial_records] =[]
keyword[for] identifier[recent_trial] keyword[in] identifier[recent_trials] :
identifier[trial_records] . identifier[append] ( identifier[get_trial_info] ( identifier[recent_trial] ))
identifier[current_job] = identifier[JobRecord] . identifier[objects] . identifier[filter] ( identifier[job_id] = identifier[job_id] ). identifier[order_by] ( literal[string] )[ literal[int] ]
keyword[if] identifier[len] ( identifier[trial_records] )> literal[int] :
identifier[param_keys] = identifier[trial_records] [ literal[int] ][ literal[string] ]. identifier[keys] ()
keyword[else] :
identifier[param_keys] =[]
identifier[metric_keys] =[ literal[string] , literal[string] , literal[string] ]
identifier[context] ={
literal[string] : identifier[get_job_info] ( identifier[current_job] ),
literal[string] : identifier[recent_jobs] ,
literal[string] : identifier[trial_records] ,
literal[string] : identifier[param_keys] ,
literal[string] : identifier[len] ( identifier[param_keys] ),
literal[string] : identifier[metric_keys] ,
literal[string] : identifier[len] ( identifier[metric_keys] )
}
keyword[return] identifier[render] ( identifier[request] , literal[string] , identifier[context] ) | def job(request):
"""View for a single job."""
job_id = request.GET.get('job_id')
recent_jobs = JobRecord.objects.order_by('-start_time')[0:100]
recent_trials = TrialRecord.objects.filter(job_id=job_id).order_by('-start_time')
trial_records = []
for recent_trial in recent_trials:
trial_records.append(get_trial_info(recent_trial)) # depends on [control=['for'], data=['recent_trial']]
current_job = JobRecord.objects.filter(job_id=job_id).order_by('-start_time')[0]
if len(trial_records) > 0:
param_keys = trial_records[0]['params'].keys() # depends on [control=['if'], data=[]]
else:
param_keys = []
# TODO: support custom metrics here
metric_keys = ['episode_reward', 'accuracy', 'loss']
context = {'current_job': get_job_info(current_job), 'recent_jobs': recent_jobs, 'recent_trials': trial_records, 'param_keys': param_keys, 'param_num': len(param_keys), 'metric_keys': metric_keys, 'metric_num': len(metric_keys)}
return render(request, 'job.html', context) |
def decode(self, bytes, raw=False):
    """decode(bytearray, raw=False) -> value
    Decodes the given bytearray containing the elapsed time in
    seconds since the GPS epoch and returns the corresponding
    Python :class:`datetime`.
    If the optional parameter ``raw`` is ``True``, the integral
    number of seconds will be returned instead.
    """
    # Let the parent type decode the raw integral seconds value.
    sec = super(Time32Type, self).decode(bytes)
    # Return the seconds as-is when raw output is requested; otherwise
    # convert GPS-epoch seconds to a local datetime via the dmc helper.
    return sec if raw else dmc.toLocalTime(sec) | def function[decode, parameter[self, bytes, raw]]:
constant[decode(bytearray, raw=False) -> value
Decodes the given bytearray containing the elapsed time in
seconds since the GPS epoch and returns the corresponding
Python :class:`datetime`.
If the optional parameter ``raw`` is ``True``, the integral
number of seconds will be returned instead.
]
variable[sec] assign[=] call[call[name[super], parameter[name[Time32Type], name[self]]].decode, parameter[name[bytes]]]
return[<ast.IfExp object at 0x7da2054a4f70>] | keyword[def] identifier[decode] ( identifier[self] , identifier[bytes] , identifier[raw] = keyword[False] ):
literal[string]
identifier[sec] = identifier[super] ( identifier[Time32Type] , identifier[self] ). identifier[decode] ( identifier[bytes] )
keyword[return] identifier[sec] keyword[if] identifier[raw] keyword[else] identifier[dmc] . identifier[toLocalTime] ( identifier[sec] ) | def decode(self, bytes, raw=False):
"""decode(bytearray, raw=False) -> value
Decodes the given bytearray containing the elapsed time in
seconds since the GPS epoch and returns the corresponding
Python :class:`datetime`.
If the optional parameter ``raw`` is ``True``, the integral
number of seconds will be returned instead.
"""
sec = super(Time32Type, self).decode(bytes)
return sec if raw else dmc.toLocalTime(sec) |
def get_data_port_m(self, data_port_id):
    """Searches and returns the model of a data port of a given state
    The method searches a port with the given id in the data ports of the given state model. If the state model
    is a container state, not only the input and output data ports are looked at, but also the scoped variables.
    :param data_port_id: The data port id to be searched
    :return: The model of the data port or None if it is not found
    """
    # Scoped variables are checked first; they only exist on container states.
    for scoped_var_m in self.scoped_variables:
        if scoped_var_m.scoped_variable.data_port_id == data_port_id:
            return scoped_var_m
    # Fall back to the base-class lookup over input/output data ports.
    return StateModel.get_data_port_m(self, data_port_id) | def function[get_data_port_m, parameter[self, data_port_id]]:
constant[Searches and returns the model of a data port of a given state
The method searches a port with the given id in the data ports of the given state model. If the state model
is a container state, not only the input and output data ports are looked at, but also the scoped variables.
:param data_port_id: The data port id to be searched
:return: The model of the data port or None if it is not found
]
for taget[name[scoped_var_m]] in starred[name[self].scoped_variables] begin[:]
if compare[name[scoped_var_m].scoped_variable.data_port_id equal[==] name[data_port_id]] begin[:]
return[name[scoped_var_m]]
return[call[name[StateModel].get_data_port_m, parameter[name[self], name[data_port_id]]]] | keyword[def] identifier[get_data_port_m] ( identifier[self] , identifier[data_port_id] ):
literal[string]
keyword[for] identifier[scoped_var_m] keyword[in] identifier[self] . identifier[scoped_variables] :
keyword[if] identifier[scoped_var_m] . identifier[scoped_variable] . identifier[data_port_id] == identifier[data_port_id] :
keyword[return] identifier[scoped_var_m]
keyword[return] identifier[StateModel] . identifier[get_data_port_m] ( identifier[self] , identifier[data_port_id] ) | def get_data_port_m(self, data_port_id):
"""Searches and returns the model of a data port of a given state
The method searches a port with the given id in the data ports of the given state model. If the state model
is a container state, not only the input and output data ports are looked at, but also the scoped variables.
:param data_port_id: The data port id to be searched
:return: The model of the data port or None if it is not found
"""
for scoped_var_m in self.scoped_variables:
if scoped_var_m.scoped_variable.data_port_id == data_port_id:
return scoped_var_m # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['scoped_var_m']]
return StateModel.get_data_port_m(self, data_port_id) |
def p_multiplicative_expr(self, p):
    """multiplicative_expr : unary_expr
    | multiplicative_expr MULT unary_expr
    | multiplicative_expr DIV unary_expr
    | multiplicative_expr MOD unary_expr
    """
    # NOTE: the docstring above is the grammar rule consumed by the parser
    # generator (PLY-style) — do not edit it casually.
    if len(p) == 2:
        # Single-symbol production: pass the unary expression through unchanged.
        p[0] = p[1]
    else:
        # Binary production: fold into a BinOp node; p[2] is the operator token.
        p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3]) | def function[p_multiplicative_expr, parameter[self, p]]:
constant[multiplicative_expr : unary_expr
| multiplicative_expr MULT unary_expr
| multiplicative_expr DIV unary_expr
| multiplicative_expr MOD unary_expr
]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:]
call[name[p]][constant[0]] assign[=] call[name[p]][constant[1]] | keyword[def] identifier[p_multiplicative_expr] ( identifier[self] , identifier[p] ):
literal[string]
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]
keyword[else] :
identifier[p] [ literal[int] ]= identifier[ast] . identifier[BinOp] ( identifier[op] = identifier[p] [ literal[int] ], identifier[left] = identifier[p] [ literal[int] ], identifier[right] = identifier[p] [ literal[int] ]) | def p_multiplicative_expr(self, p):
"""multiplicative_expr : unary_expr
| multiplicative_expr MULT unary_expr
| multiplicative_expr DIV unary_expr
| multiplicative_expr MOD unary_expr
"""
if len(p) == 2:
p[0] = p[1] # depends on [control=['if'], data=[]]
else:
p[0] = ast.BinOp(op=p[2], left=p[1], right=p[3]) |
def intify(obj, str_fun=str, use_ord=True, use_hash=True, use_len=True):
    """FIXME: this is unpythonic and does things you don't expect!
    FIXME: rename to "integer_from_category"
    Returns an integer representative of a categorical object (string, dict, etc)
    >>> intify('1.2345e10')
    12345000000
    >>> intify([12]), intify('[99]'), intify('(12,)')
    (91, 91, 40)
    >>> intify('A'), intify('a'), intify('AAA'), intify('B'), intify('BB')
    (97, 97, 97, 98, 98)
    >>> intify(272)
    272
    >>> intify(float('nan'), use_ord=False, use_hash=False, str_fun=None)
    >>> intify(float('nan'), use_ord=False, use_hash=False, use_len=False)
    >>> intify(float('nan')), intify('n'), intify(None)
    (110, 110, 110)
    >>> intify(None, use_ord=False, use_hash=False, use_len=False)
    >>> intify(None, use_ord=False, use_hash=False, str_fun=False)
    >>> intify(None, use_hash=False, str_fun=False)
    """
    # Strategy 1: direct integer conversion (ints, int-like strings).
    try:
        return int(obj)
    except (IndexError, ValueError, AttributeError, TypeError):
        pass
    # Strategy 2: float conversion, accepted only for finite values
    # (the strict inequality excludes nan and +/-inf).
    try:
        float_obj = float(obj)
        if float('-inf') < float_obj < float('inf'):
            # WARN: This will increment sys.maxint by +1 and decrement sys.maxint by -1!!!!
            # But hopefully these cases will be dealt with as expected, above
            return int(float_obj)
    except (IndexError, ValueError, AttributeError, TypeError):
        pass
    # When no stringifier is provided (str_fun is None/False), fall back to
    # an identity function so the remaining strategies see the raw object.
    if not str_fun:
        def str_fun(x):
            return x
    # Strategy 3: ordinal of the first character of the (lowercased)
    # stringified object — e.g. 'AAA' -> ord('a').
    if use_ord:
        try:
            return ord(str_fun(obj)[0].lower())
        except (IndexError, ValueError, AttributeError, TypeError):
            pass
    # Strategy 4: hash of the stringified object.
    if use_hash:
        try:
            return hash(str_fun(obj))
        except (IndexError, ValueError, AttributeError, TypeError):
            pass
    # Strategy 5: length of the object itself, then of its stringified form.
    if use_len:
        try:
            return len(obj)
        except (IndexError, ValueError, AttributeError, TypeError):
            pass
        try:
            return len(str_fun(obj))
        except (IndexError, ValueError, AttributeError, TypeError):
            pass
    # Every strategy failed (or was disabled): no integer representative.
    return None | def function[intify, parameter[obj, str_fun, use_ord, use_hash, use_len]]:
constant[FIXME: this is unpythonic and does things you don't expect!
FIXME: rename to "integer_from_category"
Returns an integer representative of a categorical object (string, dict, etc)
>>> intify('1.2345e10')
12345000000
>>> intify([12]), intify('[99]'), intify('(12,)')
(91, 91, 40)
>>> intify('A'), intify('a'), intify('AAA'), intify('B'), intify('BB')
(97, 97, 97, 98, 98)
>>> intify(272)
272
>>> intify(float('nan'), use_ord=False, use_hash=False, str_fun=None)
>>> intify(float('nan'), use_ord=False, use_hash=False, use_len=False)
>>> intify(float('nan')), intify('n'), intify(None)
(110, 110, 110)
>>> intify(None, use_ord=False, use_hash=False, use_len=False)
>>> intify(None, use_ord=False, use_hash=False, str_fun=False)
>>> intify(None, use_hash=False, str_fun=False)
]
<ast.Try object at 0x7da1b24ed900>
<ast.Try object at 0x7da1b24ec040>
if <ast.UnaryOp object at 0x7da1b24ec280> begin[:]
def function[str_fun, parameter[x]]:
return[name[x]]
if name[use_ord] begin[:]
<ast.Try object at 0x7da1b24ecc40>
if name[use_hash] begin[:]
<ast.Try object at 0x7da1b24ecfa0>
if name[use_len] begin[:]
<ast.Try object at 0x7da1b24ec340>
<ast.Try object at 0x7da1b24ec220>
return[constant[None]] | keyword[def] identifier[intify] ( identifier[obj] , identifier[str_fun] = identifier[str] , identifier[use_ord] = keyword[True] , identifier[use_hash] = keyword[True] , identifier[use_len] = keyword[True] ):
literal[string]
keyword[try] :
keyword[return] identifier[int] ( identifier[obj] )
keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ):
keyword[pass]
keyword[try] :
identifier[float_obj] = identifier[float] ( identifier[obj] )
keyword[if] identifier[float] ( literal[string] )< identifier[float_obj] < identifier[float] ( literal[string] ):
keyword[return] identifier[int] ( identifier[float_obj] )
keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ):
keyword[pass]
keyword[if] keyword[not] identifier[str_fun] :
keyword[def] identifier[str_fun] ( identifier[x] ):
keyword[return] identifier[x]
keyword[if] identifier[use_ord] :
keyword[try] :
keyword[return] identifier[ord] ( identifier[str_fun] ( identifier[obj] )[ literal[int] ]. identifier[lower] ())
keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ):
keyword[pass]
keyword[if] identifier[use_hash] :
keyword[try] :
keyword[return] identifier[hash] ( identifier[str_fun] ( identifier[obj] ))
keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ):
keyword[pass]
keyword[if] identifier[use_len] :
keyword[try] :
keyword[return] identifier[len] ( identifier[obj] )
keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ):
keyword[pass]
keyword[try] :
keyword[return] identifier[len] ( identifier[str_fun] ( identifier[obj] ))
keyword[except] ( identifier[IndexError] , identifier[ValueError] , identifier[AttributeError] , identifier[TypeError] ):
keyword[pass]
keyword[return] keyword[None] | def intify(obj, str_fun=str, use_ord=True, use_hash=True, use_len=True):
"""FIXME: this is unpythonic and does things you don't expect!
FIXME: rename to "integer_from_category"
Returns an integer representative of a categorical object (string, dict, etc)
>>> intify('1.2345e10')
12345000000
>>> intify([12]), intify('[99]'), intify('(12,)')
(91, 91, 40)
>>> intify('A'), intify('a'), intify('AAA'), intify('B'), intify('BB')
(97, 97, 97, 98, 98)
>>> intify(272)
272
>>> intify(float('nan'), use_ord=False, use_hash=False, str_fun=None)
>>> intify(float('nan'), use_ord=False, use_hash=False, use_len=False)
>>> intify(float('nan')), intify('n'), intify(None)
(110, 110, 110)
>>> intify(None, use_ord=False, use_hash=False, use_len=False)
>>> intify(None, use_ord=False, use_hash=False, str_fun=False)
>>> intify(None, use_hash=False, str_fun=False)
"""
try:
return int(obj) # depends on [control=['try'], data=[]]
except (IndexError, ValueError, AttributeError, TypeError):
pass # depends on [control=['except'], data=[]]
try:
float_obj = float(obj)
if float('-inf') < float_obj < float('inf'):
# WARN: This will increment sys.maxint by +1 and decrement sys.maxint by -1!!!!
# But hopefully these cases will be dealt with as expected, above
return int(float_obj) # depends on [control=['if'], data=['float_obj']] # depends on [control=['try'], data=[]]
except (IndexError, ValueError, AttributeError, TypeError):
pass # depends on [control=['except'], data=[]]
if not str_fun:
def str_fun(x):
return x # depends on [control=['if'], data=[]]
if use_ord:
try:
return ord(str_fun(obj)[0].lower()) # depends on [control=['try'], data=[]]
except (IndexError, ValueError, AttributeError, TypeError):
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if use_hash:
try:
return hash(str_fun(obj)) # depends on [control=['try'], data=[]]
except (IndexError, ValueError, AttributeError, TypeError):
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if use_len:
try:
return len(obj) # depends on [control=['try'], data=[]]
except (IndexError, ValueError, AttributeError, TypeError):
pass # depends on [control=['except'], data=[]]
try:
return len(str_fun(obj)) # depends on [control=['try'], data=[]]
except (IndexError, ValueError, AttributeError, TypeError):
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return None |
def techport(Id):
    '''
    In order to use this capability, queries can be issued to the system with the following URI
    format:
    GET /xml-api/id_parameter
    Parameter Required? Value Description
    id_parameter Yes Type: String
    Default: None
    The id value of the TechPort record.
    TechPort values range from 0-20000.
    Not all values will yield results. Id
    values can be obtained through the
    standard TechPort search feature and
    are visible in the website URLs, e.g.
    http://techport.nasa.gov/view/0000,
    where 0000 is the id value.
    Example usage:
    http://techport.nasa.gov/xml-api/4795
    Output: The output of this query is an XML file with all field data of the TechPort record.
    '''
    base_url = 'http://techport.nasa.gov/xml-api/'
    # Validate the type before concatenating: the record id must already be a
    # string (no implicit int-to-str conversion is attempted).
    if not isinstance(Id, str):
        raise ValueError("The Id arg you provided is not the type of str")
    else:
        base_url += Id
    # Delegate the actual HTTP GET to the shared dispatcher.
    return dispatch_http_get(base_url) | def function[techport, parameter[Id]]:
constant[
In order to use this capability, queries can be issued to the system with the following URI
format:
GET /xml-api/id_parameter
Parameter Required? Value Description
id_parameter Yes Type: String
Default: None
The id value of the TechPort record.
TechPort values range from 0-20000.
Not all values will yield results. Id
values can be obtained through the
standard TechPort search feature and
are visible in the website URLs, e.g.
http://techport.nasa.gov/view/0000,
where 0000 is the id value.
Example usage:
http://techport.nasa.gov/xml-api/4795
Output: The output of this query is an XML file with all field data of the TechPort record.
]
variable[base_url] assign[=] constant[http://techport.nasa.gov/xml-api/]
if <ast.UnaryOp object at 0x7da204346f80> begin[:]
<ast.Raise object at 0x7da2043453f0>
return[call[name[dispatch_http_get], parameter[name[base_url]]]] | keyword[def] identifier[techport] ( identifier[Id] ):
literal[string]
identifier[base_url] = literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[Id] , identifier[str] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
identifier[base_url] += identifier[Id]
keyword[return] identifier[dispatch_http_get] ( identifier[base_url] ) | def techport(Id):
"""
In order to use this capability, queries can be issued to the system with the following URI
format:
GET /xml-api/id_parameter
Parameter Required? Value Description
id_parameter Yes Type: String
Default: None
The id value of the TechPort record.
TechPort values range from 0-20000.
Not all values will yield results. Id
values can be obtained through the
standard TechPort search feature and
are visible in the website URLs, e.g.
http://techport.nasa.gov/view/0000,
where 0000 is the id value.
Example usage:
http://techport.nasa.gov/xml-api/4795
Output: The output of this query is an XML file with all field data of the TechPort record.
"""
base_url = 'http://techport.nasa.gov/xml-api/'
if not isinstance(Id, str):
raise ValueError('The Id arg you provided is not the type of str') # depends on [control=['if'], data=[]]
else:
base_url += Id
return dispatch_http_get(base_url) |
def find_msvcrt():
    """
    Likely useless and will return None, see https://bugs.python.org/issue23606
    Offered for full compatibility, though.
    """
    # Compile Python command for wine-python
    command = '"from ctypes.util import find_msvcrt; print(find_msvcrt())"'
    # Start wine-python
    # NOTE(review): shell=True with string concatenation — acceptable only
    # because `command` is a fixed literal; never extend with external input.
    # The missing space after -c is intentional: the shell fuses the token and
    # the interpreter still treats everything after -c as the program text.
    winepython_p = subprocess.Popen(
        'wine-python -c' + command,
        stdout = subprocess.PIPE,
        stderr = subprocess.PIPE,
        shell = True
    )
    # Get stdout and stderr (communicate() also waits for the process to exit).
    winepython_out, winepython_err = winepython_p.communicate()
    # Change encoding from bytes to text and drop surrounding whitespace.
    winepython_out = winepython_out.decode(encoding = 'UTF-8').strip()
    # Handle None values: the subprocess prints the literal text "None" when
    # find_msvcrt() fails; normalize that (and empty output) to a real None.
    if winepython_out in ['', 'None']:
        winepython_out = None
    return winepython_out | def function[find_msvcrt, parameter[]]:
constant[
Likely useless and will return None, see https://bugs.python.org/issue23606
Offered for full compatibility, though.
]
variable[command] assign[=] constant["from ctypes.util import find_msvcrt; print(find_msvcrt())"]
variable[winepython_p] assign[=] call[name[subprocess].Popen, parameter[binary_operation[constant[wine-python -c] + name[command]]]]
<ast.Tuple object at 0x7da1b0b7d510> assign[=] call[name[winepython_p].communicate, parameter[]]
variable[winepython_out] assign[=] call[call[name[winepython_out].decode, parameter[]].strip, parameter[]]
if compare[name[winepython_out] in list[[<ast.Constant object at 0x7da1b0b525f0>, <ast.Constant object at 0x7da1b0b52500>]]] begin[:]
variable[winepython_out] assign[=] constant[None]
return[name[winepython_out]] | keyword[def] identifier[find_msvcrt] ():
literal[string]
identifier[command] = literal[string]
identifier[winepython_p] = identifier[subprocess] . identifier[Popen] (
literal[string] + identifier[command] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ,
identifier[shell] = keyword[True]
)
identifier[winepython_out] , identifier[winepython_err] = identifier[winepython_p] . identifier[communicate] ()
identifier[winepython_out] = identifier[winepython_out] . identifier[decode] ( identifier[encoding] = literal[string] ). identifier[strip] ()
keyword[if] identifier[winepython_out] keyword[in] [ literal[string] , literal[string] ]:
identifier[winepython_out] = keyword[None]
keyword[return] identifier[winepython_out] | def find_msvcrt():
"""
Likely useless and will return None, see https://bugs.python.org/issue23606
Offered for full compatibility, though.
""" # Compile Python command for wine-python
command = '"from ctypes.util import find_msvcrt; print(find_msvcrt())"' # Start wine-python
winepython_p = subprocess.Popen('wine-python -c' + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) # Get stdout and stderr
(winepython_out, winepython_err) = winepython_p.communicate() # Change encoding
winepython_out = winepython_out.decode(encoding='UTF-8').strip() # Handle None values
if winepython_out in ['', 'None']:
winepython_out = None # depends on [control=['if'], data=['winepython_out']]
return winepython_out |
def pre_deploy_assume_role(assume_role_config, context):
    """Assume role (prior to deployment)."""
    if isinstance(assume_role_config, dict):
        assume_role_arn = ''
        # Optionally snapshot the current IAM env vars so they can be
        # restored after deployment.
        if assume_role_config.get('post_deploy_env_revert'):
            context.save_existing_iam_env_vars()
        if assume_role_config.get('arn'):
            # A top-level 'arn' applies to every environment.
            assume_role_arn = assume_role_config['arn']
            assume_role_duration = assume_role_config.get('duration')
        elif assume_role_config.get(context.env_name):
            # Per-environment entry: either a dict with arn/duration keys
            # or a bare ARN string.
            if isinstance(assume_role_config[context.env_name], dict):
                assume_role_arn = assume_role_config[context.env_name]['arn']  # noqa
                assume_role_duration = assume_role_config[context.env_name].get('duration')  # noqa pylint: disable=line-too-long
            else:
                assume_role_arn = assume_role_config[context.env_name]
                assume_role_duration = None
        else:
            # No matching role: leave assume_role_arn empty so the block
            # below is skipped.
            LOGGER.info('Skipping assume-role; no role found for '
                        'environment %s...',
                        context.env_name)
        if assume_role_arn:
            # Merge the temporary credentials into the deployment env vars.
            context.env_vars = merge_dicts(
                context.env_vars,
                assume_role(
                    role_arn=assume_role_arn,
                    session_name=assume_role_config.get('session_name', None),
                    duration_seconds=assume_role_duration,
                    region=context.env_region,
                    env_vars=context.env_vars
                )
            )
    else:
        # A plain (non-dict) config value is treated directly as the role ARN.
        context.env_vars = merge_dicts(
            context.env_vars,
            assume_role(role_arn=assume_role_config,
                        region=context.env_region,
                        env_vars=context.env_vars)
        ) | def function[pre_deploy_assume_role, parameter[assume_role_config, context]]:
constant[Assume role (prior to deployment).]
if call[name[isinstance], parameter[name[assume_role_config], name[dict]]] begin[:]
variable[assume_role_arn] assign[=] constant[]
if call[name[assume_role_config].get, parameter[constant[post_deploy_env_revert]]] begin[:]
call[name[context].save_existing_iam_env_vars, parameter[]]
if call[name[assume_role_config].get, parameter[constant[arn]]] begin[:]
variable[assume_role_arn] assign[=] call[name[assume_role_config]][constant[arn]]
variable[assume_role_duration] assign[=] call[name[assume_role_config].get, parameter[constant[duration]]]
if name[assume_role_arn] begin[:]
name[context].env_vars assign[=] call[name[merge_dicts], parameter[name[context].env_vars, call[name[assume_role], parameter[]]]] | keyword[def] identifier[pre_deploy_assume_role] ( identifier[assume_role_config] , identifier[context] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[assume_role_config] , identifier[dict] ):
identifier[assume_role_arn] = literal[string]
keyword[if] identifier[assume_role_config] . identifier[get] ( literal[string] ):
identifier[context] . identifier[save_existing_iam_env_vars] ()
keyword[if] identifier[assume_role_config] . identifier[get] ( literal[string] ):
identifier[assume_role_arn] = identifier[assume_role_config] [ literal[string] ]
identifier[assume_role_duration] = identifier[assume_role_config] . identifier[get] ( literal[string] )
keyword[elif] identifier[assume_role_config] . identifier[get] ( identifier[context] . identifier[env_name] ):
keyword[if] identifier[isinstance] ( identifier[assume_role_config] [ identifier[context] . identifier[env_name] ], identifier[dict] ):
identifier[assume_role_arn] = identifier[assume_role_config] [ identifier[context] . identifier[env_name] ][ literal[string] ]
identifier[assume_role_duration] = identifier[assume_role_config] [ identifier[context] . identifier[env_name] ]. identifier[get] ( literal[string] )
keyword[else] :
identifier[assume_role_arn] = identifier[assume_role_config] [ identifier[context] . identifier[env_name] ]
identifier[assume_role_duration] = keyword[None]
keyword[else] :
identifier[LOGGER] . identifier[info] ( literal[string]
literal[string] ,
identifier[context] . identifier[env_name] )
keyword[if] identifier[assume_role_arn] :
identifier[context] . identifier[env_vars] = identifier[merge_dicts] (
identifier[context] . identifier[env_vars] ,
identifier[assume_role] (
identifier[role_arn] = identifier[assume_role_arn] ,
identifier[session_name] = identifier[assume_role_config] . identifier[get] ( literal[string] , keyword[None] ),
identifier[duration_seconds] = identifier[assume_role_duration] ,
identifier[region] = identifier[context] . identifier[env_region] ,
identifier[env_vars] = identifier[context] . identifier[env_vars]
)
)
keyword[else] :
identifier[context] . identifier[env_vars] = identifier[merge_dicts] (
identifier[context] . identifier[env_vars] ,
identifier[assume_role] ( identifier[role_arn] = identifier[assume_role_config] ,
identifier[region] = identifier[context] . identifier[env_region] ,
identifier[env_vars] = identifier[context] . identifier[env_vars] )
) | def pre_deploy_assume_role(assume_role_config, context):
"""Assume role (prior to deployment)."""
if isinstance(assume_role_config, dict):
assume_role_arn = ''
if assume_role_config.get('post_deploy_env_revert'):
context.save_existing_iam_env_vars() # depends on [control=['if'], data=[]]
if assume_role_config.get('arn'):
assume_role_arn = assume_role_config['arn']
assume_role_duration = assume_role_config.get('duration') # depends on [control=['if'], data=[]]
elif assume_role_config.get(context.env_name):
if isinstance(assume_role_config[context.env_name], dict):
assume_role_arn = assume_role_config[context.env_name]['arn'] # noqa
assume_role_duration = assume_role_config[context.env_name].get('duration') # noqa pylint: disable=line-too-long # depends on [control=['if'], data=[]]
else:
assume_role_arn = assume_role_config[context.env_name]
assume_role_duration = None # depends on [control=['if'], data=[]]
else:
LOGGER.info('Skipping assume-role; no role found for environment %s...', context.env_name)
if assume_role_arn:
context.env_vars = merge_dicts(context.env_vars, assume_role(role_arn=assume_role_arn, session_name=assume_role_config.get('session_name', None), duration_seconds=assume_role_duration, region=context.env_region, env_vars=context.env_vars)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
context.env_vars = merge_dicts(context.env_vars, assume_role(role_arn=assume_role_config, region=context.env_region, env_vars=context.env_vars)) |
def _try_backup_item(self):
"""Check if a backup item is available in cache and call
the item handler if it is.
:return: `True` if backup item was found.
:returntype: `bool`"""
if not self._backup_state:
return False
item = self.cache.get_item(self.address, self._backup_state)
if item:
self._object_handler(item.address, item.value, item.state)
return True
else:
False | def function[_try_backup_item, parameter[self]]:
constant[Check if a backup item is available in cache and call
the item handler if it is.
:return: `True` if backup item was found.
:returntype: `bool`]
if <ast.UnaryOp object at 0x7da20c993d00> begin[:]
return[constant[False]]
variable[item] assign[=] call[name[self].cache.get_item, parameter[name[self].address, name[self]._backup_state]]
if name[item] begin[:]
call[name[self]._object_handler, parameter[name[item].address, name[item].value, name[item].state]]
return[constant[True]] | keyword[def] identifier[_try_backup_item] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_backup_state] :
keyword[return] keyword[False]
identifier[item] = identifier[self] . identifier[cache] . identifier[get_item] ( identifier[self] . identifier[address] , identifier[self] . identifier[_backup_state] )
keyword[if] identifier[item] :
identifier[self] . identifier[_object_handler] ( identifier[item] . identifier[address] , identifier[item] . identifier[value] , identifier[item] . identifier[state] )
keyword[return] keyword[True]
keyword[else] :
keyword[False] | def _try_backup_item(self):
"""Check if a backup item is available in cache and call
the item handler if it is.
:return: `True` if backup item was found.
:returntype: `bool`"""
if not self._backup_state:
return False # depends on [control=['if'], data=[]]
item = self.cache.get_item(self.address, self._backup_state)
if item:
self._object_handler(item.address, item.value, item.state)
return True # depends on [control=['if'], data=[]]
else:
False |
def destructuring_stmt_handle(self, original, loc, tokens):
"""Process match assign blocks."""
internal_assert(len(tokens) == 2, "invalid destructuring assignment tokens", tokens)
matches, item = tokens
out = match_handle(loc, [matches, "in", item, None])
out += self.pattern_error(original, loc, match_to_var, match_check_var)
return out | def function[destructuring_stmt_handle, parameter[self, original, loc, tokens]]:
constant[Process match assign blocks.]
call[name[internal_assert], parameter[compare[call[name[len], parameter[name[tokens]]] equal[==] constant[2]], constant[invalid destructuring assignment tokens], name[tokens]]]
<ast.Tuple object at 0x7da20c7c8940> assign[=] name[tokens]
variable[out] assign[=] call[name[match_handle], parameter[name[loc], list[[<ast.Name object at 0x7da20c7cb760>, <ast.Constant object at 0x7da20c7cb610>, <ast.Name object at 0x7da20c7cb010>, <ast.Constant object at 0x7da20c7cb160>]]]]
<ast.AugAssign object at 0x7da20c7caf20>
return[name[out]] | keyword[def] identifier[destructuring_stmt_handle] ( identifier[self] , identifier[original] , identifier[loc] , identifier[tokens] ):
literal[string]
identifier[internal_assert] ( identifier[len] ( identifier[tokens] )== literal[int] , literal[string] , identifier[tokens] )
identifier[matches] , identifier[item] = identifier[tokens]
identifier[out] = identifier[match_handle] ( identifier[loc] ,[ identifier[matches] , literal[string] , identifier[item] , keyword[None] ])
identifier[out] += identifier[self] . identifier[pattern_error] ( identifier[original] , identifier[loc] , identifier[match_to_var] , identifier[match_check_var] )
keyword[return] identifier[out] | def destructuring_stmt_handle(self, original, loc, tokens):
"""Process match assign blocks."""
internal_assert(len(tokens) == 2, 'invalid destructuring assignment tokens', tokens)
(matches, item) = tokens
out = match_handle(loc, [matches, 'in', item, None])
out += self.pattern_error(original, loc, match_to_var, match_check_var)
return out |
def start(self):
"""
start
"""
def main_thread():
# create resp, req thread pool
self._create_thread_pool()
# start connection, this will block until stop()
self.conn_thread = Thread(target=self._conn.connect)
self.conn_thread.daemon = True
self.conn_thread.start()
# register model to controller...
self.is_ready.wait()
if hasattr(self, 'run'):
_logger.debug("Start running...")
self.run()
# start main_thread
self.main_thread = Thread(target=main_thread)
self.main_thread.daemon = True
self.main_thread.start()
if threading.current_thread().__class__.__name__ == '_MainThread':
# control this bundle stop or not
while not self.stop_event.wait(1):
sleep(1)
else:
self.stop_event.wait()
self.stop()
_logger.debug("Shutdown successfully") | def function[start, parameter[self]]:
constant[
start
]
def function[main_thread, parameter[]]:
call[name[self]._create_thread_pool, parameter[]]
name[self].conn_thread assign[=] call[name[Thread], parameter[]]
name[self].conn_thread.daemon assign[=] constant[True]
call[name[self].conn_thread.start, parameter[]]
call[name[self].is_ready.wait, parameter[]]
if call[name[hasattr], parameter[name[self], constant[run]]] begin[:]
call[name[_logger].debug, parameter[constant[Start running...]]]
call[name[self].run, parameter[]]
name[self].main_thread assign[=] call[name[Thread], parameter[]]
name[self].main_thread.daemon assign[=] constant[True]
call[name[self].main_thread.start, parameter[]]
if compare[call[name[threading].current_thread, parameter[]].__class__.__name__ equal[==] constant[_MainThread]] begin[:]
while <ast.UnaryOp object at 0x7da1b0b3a2c0> begin[:]
call[name[sleep], parameter[constant[1]]]
call[name[self].stop, parameter[]]
call[name[_logger].debug, parameter[constant[Shutdown successfully]]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[def] identifier[main_thread] ():
identifier[self] . identifier[_create_thread_pool] ()
identifier[self] . identifier[conn_thread] = identifier[Thread] ( identifier[target] = identifier[self] . identifier[_conn] . identifier[connect] )
identifier[self] . identifier[conn_thread] . identifier[daemon] = keyword[True]
identifier[self] . identifier[conn_thread] . identifier[start] ()
identifier[self] . identifier[is_ready] . identifier[wait] ()
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[run] ()
identifier[self] . identifier[main_thread] = identifier[Thread] ( identifier[target] = identifier[main_thread] )
identifier[self] . identifier[main_thread] . identifier[daemon] = keyword[True]
identifier[self] . identifier[main_thread] . identifier[start] ()
keyword[if] identifier[threading] . identifier[current_thread] (). identifier[__class__] . identifier[__name__] == literal[string] :
keyword[while] keyword[not] identifier[self] . identifier[stop_event] . identifier[wait] ( literal[int] ):
identifier[sleep] ( literal[int] )
keyword[else] :
identifier[self] . identifier[stop_event] . identifier[wait] ()
identifier[self] . identifier[stop] ()
identifier[_logger] . identifier[debug] ( literal[string] ) | def start(self):
"""
start
"""
def main_thread():
# create resp, req thread pool
self._create_thread_pool()
# start connection, this will block until stop()
self.conn_thread = Thread(target=self._conn.connect)
self.conn_thread.daemon = True
self.conn_thread.start()
# register model to controller...
self.is_ready.wait()
if hasattr(self, 'run'):
_logger.debug('Start running...')
self.run() # depends on [control=['if'], data=[]]
# start main_thread
self.main_thread = Thread(target=main_thread)
self.main_thread.daemon = True
self.main_thread.start()
if threading.current_thread().__class__.__name__ == '_MainThread':
# control this bundle stop or not
while not self.stop_event.wait(1):
sleep(1) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else:
self.stop_event.wait()
self.stop()
_logger.debug('Shutdown successfully') |
def sync(self):
"""Synchronise and update the stored state to the in-memory state."""
if self.filepath:
serialised_file = open(self.filepath, "w")
json.dump(self.state, serialised_file)
serialised_file.close()
else:
print("Filepath to the persistence file is not set. State cannot be synced to disc.") | def function[sync, parameter[self]]:
constant[Synchronise and update the stored state to the in-memory state.]
if name[self].filepath begin[:]
variable[serialised_file] assign[=] call[name[open], parameter[name[self].filepath, constant[w]]]
call[name[json].dump, parameter[name[self].state, name[serialised_file]]]
call[name[serialised_file].close, parameter[]] | keyword[def] identifier[sync] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[filepath] :
identifier[serialised_file] = identifier[open] ( identifier[self] . identifier[filepath] , literal[string] )
identifier[json] . identifier[dump] ( identifier[self] . identifier[state] , identifier[serialised_file] )
identifier[serialised_file] . identifier[close] ()
keyword[else] :
identifier[print] ( literal[string] ) | def sync(self):
"""Synchronise and update the stored state to the in-memory state."""
if self.filepath:
serialised_file = open(self.filepath, 'w')
json.dump(self.state, serialised_file)
serialised_file.close() # depends on [control=['if'], data=[]]
else:
print('Filepath to the persistence file is not set. State cannot be synced to disc.') |
def clean_by_request(self, request):
'''
Remove all futures that were waiting for request `request` since it is done waiting
'''
if request not in self.request_map:
return
for tag, matcher, future in self.request_map[request]:
# timeout the future
self._timeout_future(tag, matcher, future)
# remove the timeout
if future in self.timeout_map:
tornado.ioloop.IOLoop.current().remove_timeout(self.timeout_map[future])
del self.timeout_map[future]
del self.request_map[request] | def function[clean_by_request, parameter[self, request]]:
constant[
Remove all futures that were waiting for request `request` since it is done waiting
]
if compare[name[request] <ast.NotIn object at 0x7da2590d7190> name[self].request_map] begin[:]
return[None]
for taget[tuple[[<ast.Name object at 0x7da18bc71fc0>, <ast.Name object at 0x7da18bc70c40>, <ast.Name object at 0x7da18bc708b0>]]] in starred[call[name[self].request_map][name[request]]] begin[:]
call[name[self]._timeout_future, parameter[name[tag], name[matcher], name[future]]]
if compare[name[future] in name[self].timeout_map] begin[:]
call[call[name[tornado].ioloop.IOLoop.current, parameter[]].remove_timeout, parameter[call[name[self].timeout_map][name[future]]]]
<ast.Delete object at 0x7da18bc71570>
<ast.Delete object at 0x7da18bc73fa0> | keyword[def] identifier[clean_by_request] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] identifier[request] keyword[not] keyword[in] identifier[self] . identifier[request_map] :
keyword[return]
keyword[for] identifier[tag] , identifier[matcher] , identifier[future] keyword[in] identifier[self] . identifier[request_map] [ identifier[request] ]:
identifier[self] . identifier[_timeout_future] ( identifier[tag] , identifier[matcher] , identifier[future] )
keyword[if] identifier[future] keyword[in] identifier[self] . identifier[timeout_map] :
identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[current] (). identifier[remove_timeout] ( identifier[self] . identifier[timeout_map] [ identifier[future] ])
keyword[del] identifier[self] . identifier[timeout_map] [ identifier[future] ]
keyword[del] identifier[self] . identifier[request_map] [ identifier[request] ] | def clean_by_request(self, request):
"""
Remove all futures that were waiting for request `request` since it is done waiting
"""
if request not in self.request_map:
return # depends on [control=['if'], data=[]]
for (tag, matcher, future) in self.request_map[request]:
# timeout the future
self._timeout_future(tag, matcher, future)
# remove the timeout
if future in self.timeout_map:
tornado.ioloop.IOLoop.current().remove_timeout(self.timeout_map[future])
del self.timeout_map[future] # depends on [control=['if'], data=['future']] # depends on [control=['for'], data=[]]
del self.request_map[request] |
def do_insertions(insertions, tokens):
"""
Helper for lexers which must combine the results of several
sublexers.
``insertions`` is a list of ``(index, itokens)`` pairs.
Each ``itokens`` iterable should be inserted at position
``index`` into the token stream given by the ``tokens``
argument.
The result is a combined token stream.
TODO: clean up the code here.
"""
insertions = iter(insertions)
try:
index, itokens = next(insertions)
except StopIteration:
# no insertions
for item in tokens:
yield item
return
realpos = None
insleft = True
# iterate over the token stream where we want to insert
# the tokens from the insertion list.
for i, t, v in tokens:
# first iteration. store the postition of first item
if realpos is None:
realpos = i
oldi = 0
while insleft and i + len(v) >= index:
tmpval = v[oldi:index - i]
yield realpos, t, tmpval
realpos += len(tmpval)
for it_index, it_token, it_value in itokens:
yield realpos, it_token, it_value
realpos += len(it_value)
oldi = index - i
try:
index, itokens = next(insertions)
except StopIteration:
insleft = False
break # not strictly necessary
yield realpos, t, v[oldi:]
realpos += len(v) - oldi
# leftover tokens
while insleft:
# no normal tokens, set realpos to zero
realpos = realpos or 0
for p, t, v in itokens:
yield realpos, t, v
realpos += len(v)
try:
index, itokens = next(insertions)
except StopIteration:
insleft = False
break | def function[do_insertions, parameter[insertions, tokens]]:
constant[
Helper for lexers which must combine the results of several
sublexers.
``insertions`` is a list of ``(index, itokens)`` pairs.
Each ``itokens`` iterable should be inserted at position
``index`` into the token stream given by the ``tokens``
argument.
The result is a combined token stream.
TODO: clean up the code here.
]
variable[insertions] assign[=] call[name[iter], parameter[name[insertions]]]
<ast.Try object at 0x7da2047ea560>
variable[realpos] assign[=] constant[None]
variable[insleft] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da18f7229e0>, <ast.Name object at 0x7da18f7226e0>, <ast.Name object at 0x7da18f720be0>]]] in starred[name[tokens]] begin[:]
if compare[name[realpos] is constant[None]] begin[:]
variable[realpos] assign[=] name[i]
variable[oldi] assign[=] constant[0]
while <ast.BoolOp object at 0x7da18f721720> begin[:]
variable[tmpval] assign[=] call[name[v]][<ast.Slice object at 0x7da18f720a90>]
<ast.Yield object at 0x7da18f720100>
<ast.AugAssign object at 0x7da18f721db0>
for taget[tuple[[<ast.Name object at 0x7da18f723550>, <ast.Name object at 0x7da18f7200d0>, <ast.Name object at 0x7da18f720370>]]] in starred[name[itokens]] begin[:]
<ast.Yield object at 0x7da18f7233d0>
<ast.AugAssign object at 0x7da18f721b40>
variable[oldi] assign[=] binary_operation[name[index] - name[i]]
<ast.Try object at 0x7da18f722d40>
<ast.Yield object at 0x7da1b120b910>
<ast.AugAssign object at 0x7da1b120bb80>
while name[insleft] begin[:]
variable[realpos] assign[=] <ast.BoolOp object at 0x7da1b120a170>
for taget[tuple[[<ast.Name object at 0x7da1b1208e20>, <ast.Name object at 0x7da1b120b400>, <ast.Name object at 0x7da1b120b250>]]] in starred[name[itokens]] begin[:]
<ast.Yield object at 0x7da1b1209360>
<ast.AugAssign object at 0x7da1b120b010>
<ast.Try object at 0x7da1b1208ca0> | keyword[def] identifier[do_insertions] ( identifier[insertions] , identifier[tokens] ):
literal[string]
identifier[insertions] = identifier[iter] ( identifier[insertions] )
keyword[try] :
identifier[index] , identifier[itokens] = identifier[next] ( identifier[insertions] )
keyword[except] identifier[StopIteration] :
keyword[for] identifier[item] keyword[in] identifier[tokens] :
keyword[yield] identifier[item]
keyword[return]
identifier[realpos] = keyword[None]
identifier[insleft] = keyword[True]
keyword[for] identifier[i] , identifier[t] , identifier[v] keyword[in] identifier[tokens] :
keyword[if] identifier[realpos] keyword[is] keyword[None] :
identifier[realpos] = identifier[i]
identifier[oldi] = literal[int]
keyword[while] identifier[insleft] keyword[and] identifier[i] + identifier[len] ( identifier[v] )>= identifier[index] :
identifier[tmpval] = identifier[v] [ identifier[oldi] : identifier[index] - identifier[i] ]
keyword[yield] identifier[realpos] , identifier[t] , identifier[tmpval]
identifier[realpos] += identifier[len] ( identifier[tmpval] )
keyword[for] identifier[it_index] , identifier[it_token] , identifier[it_value] keyword[in] identifier[itokens] :
keyword[yield] identifier[realpos] , identifier[it_token] , identifier[it_value]
identifier[realpos] += identifier[len] ( identifier[it_value] )
identifier[oldi] = identifier[index] - identifier[i]
keyword[try] :
identifier[index] , identifier[itokens] = identifier[next] ( identifier[insertions] )
keyword[except] identifier[StopIteration] :
identifier[insleft] = keyword[False]
keyword[break]
keyword[yield] identifier[realpos] , identifier[t] , identifier[v] [ identifier[oldi] :]
identifier[realpos] += identifier[len] ( identifier[v] )- identifier[oldi]
keyword[while] identifier[insleft] :
identifier[realpos] = identifier[realpos] keyword[or] literal[int]
keyword[for] identifier[p] , identifier[t] , identifier[v] keyword[in] identifier[itokens] :
keyword[yield] identifier[realpos] , identifier[t] , identifier[v]
identifier[realpos] += identifier[len] ( identifier[v] )
keyword[try] :
identifier[index] , identifier[itokens] = identifier[next] ( identifier[insertions] )
keyword[except] identifier[StopIteration] :
identifier[insleft] = keyword[False]
keyword[break] | def do_insertions(insertions, tokens):
"""
Helper for lexers which must combine the results of several
sublexers.
``insertions`` is a list of ``(index, itokens)`` pairs.
Each ``itokens`` iterable should be inserted at position
``index`` into the token stream given by the ``tokens``
argument.
The result is a combined token stream.
TODO: clean up the code here.
"""
insertions = iter(insertions)
try:
(index, itokens) = next(insertions) # depends on [control=['try'], data=[]]
except StopIteration:
# no insertions
for item in tokens:
yield item # depends on [control=['for'], data=['item']]
return # depends on [control=['except'], data=[]]
realpos = None
insleft = True
# iterate over the token stream where we want to insert
# the tokens from the insertion list.
for (i, t, v) in tokens:
# first iteration. store the postition of first item
if realpos is None:
realpos = i # depends on [control=['if'], data=['realpos']]
oldi = 0
while insleft and i + len(v) >= index:
tmpval = v[oldi:index - i]
yield (realpos, t, tmpval)
realpos += len(tmpval)
for (it_index, it_token, it_value) in itokens:
yield (realpos, it_token, it_value)
realpos += len(it_value) # depends on [control=['for'], data=[]]
oldi = index - i
try:
(index, itokens) = next(insertions) # depends on [control=['try'], data=[]]
except StopIteration:
insleft = False
break # not strictly necessary # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
yield (realpos, t, v[oldi:])
realpos += len(v) - oldi # depends on [control=['for'], data=[]]
# leftover tokens
while insleft:
# no normal tokens, set realpos to zero
realpos = realpos or 0
for (p, t, v) in itokens:
yield (realpos, t, v)
realpos += len(v) # depends on [control=['for'], data=[]]
try:
(index, itokens) = next(insertions) # depends on [control=['try'], data=[]]
except StopIteration:
insleft = False
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def sort_by_length(self):
"""
Sorts dataset by the sequence length.
"""
self.lengths, indices = self.lengths.sort(descending=True)
self.src = [self.src[idx] for idx in indices]
self.indices = indices.tolist()
self.sorted = True | def function[sort_by_length, parameter[self]]:
constant[
Sorts dataset by the sequence length.
]
<ast.Tuple object at 0x7da1b1b03970> assign[=] call[name[self].lengths.sort, parameter[]]
name[self].src assign[=] <ast.ListComp object at 0x7da1b1b02d40>
name[self].indices assign[=] call[name[indices].tolist, parameter[]]
name[self].sorted assign[=] constant[True] | keyword[def] identifier[sort_by_length] ( identifier[self] ):
literal[string]
identifier[self] . identifier[lengths] , identifier[indices] = identifier[self] . identifier[lengths] . identifier[sort] ( identifier[descending] = keyword[True] )
identifier[self] . identifier[src] =[ identifier[self] . identifier[src] [ identifier[idx] ] keyword[for] identifier[idx] keyword[in] identifier[indices] ]
identifier[self] . identifier[indices] = identifier[indices] . identifier[tolist] ()
identifier[self] . identifier[sorted] = keyword[True] | def sort_by_length(self):
"""
Sorts dataset by the sequence length.
"""
(self.lengths, indices) = self.lengths.sort(descending=True)
self.src = [self.src[idx] for idx in indices]
self.indices = indices.tolist()
self.sorted = True |
def boundedFunction(x, minY, ax, ay):
'''
limit [function] to a minimum y value
'''
y = function(x, ax, ay)
return np.maximum(np.nan_to_num(y), minY) | def function[boundedFunction, parameter[x, minY, ax, ay]]:
constant[
limit [function] to a minimum y value
]
variable[y] assign[=] call[name[function], parameter[name[x], name[ax], name[ay]]]
return[call[name[np].maximum, parameter[call[name[np].nan_to_num, parameter[name[y]]], name[minY]]]] | keyword[def] identifier[boundedFunction] ( identifier[x] , identifier[minY] , identifier[ax] , identifier[ay] ):
literal[string]
identifier[y] = identifier[function] ( identifier[x] , identifier[ax] , identifier[ay] )
keyword[return] identifier[np] . identifier[maximum] ( identifier[np] . identifier[nan_to_num] ( identifier[y] ), identifier[minY] ) | def boundedFunction(x, minY, ax, ay):
"""
limit [function] to a minimum y value
"""
y = function(x, ax, ay)
return np.maximum(np.nan_to_num(y), minY) |
def set_language(self, editor, language):
"""
Sets given language to given Model editor.
:param editor: Editor to set language to.
:type editor: Editor
:param language: Language to set.
:type language: Language
:return: Method success.
:rtype: bool
"""
LOGGER.debug("> Setting '{0}' language to '{1}' editor.".format(language.name, editor))
return editor.set_language(language) | def function[set_language, parameter[self, editor, language]]:
constant[
Sets given language to given Model editor.
:param editor: Editor to set language to.
:type editor: Editor
:param language: Language to set.
:type language: Language
:return: Method success.
:rtype: bool
]
call[name[LOGGER].debug, parameter[call[constant[> Setting '{0}' language to '{1}' editor.].format, parameter[name[language].name, name[editor]]]]]
return[call[name[editor].set_language, parameter[name[language]]]] | keyword[def] identifier[set_language] ( identifier[self] , identifier[editor] , identifier[language] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[language] . identifier[name] , identifier[editor] ))
keyword[return] identifier[editor] . identifier[set_language] ( identifier[language] ) | def set_language(self, editor, language):
"""
Sets given language to given Model editor.
:param editor: Editor to set language to.
:type editor: Editor
:param language: Language to set.
:type language: Language
:return: Method success.
:rtype: bool
"""
LOGGER.debug("> Setting '{0}' language to '{1}' editor.".format(language.name, editor))
return editor.set_language(language) |
def write_to_file(chats, chatfile):
"""called every time chats are modified"""
with open(chatfile, 'w') as handler:
handler.write('\n'.join((str(id_) for id_ in chats))) | def function[write_to_file, parameter[chats, chatfile]]:
constant[called every time chats are modified]
with call[name[open], parameter[name[chatfile], constant[w]]] begin[:]
call[name[handler].write, parameter[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da1b0e16f20>]]]] | keyword[def] identifier[write_to_file] ( identifier[chats] , identifier[chatfile] ):
literal[string]
keyword[with] identifier[open] ( identifier[chatfile] , literal[string] ) keyword[as] identifier[handler] :
identifier[handler] . identifier[write] ( literal[string] . identifier[join] (( identifier[str] ( identifier[id_] ) keyword[for] identifier[id_] keyword[in] identifier[chats] ))) | def write_to_file(chats, chatfile):
"""called every time chats are modified"""
with open(chatfile, 'w') as handler:
handler.write('\n'.join((str(id_) for id_ in chats))) # depends on [control=['with'], data=['handler']] |
def parse_event_record(self, node):
"""
Parses <EventRecord>
@param node: Node containing the <EventRecord> element
@type node: xml.etree.Element
"""
if self.current_simulation == None:
self.raise_error('<EventRecord> must be only be used inside a ' +
'simulation specification')
if 'quantity' in node.lattrib:
quantity = node.lattrib['quantity']
else:
self.raise_error('<EventRecord> must specify a quantity.')
if 'eventport' in node.lattrib:
eventPort = node.lattrib['eventport']
else:
self.raise_error('<EventRecord> must specify an eventPort.')
self.current_simulation.add_event_record(EventRecord(quantity, eventPort)) | def function[parse_event_record, parameter[self, node]]:
constant[
Parses <EventRecord>
@param node: Node containing the <EventRecord> element
@type node: xml.etree.Element
]
if compare[name[self].current_simulation equal[==] constant[None]] begin[:]
call[name[self].raise_error, parameter[binary_operation[constant[<EventRecord> must be only be used inside a ] + constant[simulation specification]]]]
if compare[constant[quantity] in name[node].lattrib] begin[:]
variable[quantity] assign[=] call[name[node].lattrib][constant[quantity]]
if compare[constant[eventport] in name[node].lattrib] begin[:]
variable[eventPort] assign[=] call[name[node].lattrib][constant[eventport]]
call[name[self].current_simulation.add_event_record, parameter[call[name[EventRecord], parameter[name[quantity], name[eventPort]]]]] | keyword[def] identifier[parse_event_record] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[self] . identifier[current_simulation] == keyword[None] :
identifier[self] . identifier[raise_error] ( literal[string] +
literal[string] )
keyword[if] literal[string] keyword[in] identifier[node] . identifier[lattrib] :
identifier[quantity] = identifier[node] . identifier[lattrib] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[raise_error] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[node] . identifier[lattrib] :
identifier[eventPort] = identifier[node] . identifier[lattrib] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[raise_error] ( literal[string] )
identifier[self] . identifier[current_simulation] . identifier[add_event_record] ( identifier[EventRecord] ( identifier[quantity] , identifier[eventPort] )) | def parse_event_record(self, node):
"""
Parses <EventRecord>
@param node: Node containing the <EventRecord> element
@type node: xml.etree.Element
"""
if self.current_simulation == None:
self.raise_error('<EventRecord> must be only be used inside a ' + 'simulation specification') # depends on [control=['if'], data=[]]
if 'quantity' in node.lattrib:
quantity = node.lattrib['quantity'] # depends on [control=['if'], data=[]]
else:
self.raise_error('<EventRecord> must specify a quantity.')
if 'eventport' in node.lattrib:
eventPort = node.lattrib['eventport'] # depends on [control=['if'], data=[]]
else:
self.raise_error('<EventRecord> must specify an eventPort.')
self.current_simulation.add_event_record(EventRecord(quantity, eventPort)) |
def create_redis_client(redis_address, password=None):
"""Create a Redis client.
Args:
The IP address, port, and password of the Redis server.
Returns:
A Redis client.
"""
redis_ip_address, redis_port = redis_address.split(":")
# For this command to work, some other client (on the same machine
# as Redis) must have run "CONFIG SET protected-mode no".
return redis.StrictRedis(
host=redis_ip_address, port=int(redis_port), password=password) | def function[create_redis_client, parameter[redis_address, password]]:
constant[Create a Redis client.
Args:
The IP address, port, and password of the Redis server.
Returns:
A Redis client.
]
<ast.Tuple object at 0x7da18f58f280> assign[=] call[name[redis_address].split, parameter[constant[:]]]
return[call[name[redis].StrictRedis, parameter[]]] | keyword[def] identifier[create_redis_client] ( identifier[redis_address] , identifier[password] = keyword[None] ):
literal[string]
identifier[redis_ip_address] , identifier[redis_port] = identifier[redis_address] . identifier[split] ( literal[string] )
keyword[return] identifier[redis] . identifier[StrictRedis] (
identifier[host] = identifier[redis_ip_address] , identifier[port] = identifier[int] ( identifier[redis_port] ), identifier[password] = identifier[password] ) | def create_redis_client(redis_address, password=None):
"""Create a Redis client.
Args:
The IP address, port, and password of the Redis server.
Returns:
A Redis client.
"""
(redis_ip_address, redis_port) = redis_address.split(':')
# For this command to work, some other client (on the same machine
# as Redis) must have run "CONFIG SET protected-mode no".
return redis.StrictRedis(host=redis_ip_address, port=int(redis_port), password=password) |
def ledger(self, ledger_id):
"""The ledger details endpoint provides information on a single ledger.
`GET /ledgers/{sequence}
<https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-single.html>`_
:param int ledger_id: The id of the ledger to look up.
:return: The details of a single ledger.
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}'.format(ledger_id=ledger_id)
return self.query(endpoint) | def function[ledger, parameter[self, ledger_id]]:
constant[The ledger details endpoint provides information on a single ledger.
`GET /ledgers/{sequence}
<https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-single.html>`_
:param int ledger_id: The id of the ledger to look up.
:return: The details of a single ledger.
:rtype: dict
]
variable[endpoint] assign[=] call[constant[/ledgers/{ledger_id}].format, parameter[]]
return[call[name[self].query, parameter[name[endpoint]]]] | keyword[def] identifier[ledger] ( identifier[self] , identifier[ledger_id] ):
literal[string]
identifier[endpoint] = literal[string] . identifier[format] ( identifier[ledger_id] = identifier[ledger_id] )
keyword[return] identifier[self] . identifier[query] ( identifier[endpoint] ) | def ledger(self, ledger_id):
"""The ledger details endpoint provides information on a single ledger.
`GET /ledgers/{sequence}
<https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-single.html>`_
:param int ledger_id: The id of the ledger to look up.
:return: The details of a single ledger.
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}'.format(ledger_id=ledger_id)
return self.query(endpoint) |
def _parse_file(self):
    """Preprocess and parse C file into an AST"""
    # Include paths come from the target architecture so the preprocessor
    # can resolve the right register-definition headers.
    args = utilities.build_includes(self.arch.includes())
    # args.append('-mcpu=%s' % self.arch.property('chip'))
    # Run the preprocessor only (-E) and neutralise the gcc extensions
    # that are not valid ISO C.
    args.extend(['-E', '-D__attribute__(x)=', '-D__extension__='])
self.ast = parse_file(self.filepath, use_cpp=True, cpp_path='arm-none-eabi-gcc', cpp_args=args) | def function[_parse_file, parameter[self]]:
constant[Preprocess and parse C file into an AST]
variable[args] assign[=] call[name[utilities].build_includes, parameter[call[name[self].arch.includes, parameter[]]]]
call[name[args].append, parameter[constant[-E]]]
call[name[args].append, parameter[constant[-D__attribute__(x)=]]]
call[name[args].append, parameter[constant[-D__extension__=]]]
name[self].ast assign[=] call[name[parse_file], parameter[name[self].filepath]] | keyword[def] identifier[_parse_file] ( identifier[self] ):
literal[string]
identifier[args] = identifier[utilities] . identifier[build_includes] ( identifier[self] . identifier[arch] . identifier[includes] ())
identifier[args] . identifier[append] ( literal[string] )
identifier[args] . identifier[append] ( literal[string] )
identifier[args] . identifier[append] ( literal[string] )
identifier[self] . identifier[ast] = identifier[parse_file] ( identifier[self] . identifier[filepath] , identifier[use_cpp] = keyword[True] , identifier[cpp_path] = literal[string] , identifier[cpp_args] = identifier[args] ) | def _parse_file(self):
"""Preprocess and parse C file into an AST"""
# We need to set the CPU type to pull in the right register definitions
# only preprocess the file (-E) and get rid of gcc extensions that aren't
# supported in ISO C.
args = utilities.build_includes(self.arch.includes())
# args.append('-mcpu=%s' % self.arch.property('chip'))
args.append('-E')
args.append('-D__attribute__(x)=')
args.append('-D__extension__=')
self.ast = parse_file(self.filepath, use_cpp=True, cpp_path='arm-none-eabi-gcc', cpp_args=args) |
def is_py_script(filename):
    """Return True if *filename* looks like a python executable.

    A path counts as a python script when it is an existing regular file
    and either ends in ``.py`` or is executable with a ``#!...python``
    shebang on its first line.
    """
    # BUG FIX: the original guard was `not exists(f) and isfile(f)`, which
    # can never be True (a missing path is never a regular file), so
    # nonexistent "*.py" paths fell through to the suffix test and were
    # reported as scripts.  Reject anything that is not an existing file.
    if not (os.path.exists(filename) and os.path.isfile(filename)):
        return False
    elif filename.endswith(".py"):
        return True
    elif not os.access(filename, os.X_OK):
        return False
    else:
        try:
            with open(filename, "r") as fp:
                first_line = fp.readline().strip()
            return "#!" in first_line and "python" in first_line
        # readline() never raises StopIteration (the original handler was
        # dead code); an unreadable or non-text executable simply isn't a
        # python script.
        except (OSError, UnicodeDecodeError):
            pass
return False | def function[is_py_script, parameter[filename]]:
constant[Returns True if a file is a python executable.]
if <ast.BoolOp object at 0x7da1b09ba7a0> begin[:]
return[constant[False]] | keyword[def] identifier[is_py_script] ( identifier[filename] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ):
keyword[return] keyword[False]
keyword[elif] identifier[filename] . identifier[endswith] ( literal[string] ):
keyword[return] keyword[True]
keyword[elif] keyword[not] identifier[os] . identifier[access] ( identifier[filename] , identifier[os] . identifier[X_OK] ):
keyword[return] keyword[False]
keyword[else] :
keyword[try] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fp] :
identifier[first_line] = identifier[fp] . identifier[readline] (). identifier[strip] ()
keyword[return] literal[string] keyword[in] identifier[first_line] keyword[and] literal[string] keyword[in] identifier[first_line]
keyword[except] identifier[StopIteration] :
keyword[return] keyword[False] | def is_py_script(filename):
"""Returns True if a file is a python executable."""
if not os.path.exists(filename) and os.path.isfile(filename):
return False # depends on [control=['if'], data=[]]
elif filename.endswith('.py'):
return True # depends on [control=['if'], data=[]]
elif not os.access(filename, os.X_OK):
return False # depends on [control=['if'], data=[]]
else:
try:
with open(filename, 'r') as fp:
first_line = fp.readline().strip() # depends on [control=['with'], data=['fp']]
return '#!' in first_line and 'python' in first_line # depends on [control=['try'], data=[]]
except StopIteration:
return False # depends on [control=['except'], data=[]] |
def to_binary(self):
    """Convert N-ary operators to binary operators."""
    # Delegate to the wrapped node; an identity result means the tree
    # already was binary.
    node = self.node.to_binary()
    if self.node is node:
        return self
    else:
        pass  # fall through: wrap the converted node below
return _expr(node) | def function[to_binary, parameter[self]]:
constant[Convert N-ary operators to binary operators.]
variable[node] assign[=] call[name[self].node.to_binary, parameter[]]
if compare[name[node] is name[self].node] begin[:]
return[name[self]] | keyword[def] identifier[to_binary] ( identifier[self] ):
literal[string]
identifier[node] = identifier[self] . identifier[node] . identifier[to_binary] ()
keyword[if] identifier[node] keyword[is] identifier[self] . identifier[node] :
keyword[return] identifier[self]
keyword[else] :
keyword[return] identifier[_expr] ( identifier[node] ) | def to_binary(self):
"""Convert N-ary operators to binary operators."""
node = self.node.to_binary()
if node is self.node:
return self # depends on [control=['if'], data=[]]
else:
return _expr(node) |
def decompress(data, compression, width, height, depth, version=1):
    """Decompress raw data.

    :param data: compressed data bytes.
    :param compression: compression type,
        see :py:class:`~psd_tools.constants.Compression`.
    :param width: width.
    :param height: height.
    :param depth: bit depth of the pixel.
    :param version: psd file version.
    :return: decompressed data bytes.
    """
    # Size the decompressed buffer must have: one row is width*depth bits.
    expected = width * height * depth // 8
    if compression == Compression.RAW:
        result = data[:expected]
    elif compression == Compression.PACK_BITS:
        result = decode_packbits(data, height, version)
    elif compression == Compression.ZIP:
        result = zlib.decompress(data)
    else:
        # Remaining case: zip with prediction -- inflate first, then undo
        # the per-row delta encoding.
        result = decode_prediction(zlib.decompress(data), width, height, depth)
    assert len(result) == expected, 'len=%d, expected=%d' % (
        len(result), expected
    )
return result | def function[decompress, parameter[data, compression, width, height, depth, version]]:
constant[Decompress raw data.
:param data: compressed data bytes.
:param compression: compression type,
see :py:class:`~psd_tools.constants.Compression`.
:param width: width.
:param height: height.
:param depth: bit depth of the pixel.
:param version: psd file version.
:return: decompressed data bytes.
]
variable[length] assign[=] binary_operation[binary_operation[binary_operation[name[width] * name[height]] * name[depth]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]
variable[result] assign[=] constant[None]
if compare[name[compression] equal[==] name[Compression].RAW] begin[:]
variable[result] assign[=] call[name[data]][<ast.Slice object at 0x7da20e9b2590>]
assert[compare[call[name[len], parameter[name[result]]] equal[==] name[length]]]
return[name[result]] | keyword[def] identifier[decompress] ( identifier[data] , identifier[compression] , identifier[width] , identifier[height] , identifier[depth] , identifier[version] = literal[int] ):
literal[string]
identifier[length] = identifier[width] * identifier[height] * identifier[depth] // literal[int]
identifier[result] = keyword[None]
keyword[if] identifier[compression] == identifier[Compression] . identifier[RAW] :
identifier[result] = identifier[data] [: identifier[length] ]
keyword[elif] identifier[compression] == identifier[Compression] . identifier[PACK_BITS] :
identifier[result] = identifier[decode_packbits] ( identifier[data] , identifier[height] , identifier[version] )
keyword[elif] identifier[compression] == identifier[Compression] . identifier[ZIP] :
identifier[result] = identifier[zlib] . identifier[decompress] ( identifier[data] )
keyword[else] :
identifier[decompressed] = identifier[zlib] . identifier[decompress] ( identifier[data] )
identifier[result] = identifier[decode_prediction] ( identifier[decompressed] , identifier[width] , identifier[height] , identifier[depth] )
keyword[assert] identifier[len] ( identifier[result] )== identifier[length] , literal[string] %(
identifier[len] ( identifier[result] ), identifier[length]
)
keyword[return] identifier[result] | def decompress(data, compression, width, height, depth, version=1):
"""Decompress raw data.
:param data: compressed data bytes.
:param compression: compression type,
see :py:class:`~psd_tools.constants.Compression`.
:param width: width.
:param height: height.
:param depth: bit depth of the pixel.
:param version: psd file version.
:return: decompressed data bytes.
"""
length = width * height * depth // 8
result = None
if compression == Compression.RAW:
result = data[:length] # depends on [control=['if'], data=[]]
elif compression == Compression.PACK_BITS:
result = decode_packbits(data, height, version) # depends on [control=['if'], data=[]]
elif compression == Compression.ZIP:
result = zlib.decompress(data) # depends on [control=['if'], data=[]]
else:
decompressed = zlib.decompress(data)
result = decode_prediction(decompressed, width, height, depth)
assert len(result) == length, 'len=%d, expected=%d' % (len(result), length)
return result |
def conditional_write(strm, fmt, value, *args, **kwargs):
    """Write to stream using fmt and value if value is not None.

    ``fmt`` is formatted via ``str.format`` with ``value`` as the first
    positional argument (any extra ``*args``/``**kwargs`` are passed
    through); when ``value`` is None nothing is written.
    """
    if value is not None:
strm.write(fmt.format(value, *args, **kwargs)) | def function[conditional_write, parameter[strm, fmt, value]]:
constant[Write to stream using fmt and value if value is not None]
if compare[name[value] is_not constant[None]] begin[:]
call[name[strm].write, parameter[call[name[fmt].format, parameter[name[value], <ast.Starred object at 0x7da1b1104160>]]]] | keyword[def] identifier[conditional_write] ( identifier[strm] , identifier[fmt] , identifier[value] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[strm] . identifier[write] ( identifier[fmt] . identifier[format] ( identifier[value] ,* identifier[args] ,** identifier[kwargs] )) | def conditional_write(strm, fmt, value, *args, **kwargs):
"""Write to stream using fmt and value if value is not None"""
if value is not None:
strm.write(fmt.format(value, *args, **kwargs)) # depends on [control=['if'], data=['value']] |
def get_tokens(max_value):
    """Defines tokens.

    Args:
      max_value: the maximum numeric range for the token.

    Returns:
      list of string tokens in vocabulary.
    """
    # Numeric tokens "0" .. str(max_value - 1), deduplicated via a set.
    vocab = {str(i) for i in range(max_value)}
    vocab.update(CodeOp.LITERALS)
    vocab.update(CodeOp.KEYWORDS)
    # Also include every individual character occurring in any token.
    vocab.update("".join(vocab))
return sorted(vocab) | def function[get_tokens, parameter[max_value]]:
constant[Defines tokens.
Args:
max_value: the maximum numeric range for the token.
Returns:
list of string tokens in vocabulary.
]
variable[vocab] assign[=] <ast.ListComp object at 0x7da1b1ff49d0>
variable[vocab] assign[=] call[name[set], parameter[name[vocab]]]
call[name[vocab].update, parameter[name[CodeOp].LITERALS]]
call[name[vocab].update, parameter[name[CodeOp].KEYWORDS]]
<ast.AugAssign object at 0x7da1b1c6bb50>
return[call[name[sorted], parameter[name[vocab]]]] | keyword[def] identifier[get_tokens] ( identifier[max_value] ):
literal[string]
identifier[vocab] =[ identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[max_value] )]
identifier[vocab] = identifier[set] ( identifier[vocab] )
identifier[vocab] . identifier[update] ( identifier[CodeOp] . identifier[LITERALS] )
identifier[vocab] . identifier[update] ( identifier[CodeOp] . identifier[KEYWORDS] )
identifier[vocab] |= identifier[set] ( literal[string] . identifier[join] ( identifier[vocab] ))
keyword[return] identifier[sorted] ( identifier[vocab] ) | def get_tokens(max_value):
"""Defines tokens.
Args:
max_value: the maximum numeric range for the token.
Returns:
list of string tokens in vocabulary.
"""
vocab = [str(i) for i in range(max_value)]
vocab = set(vocab)
vocab.update(CodeOp.LITERALS)
vocab.update(CodeOp.KEYWORDS)
vocab |= set(''.join(vocab))
return sorted(vocab) |
def get_setting(name):
    '''
    Get the current configuration for the named audit setting
    Args:
        name (str): The name of the setting to retrieve
    Returns:
        str: The current configuration for the named setting
    Raises:
        KeyError: On invalid setting name
        CommandExecutionError: If an error is encountered retrieving the settings
    Usage:
        .. code-block:: python
            import salt.utils.win_lgpo_auditpol
            # Get current state of the "Credential Validation" setting
            salt.utils.win_lgpo_auditpol.get_setting(name='Credential Validation')
    '''
    current_settings = get_settings(category='All')
    # Match the requested name case-insensitively against the known settings.
    wanted = name.lower()
    for setting in current_settings:
        if setting.lower() == wanted:
            return current_settings[setting]
raise KeyError('Invalid name: {0}'.format(name)) | def function[get_setting, parameter[name]]:
constant[
Get the current configuration for the named audit setting
Args:
name (str): The name of the setting to retrieve
Returns:
str: The current configuration for the named setting
Raises:
KeyError: On invalid setting name
CommandExecutionError: If an error is encountered retrieving the settings
Usage:
.. code-block:: python
import salt.utils.win_lgpo_auditpol
# Get current state of the "Credential Validation" setting
salt.utils.win_lgpo_auditpol.get_setting(name='Credential Validation')
]
variable[current_settings] assign[=] call[name[get_settings], parameter[]]
for taget[name[setting]] in starred[name[current_settings]] begin[:]
if compare[call[name[name].lower, parameter[]] equal[==] call[name[setting].lower, parameter[]]] begin[:]
return[call[name[current_settings]][name[setting]]]
<ast.Raise object at 0x7da18f58fbb0> | keyword[def] identifier[get_setting] ( identifier[name] ):
literal[string]
identifier[current_settings] = identifier[get_settings] ( identifier[category] = literal[string] )
keyword[for] identifier[setting] keyword[in] identifier[current_settings] :
keyword[if] identifier[name] . identifier[lower] ()== identifier[setting] . identifier[lower] ():
keyword[return] identifier[current_settings] [ identifier[setting] ]
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[name] )) | def get_setting(name):
"""
Get the current configuration for the named audit setting
Args:
name (str): The name of the setting to retrieve
Returns:
str: The current configuration for the named setting
Raises:
KeyError: On invalid setting name
CommandExecutionError: If an error is encountered retrieving the settings
Usage:
.. code-block:: python
import salt.utils.win_lgpo_auditpol
# Get current state of the "Credential Validation" setting
salt.utils.win_lgpo_auditpol.get_setting(name='Credential Validation')
"""
current_settings = get_settings(category='All')
for setting in current_settings:
if name.lower() == setting.lower():
return current_settings[setting] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['setting']]
raise KeyError('Invalid name: {0}'.format(name)) |
def ZernikeSequence(x,y,maxRadial=5,epsilon=1e-10):
    """
    Return Zernike values at a given set of x,y coords up to some maximum
    radial order.

    Parameters: x, y are numpy arrays of coordinates (the polynomials are
    defined on the unit disc; points with radius > 1 are zeroed).
    maxRadial is the highest radial order to evaluate (only orders <= 5
    are implemented).  epsilon keeps the exp(i*theta) division finite at
    the origin.

    Returns an array of shape (NumZernike(maxRadial),) + x.shape -- one
    map per Zernike mode, zeroed outside the unit circle.

    NOTE(review): the sqrt normalisation factors and 0-based mode ordering
    below look like a unit-variance (Noll-style) convention -- confirm
    against the project's documentation before relying on the ordering.
    """
    if maxRadial>5:
        raise ValueError('Code for higher radial orders not implemented')
    # Derive radius and exp(i*theta) from the complex coordinate x + i*y
    temp = x + 1j*y
    r1 = np.abs(temp)
    # epsilon avoids 0/0 at the origin where r1 == 0
    e1 = temp/(r1+epsilon)
    # Generate powers of r recursively
    r2 = r1*r1
    r3 = r2*r1
    r4 = r3*r1
    r5 = r4*r1
    # Generate cos and sin terms recursively from exp(i*theta):
    # e_n = exp(i*n*theta), so e_n.real = cos(n*theta) and e_n.imag = sin(n*theta)
    e2 = e1*e1
    e3 = e2*e1
    e4 = e3*e1
    e5 = e4*e1
    ctheta = e1.real
    stheta = e1.imag
    c2theta = e2.real
    s2theta = e2.imag
    c3theta = e3.real
    s3theta = e3.imag
    c4theta = e4.real
    s4theta = e4.imag
    c5theta = e5.real
    s5theta = e5.imag
    # Generate all the zernikes (21 modes for radial order <= 5).
    # Index 0 is the constant (piston) term; 1,2 are the linear (tip/tilt)
    # terms; higher indices follow in increasing radial order -- presumably
    # the project's standard ordering, see NOTE in the docstring.
    zernike = np.zeros((21,)+x.shape )
    zernike[0] = 1.0
    zernike[1] = 2.0*r1*ctheta
    zernike[2] = 2.0*r1*stheta
    zernike[3] = sqrt(3.0)*(2.0*r2 - 1.0)
    zernike[4] = sqrt(6.0)*r2*s2theta
    zernike[5] = sqrt(6.0)*r2*c2theta
    zernike[6] = sqrt(8.0)*(3.0*r3 - 2.0*r1)*stheta
    zernike[7] = sqrt(8.0)*(3.0*r3 - 2.0*r1)*ctheta
    zernike[8] = sqrt(8.0)*r3*s3theta
    zernike[9] = sqrt(8.0)*r3*c3theta
    zernike[10] = sqrt(5.0)*(6.*r4 - 6.*r2 + 1.)
    zernike[11] = sqrt(10.)*(4.*r4 - 3.*r2)*c2theta
    zernike[12] = sqrt(10.)*(4.*r4 - 3.*r2)*s2theta
    zernike[13] = sqrt(10.)*r4*c4theta
    zernike[14] = sqrt(10.)*r4*s4theta
    zernike[15] = sqrt(12.)*(10*r5-12*r3+3*r1)*ctheta
    zernike[16] = sqrt(12.)*(10*r5-12*r3+3*r1)*stheta
    zernike[17] = sqrt(12.)*(5*r5-4*r3)*c3theta
    zernike[18] = sqrt(12.)*(5*r5-4*r3)*s3theta
    zernike[19] = sqrt(12.)*r5*c5theta
    zernike[20] = sqrt(12.)*r5*s5theta
    # Make zernike zero outside unit circle (useful for dot product)
    zernike = zernike*np.less_equal(r1, 1.0)
return(zernike[:NumZernike(maxRadial)]) | def function[ZernikeSequence, parameter[x, y, maxRadial, epsilon]]:
constant[
Return Zernike values at a given set of x,y coords up to some maximum
radial order
]
if compare[name[maxRadial] greater[>] constant[5]] begin[:]
<ast.Raise object at 0x7da18f7221a0>
variable[temp] assign[=] binary_operation[name[x] + binary_operation[constant[1j] * name[y]]]
variable[r1] assign[=] call[name[np].abs, parameter[name[temp]]]
variable[e1] assign[=] binary_operation[name[temp] / binary_operation[name[r1] + name[epsilon]]]
variable[r2] assign[=] binary_operation[name[r1] * name[r1]]
variable[r3] assign[=] binary_operation[name[r2] * name[r1]]
variable[r4] assign[=] binary_operation[name[r3] * name[r1]]
variable[r5] assign[=] binary_operation[name[r4] * name[r1]]
variable[e2] assign[=] binary_operation[name[e1] * name[e1]]
variable[e3] assign[=] binary_operation[name[e2] * name[e1]]
variable[e4] assign[=] binary_operation[name[e3] * name[e1]]
variable[e5] assign[=] binary_operation[name[e4] * name[e1]]
variable[ctheta] assign[=] name[e1].real
variable[stheta] assign[=] name[e1].imag
variable[c2theta] assign[=] name[e2].real
variable[s2theta] assign[=] name[e2].imag
variable[c3theta] assign[=] name[e3].real
variable[s3theta] assign[=] name[e3].imag
variable[c4theta] assign[=] name[e4].real
variable[s4theta] assign[=] name[e4].imag
variable[c5theta] assign[=] name[e5].real
variable[s5theta] assign[=] name[e5].imag
variable[zernike] assign[=] call[name[np].zeros, parameter[binary_operation[tuple[[<ast.Constant object at 0x7da18f721e70>]] + name[x].shape]]]
call[name[zernike]][constant[0]] assign[=] constant[1.0]
call[name[zernike]][constant[1]] assign[=] binary_operation[binary_operation[constant[2.0] * name[r1]] * name[ctheta]]
call[name[zernike]][constant[2]] assign[=] binary_operation[binary_operation[constant[2.0] * name[r1]] * name[stheta]]
call[name[zernike]][constant[3]] assign[=] binary_operation[call[name[sqrt], parameter[constant[3.0]]] * binary_operation[binary_operation[constant[2.0] * name[r2]] - constant[1.0]]]
call[name[zernike]][constant[4]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[6.0]]] * name[r2]] * name[s2theta]]
call[name[zernike]][constant[5]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[6.0]]] * name[r2]] * name[c2theta]]
call[name[zernike]][constant[6]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[8.0]]] * binary_operation[binary_operation[constant[3.0] * name[r3]] - binary_operation[constant[2.0] * name[r1]]]] * name[stheta]]
call[name[zernike]][constant[7]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[8.0]]] * binary_operation[binary_operation[constant[3.0] * name[r3]] - binary_operation[constant[2.0] * name[r1]]]] * name[ctheta]]
call[name[zernike]][constant[8]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[8.0]]] * name[r3]] * name[s3theta]]
call[name[zernike]][constant[9]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[8.0]]] * name[r3]] * name[c3theta]]
call[name[zernike]][constant[10]] assign[=] binary_operation[call[name[sqrt], parameter[constant[5.0]]] * binary_operation[binary_operation[binary_operation[constant[6.0] * name[r4]] - binary_operation[constant[6.0] * name[r2]]] + constant[1.0]]]
call[name[zernike]][constant[11]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[10.0]]] * binary_operation[binary_operation[constant[4.0] * name[r4]] - binary_operation[constant[3.0] * name[r2]]]] * name[c2theta]]
call[name[zernike]][constant[12]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[10.0]]] * binary_operation[binary_operation[constant[4.0] * name[r4]] - binary_operation[constant[3.0] * name[r2]]]] * name[s2theta]]
call[name[zernike]][constant[13]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[10.0]]] * name[r4]] * name[c4theta]]
call[name[zernike]][constant[14]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[10.0]]] * name[r4]] * name[s4theta]]
call[name[zernike]][constant[15]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[12.0]]] * binary_operation[binary_operation[binary_operation[constant[10] * name[r5]] - binary_operation[constant[12] * name[r3]]] + binary_operation[constant[3] * name[r1]]]] * name[ctheta]]
call[name[zernike]][constant[16]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[12.0]]] * binary_operation[binary_operation[binary_operation[constant[10] * name[r5]] - binary_operation[constant[12] * name[r3]]] + binary_operation[constant[3] * name[r1]]]] * name[stheta]]
call[name[zernike]][constant[17]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[12.0]]] * binary_operation[binary_operation[constant[5] * name[r5]] - binary_operation[constant[4] * name[r3]]]] * name[c3theta]]
call[name[zernike]][constant[18]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[12.0]]] * binary_operation[binary_operation[constant[5] * name[r5]] - binary_operation[constant[4] * name[r3]]]] * name[s3theta]]
call[name[zernike]][constant[19]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[12.0]]] * name[r5]] * name[c5theta]]
call[name[zernike]][constant[20]] assign[=] binary_operation[binary_operation[call[name[sqrt], parameter[constant[12.0]]] * name[r5]] * name[s5theta]]
variable[zernike] assign[=] binary_operation[name[zernike] * call[name[np].less_equal, parameter[name[r1], constant[1.0]]]]
return[call[name[zernike]][<ast.Slice object at 0x7da204622e60>]] | keyword[def] identifier[ZernikeSequence] ( identifier[x] , identifier[y] , identifier[maxRadial] = literal[int] , identifier[epsilon] = literal[int] ):
literal[string]
keyword[if] identifier[maxRadial] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[temp] = identifier[x] + literal[int] * identifier[y]
identifier[r1] = identifier[np] . identifier[abs] ( identifier[temp] )
identifier[e1] = identifier[temp] /( identifier[r1] + identifier[epsilon] )
identifier[r2] = identifier[r1] * identifier[r1]
identifier[r3] = identifier[r2] * identifier[r1]
identifier[r4] = identifier[r3] * identifier[r1]
identifier[r5] = identifier[r4] * identifier[r1]
identifier[e2] = identifier[e1] * identifier[e1]
identifier[e3] = identifier[e2] * identifier[e1]
identifier[e4] = identifier[e3] * identifier[e1]
identifier[e5] = identifier[e4] * identifier[e1]
identifier[ctheta] = identifier[e1] . identifier[real]
identifier[stheta] = identifier[e1] . identifier[imag]
identifier[c2theta] = identifier[e2] . identifier[real]
identifier[s2theta] = identifier[e2] . identifier[imag]
identifier[c3theta] = identifier[e3] . identifier[real]
identifier[s3theta] = identifier[e3] . identifier[imag]
identifier[c4theta] = identifier[e4] . identifier[real]
identifier[s4theta] = identifier[e4] . identifier[imag]
identifier[c5theta] = identifier[e5] . identifier[real]
identifier[s5theta] = identifier[e5] . identifier[imag]
identifier[zernike] = identifier[np] . identifier[zeros] (( literal[int] ,)+ identifier[x] . identifier[shape] )
identifier[zernike] [ literal[int] ]= literal[int]
identifier[zernike] [ literal[int] ]= literal[int] * identifier[r1] * identifier[ctheta]
identifier[zernike] [ literal[int] ]= literal[int] * identifier[r1] * identifier[stheta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r2] - literal[int] )
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r2] * identifier[s2theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r2] * identifier[c2theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r3] - literal[int] * identifier[r1] )* identifier[stheta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r3] - literal[int] * identifier[r1] )* identifier[ctheta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r3] * identifier[s3theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r3] * identifier[c3theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r4] - literal[int] * identifier[r2] + literal[int] )
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r4] - literal[int] * identifier[r2] )* identifier[c2theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r4] - literal[int] * identifier[r2] )* identifier[s2theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r4] * identifier[c4theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r4] * identifier[s4theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r5] - literal[int] * identifier[r3] + literal[int] * identifier[r1] )* identifier[ctheta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r5] - literal[int] * identifier[r3] + literal[int] * identifier[r1] )* identifier[stheta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r5] - literal[int] * identifier[r3] )* identifier[c3theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )*( literal[int] * identifier[r5] - literal[int] * identifier[r3] )* identifier[s3theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r5] * identifier[c5theta]
identifier[zernike] [ literal[int] ]= identifier[sqrt] ( literal[int] )* identifier[r5] * identifier[s5theta]
identifier[zernike] = identifier[zernike] * identifier[np] . identifier[less_equal] ( identifier[r1] , literal[int] )
keyword[return] ( identifier[zernike] [: identifier[NumZernike] ( identifier[maxRadial] )]) | def ZernikeSequence(x, y, maxRadial=5, epsilon=1e-10):
"""
Return Zernike values at a given set of x,y coords up to some maximum
radial order
"""
if maxRadial > 5:
raise ValueError('Code for higher radial orders not implemented') # depends on [control=['if'], data=[]] # Derive radius and exp(i*theta)
temp = x + 1j * y
r1 = np.abs(temp)
e1 = temp / (r1 + epsilon)
# Generate powers of r recursively
r2 = r1 * r1
r3 = r2 * r1
r4 = r3 * r1
r5 = r4 * r1
# Generate cos and sin terms recursively from exp(i*theta)
e2 = e1 * e1
e3 = e2 * e1
e4 = e3 * e1
e5 = e4 * e1
ctheta = e1.real
stheta = e1.imag
c2theta = e2.real
s2theta = e2.imag
c3theta = e3.real
s3theta = e3.imag
c4theta = e4.real
s4theta = e4.imag
c5theta = e5.real
s5theta = e5.imag
# Generate all the zernikes
zernike = np.zeros((21,) + x.shape)
zernike[0] = 1.0
zernike[1] = 2.0 * r1 * ctheta
zernike[2] = 2.0 * r1 * stheta
zernike[3] = sqrt(3.0) * (2.0 * r2 - 1.0)
zernike[4] = sqrt(6.0) * r2 * s2theta
zernike[5] = sqrt(6.0) * r2 * c2theta
zernike[6] = sqrt(8.0) * (3.0 * r3 - 2.0 * r1) * stheta
zernike[7] = sqrt(8.0) * (3.0 * r3 - 2.0 * r1) * ctheta
zernike[8] = sqrt(8.0) * r3 * s3theta
zernike[9] = sqrt(8.0) * r3 * c3theta
zernike[10] = sqrt(5.0) * (6.0 * r4 - 6.0 * r2 + 1.0)
zernike[11] = sqrt(10.0) * (4.0 * r4 - 3.0 * r2) * c2theta
zernike[12] = sqrt(10.0) * (4.0 * r4 - 3.0 * r2) * s2theta
zernike[13] = sqrt(10.0) * r4 * c4theta
zernike[14] = sqrt(10.0) * r4 * s4theta
zernike[15] = sqrt(12.0) * (10 * r5 - 12 * r3 + 3 * r1) * ctheta
zernike[16] = sqrt(12.0) * (10 * r5 - 12 * r3 + 3 * r1) * stheta
zernike[17] = sqrt(12.0) * (5 * r5 - 4 * r3) * c3theta
zernike[18] = sqrt(12.0) * (5 * r5 - 4 * r3) * s3theta
zernike[19] = sqrt(12.0) * r5 * c5theta
zernike[20] = sqrt(12.0) * r5 * s5theta
# Make zernike zero outside unit circle (useful for dot product)
zernike = zernike * np.less_equal(r1, 1.0)
return zernike[:NumZernike(maxRadial)] |
def _pop_params(cls, kwargs):
    """
    Pop entries from the `kwargs` passed to cls.__new__ based on the values
    in `cls.params`.
    Parameters
    ----------
    kwargs : dict
        The kwargs passed to cls.__new__.
    Returns
    -------
    params : list[(str, object)]
        A list of string, value pairs containing the entries in cls.params.
    Raises
    ------
    TypeError
        Raised if any parameter values are not passed or not hashable.
    """
    params = cls.params
    # `params` may be declared either as a mapping of name -> default or
    # as a plain sequence of names; normalize the latter to "required,
    # no default" by pairing each name with the NotSpecified sentinel.
    if not isinstance(params, Mapping):
        params = {k: NotSpecified for k in params}
    param_values = []
    for key, default_value in params.items():
        try:
            value = kwargs.pop(key, default_value)
            if value is NotSpecified:
                # Required parameter was not supplied.  Raising KeyError
                # here deliberately routes into the `except KeyError`
                # handler below, so the caller always sees a TypeError.
                raise KeyError(key)
            # Check here that the value is hashable so that we fail here
            # instead of trying to hash the param values tuple later.
            hash(value)
        except KeyError:
            raise TypeError(
                "{typename} expected a keyword parameter {name!r}.".format(
                    typename=cls.__name__,
                    name=key
                )
            )
        except TypeError:
            # Value wasn't hashable.
            raise TypeError(
                "{typename} expected a hashable value for parameter "
                "{name!r}, but got {value!r} instead.".format(
                    typename=cls.__name__,
                    name=key,
                    value=value,
                )
            )
        param_values.append((key, value))
return tuple(param_values) | def function[_pop_params, parameter[cls, kwargs]]:
constant[
Pop entries from the `kwargs` passed to cls.__new__ based on the values
in `cls.params`.
Parameters
----------
kwargs : dict
The kwargs passed to cls.__new__.
Returns
-------
params : list[(str, object)]
A list of string, value pairs containing the entries in cls.params.
Raises
------
TypeError
Raised if any parameter values are not passed or not hashable.
]
variable[params] assign[=] name[cls].params
if <ast.UnaryOp object at 0x7da1b1ea3400> begin[:]
variable[params] assign[=] <ast.DictComp object at 0x7da1b1ea17b0>
variable[param_values] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1ea15d0>, <ast.Name object at 0x7da1b1ea1870>]]] in starred[call[name[params].items, parameter[]]] begin[:]
<ast.Try object at 0x7da1b1ea1540>
call[name[param_values].append, parameter[tuple[[<ast.Name object at 0x7da1b1e8f340>, <ast.Name object at 0x7da1b1e8e620>]]]]
return[call[name[tuple], parameter[name[param_values]]]] | keyword[def] identifier[_pop_params] ( identifier[cls] , identifier[kwargs] ):
literal[string]
identifier[params] = identifier[cls] . identifier[params]
keyword[if] keyword[not] identifier[isinstance] ( identifier[params] , identifier[Mapping] ):
identifier[params] ={ identifier[k] : identifier[NotSpecified] keyword[for] identifier[k] keyword[in] identifier[params] }
identifier[param_values] =[]
keyword[for] identifier[key] , identifier[default_value] keyword[in] identifier[params] . identifier[items] ():
keyword[try] :
identifier[value] = identifier[kwargs] . identifier[pop] ( identifier[key] , identifier[default_value] )
keyword[if] identifier[value] keyword[is] identifier[NotSpecified] :
keyword[raise] identifier[KeyError] ( identifier[key] )
identifier[hash] ( identifier[value] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] (
identifier[typename] = identifier[cls] . identifier[__name__] ,
identifier[name] = identifier[key]
)
)
keyword[except] identifier[TypeError] :
keyword[raise] identifier[TypeError] (
literal[string]
literal[string] . identifier[format] (
identifier[typename] = identifier[cls] . identifier[__name__] ,
identifier[name] = identifier[key] ,
identifier[value] = identifier[value] ,
)
)
identifier[param_values] . identifier[append] (( identifier[key] , identifier[value] ))
keyword[return] identifier[tuple] ( identifier[param_values] ) | def _pop_params(cls, kwargs):
"""
Pop entries from the `kwargs` passed to cls.__new__ based on the values
in `cls.params`.
Parameters
----------
kwargs : dict
The kwargs passed to cls.__new__.
Returns
-------
params : list[(str, object)]
A list of string, value pairs containing the entries in cls.params.
Raises
------
TypeError
Raised if any parameter values are not passed or not hashable.
"""
params = cls.params
if not isinstance(params, Mapping):
params = {k: NotSpecified for k in params} # depends on [control=['if'], data=[]]
param_values = []
for (key, default_value) in params.items():
try:
value = kwargs.pop(key, default_value)
if value is NotSpecified:
raise KeyError(key) # depends on [control=['if'], data=[]]
# Check here that the value is hashable so that we fail here
# instead of trying to hash the param values tuple later.
hash(value) # depends on [control=['try'], data=[]]
except KeyError:
raise TypeError('{typename} expected a keyword parameter {name!r}.'.format(typename=cls.__name__, name=key)) # depends on [control=['except'], data=[]]
except TypeError:
# Value wasn't hashable.
raise TypeError('{typename} expected a hashable value for parameter {name!r}, but got {value!r} instead.'.format(typename=cls.__name__, name=key, value=value)) # depends on [control=['except'], data=[]]
param_values.append((key, value)) # depends on [control=['for'], data=[]]
return tuple(param_values) |
def get_magsymops(self, data):
    """
    Equivalent to get_symops except for magnetic symmetry groups.
    Separate function since an additional operation for time reversal
    symmetry (which changes magnetic moments on sites) needs to be returned.

    Args:
        data: parsed (mag)CIF block; magnetic symmetry information is read
            from its ``data`` mapping.

    Returns:
        list of MagSymmOp: the magnetic symmetry operations, falling back
        to the identity operation (``"x, y, z, 1"``) when none is found.

    Raises:
        NotImplementedError: if the file uses a non-standard BNS setting.
    """
    magsymmops = []
    # Case 1: the magCIF file lists magnetic symmetry operations explicitly.
    if data.data.get("_space_group_symop_magn_operation.xyz"):
        xyzt = data.data.get("_space_group_symop_magn_operation.xyz")
        if isinstance(xyzt, str):
            xyzt = [xyzt]  # a single operation may be stored as a bare string
        magsymmops = [MagSymmOp.from_xyzt_string(s) for s in xyzt]

        if data.data.get("_space_group_symop_magn_centering.xyz"):
            # Combine every operation with every centering translation.
            xyzt = data.data.get("_space_group_symop_magn_centering.xyz")
            if isinstance(xyzt, str):
                xyzt = [xyzt]
            centering_symops = [MagSymmOp.from_xyzt_string(s) for s in xyzt]

            all_ops = []
            for op in magsymmops:
                for centering_op in centering_symops:
                    # Wrap the combined translation back into the unit cell.
                    new_translation = [i - np.floor(i) for i
                                       in
                                       op.translation_vector + centering_op.translation_vector]
                    new_time_reversal = op.time_reversal * centering_op.time_reversal
                    all_ops.append(
                        MagSymmOp.from_rotation_and_translation_and_time_reversal(
                            rotation_matrix=op.rotation_matrix,
                            translation_vec=new_translation,
                            time_reversal=new_time_reversal))
            magsymmops = all_ops

    # Case 2: only a magnetic space group (BNS label or number) is given.
    elif data.data.get("_space_group_magn.name_BNS") or data.data.get(
            "_space_group_magn.number_BNS"):

        if data.data.get("_space_group_magn.name_BNS"):
            # BNS label for MagneticSpaceGroup()
            bns_id = data.data.get("_space_group_magn.name_BNS")
        else:
            # BNS number for MagneticSpaceGroup(), e.g. "123.45" -> [123, 45]
            bns_id = list(map(int, (
                data.data.get("_space_group_magn.number_BNS").split("."))))

        magnetic_sg = MagneticSpaceGroup(bns_id)

        # Only the standard setting is supported.  The original code
        # mistakenly *returned* the NotImplementedError instance instead of
        # raising it, silently handing the exception object to the caller;
        # fixed to raise.
        if data.data.get("_space_group_magn.transform_BNS_Pp_abc"):
            if data.data.get(
                    "_space_group_magn.transform_BNS_Pp_abc") != "a,b,c;0,0,0":
                raise NotImplementedError(
                    "Non-standard settings not currently supported.")
        elif data.data.get("_space_group_magn.transform_BNS_Pp"):
            raise NotImplementedError(
                "Incomplete specification to implement.")

        magsymmops = magnetic_sg.symmetry_ops

    if not magsymmops:
        # Nothing usable found: warn, record the problem, and fall back to
        # the identity operation so callers always get at least one op.
        msg = "No magnetic symmetry detected, using primitive symmetry."
        warnings.warn(msg)
        self.errors.append(msg)
        magsymmops = [MagSymmOp.from_xyzt_string("x, y, z, 1")]

    return magsymmops
constant[
Equivalent to get_symops except for magnetic symmetry groups.
Separate function since additional operation for time reversal symmetry
(which changes magnetic moments on sites) needs to be returned.
]
variable[magsymmops] assign[=] list[[]]
if call[name[data].data.get, parameter[constant[_space_group_symop_magn_operation.xyz]]] begin[:]
variable[xyzt] assign[=] call[name[data].data.get, parameter[constant[_space_group_symop_magn_operation.xyz]]]
if call[name[isinstance], parameter[name[xyzt], name[str]]] begin[:]
variable[xyzt] assign[=] list[[<ast.Name object at 0x7da1b1c379a0>]]
variable[magsymmops] assign[=] <ast.ListComp object at 0x7da1b1c34040>
if call[name[data].data.get, parameter[constant[_space_group_symop_magn_centering.xyz]]] begin[:]
variable[xyzt] assign[=] call[name[data].data.get, parameter[constant[_space_group_symop_magn_centering.xyz]]]
if call[name[isinstance], parameter[name[xyzt], name[str]]] begin[:]
variable[xyzt] assign[=] list[[<ast.Name object at 0x7da1b1c34610>]]
variable[centering_symops] assign[=] <ast.ListComp object at 0x7da1b1c346a0>
variable[all_ops] assign[=] list[[]]
for taget[name[op]] in starred[name[magsymmops]] begin[:]
for taget[name[centering_op]] in starred[name[centering_symops]] begin[:]
variable[new_translation] assign[=] <ast.ListComp object at 0x7da1b1c34a60>
variable[new_time_reversal] assign[=] binary_operation[name[op].time_reversal * name[centering_op].time_reversal]
call[name[all_ops].append, parameter[call[name[MagSymmOp].from_rotation_and_translation_and_time_reversal, parameter[]]]]
variable[magsymmops] assign[=] name[all_ops]
if <ast.UnaryOp object at 0x7da18f09d4e0> begin[:]
variable[msg] assign[=] constant[No magnetic symmetry detected, using primitive symmetry.]
call[name[warnings].warn, parameter[name[msg]]]
call[name[self].errors.append, parameter[name[msg]]]
variable[magsymmops] assign[=] list[[<ast.Call object at 0x7da1b1c6ab30>]]
return[name[magsymmops]] | keyword[def] identifier[get_magsymops] ( identifier[self] , identifier[data] ):
literal[string]
identifier[magsymmops] =[]
keyword[if] identifier[data] . identifier[data] . identifier[get] ( literal[string] ):
identifier[xyzt] = identifier[data] . identifier[data] . identifier[get] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[xyzt] , identifier[str] ):
identifier[xyzt] =[ identifier[xyzt] ]
identifier[magsymmops] =[ identifier[MagSymmOp] . identifier[from_xyzt_string] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[xyzt] ]
keyword[if] identifier[data] . identifier[data] . identifier[get] ( literal[string] ):
identifier[xyzt] = identifier[data] . identifier[data] . identifier[get] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[xyzt] , identifier[str] ):
identifier[xyzt] =[ identifier[xyzt] ]
identifier[centering_symops] =[ identifier[MagSymmOp] . identifier[from_xyzt_string] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[xyzt] ]
identifier[all_ops] =[]
keyword[for] identifier[op] keyword[in] identifier[magsymmops] :
keyword[for] identifier[centering_op] keyword[in] identifier[centering_symops] :
identifier[new_translation] =[ identifier[i] - identifier[np] . identifier[floor] ( identifier[i] ) keyword[for] identifier[i]
keyword[in]
identifier[op] . identifier[translation_vector] + identifier[centering_op] . identifier[translation_vector] ]
identifier[new_time_reversal] = identifier[op] . identifier[time_reversal] * identifier[centering_op] . identifier[time_reversal]
identifier[all_ops] . identifier[append] (
identifier[MagSymmOp] . identifier[from_rotation_and_translation_and_time_reversal] (
identifier[rotation_matrix] = identifier[op] . identifier[rotation_matrix] ,
identifier[translation_vec] = identifier[new_translation] ,
identifier[time_reversal] = identifier[new_time_reversal] ))
identifier[magsymmops] = identifier[all_ops]
keyword[elif] identifier[data] . identifier[data] . identifier[get] ( literal[string] ) keyword[or] identifier[data] . identifier[data] . identifier[get] (
literal[string] ):
keyword[if] identifier[data] . identifier[data] . identifier[get] ( literal[string] ):
identifier[id] = identifier[data] . identifier[data] . identifier[get] ( literal[string] )
keyword[else] :
identifier[id] = identifier[list] ( identifier[map] ( identifier[int] ,(
identifier[data] . identifier[data] . identifier[get] ( literal[string] ). identifier[split] ( literal[string] ))))
identifier[msg] = identifier[MagneticSpaceGroup] ( identifier[id] )
keyword[if] identifier[data] . identifier[data] . identifier[get] ( literal[string] ):
keyword[if] identifier[data] . identifier[data] . identifier[get] (
literal[string] )!= literal[string] :
keyword[return] identifier[NotImplementedError] (
literal[string] )
keyword[elif] identifier[data] . identifier[data] . identifier[get] ( literal[string] ):
keyword[return] identifier[NotImplementedError] (
literal[string] )
identifier[magsymmops] = identifier[msg] . identifier[symmetry_ops]
keyword[if] keyword[not] identifier[magsymmops] :
identifier[msg] = literal[string]
identifier[warnings] . identifier[warn] ( identifier[msg] )
identifier[self] . identifier[errors] . identifier[append] ( identifier[msg] )
identifier[magsymmops] =[ identifier[MagSymmOp] . identifier[from_xyzt_string] ( literal[string] )]
keyword[return] identifier[magsymmops] | def get_magsymops(self, data):
"""
Equivalent to get_symops except for magnetic symmetry groups.
Separate function since additional operation for time reversal symmetry
(which changes magnetic moments on sites) needs to be returned.
"""
magsymmops = []
# check to see if magCIF file explicitly contains magnetic symmetry operations
if data.data.get('_space_group_symop_magn_operation.xyz'):
xyzt = data.data.get('_space_group_symop_magn_operation.xyz')
if isinstance(xyzt, str):
xyzt = [xyzt] # depends on [control=['if'], data=[]]
magsymmops = [MagSymmOp.from_xyzt_string(s) for s in xyzt]
if data.data.get('_space_group_symop_magn_centering.xyz'):
xyzt = data.data.get('_space_group_symop_magn_centering.xyz')
if isinstance(xyzt, str):
xyzt = [xyzt] # depends on [control=['if'], data=[]]
centering_symops = [MagSymmOp.from_xyzt_string(s) for s in xyzt]
all_ops = []
for op in magsymmops:
for centering_op in centering_symops:
new_translation = [i - np.floor(i) for i in op.translation_vector + centering_op.translation_vector]
new_time_reversal = op.time_reversal * centering_op.time_reversal
all_ops.append(MagSymmOp.from_rotation_and_translation_and_time_reversal(rotation_matrix=op.rotation_matrix, translation_vec=new_translation, time_reversal=new_time_reversal)) # depends on [control=['for'], data=['centering_op']] # depends on [control=['for'], data=['op']]
magsymmops = all_ops # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# else check to see if it specifies a magnetic space group
elif data.data.get('_space_group_magn.name_BNS') or data.data.get('_space_group_magn.number_BNS'):
if data.data.get('_space_group_magn.name_BNS'):
# get BNS label for MagneticSpaceGroup()
id = data.data.get('_space_group_magn.name_BNS') # depends on [control=['if'], data=[]]
else:
# get BNS number for MagneticSpaceGroup()
# by converting string to list of ints
id = list(map(int, data.data.get('_space_group_magn.number_BNS').split('.')))
msg = MagneticSpaceGroup(id)
if data.data.get('_space_group_magn.transform_BNS_Pp_abc'):
if data.data.get('_space_group_magn.transform_BNS_Pp_abc') != 'a,b,c;0,0,0':
return NotImplementedError('Non-standard settings not currently supported.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif data.data.get('_space_group_magn.transform_BNS_Pp'):
return NotImplementedError('Incomplete specification to implement.') # depends on [control=['if'], data=[]]
magsymmops = msg.symmetry_ops # depends on [control=['if'], data=[]]
if not magsymmops:
msg = 'No magnetic symmetry detected, using primitive symmetry.'
warnings.warn(msg)
self.errors.append(msg)
magsymmops = [MagSymmOp.from_xyzt_string('x, y, z, 1')] # depends on [control=['if'], data=[]]
return magsymmops |
def _linearEOM(y, t, pot):
    """
    NAME:
       linearEOM
    PURPOSE:
       the one-dimensional equation-of-motion
    INPUT:
       y - current phase-space position
       t - current time
       pot - (list of) linearPotential instance(s)
    OUTPUT:
       dy/dt
    HISTORY:
       2010-07-13 - Bovy (NYU)
    """
    # dx/dt is the current velocity; dv/dt is the force at the current
    # position evaluated from the supplied potential(s).
    position, velocity = y[0], y[1]
    return [velocity, _evaluatelinearForces(pot, position, t=t)]
constant[
NAME:
linearEOM
PURPOSE:
the one-dimensional equation-of-motion
INPUT:
y - current phase-space position
t - current time
pot - (list of) linearPotential instance(s)
OUTPUT:
dy/dt
HISTORY:
2010-07-13 - Bovy (NYU)
]
return[list[[<ast.Subscript object at 0x7da1b0e8bf70>, <ast.Call object at 0x7da1b0e8b610>]]] | keyword[def] identifier[_linearEOM] ( identifier[y] , identifier[t] , identifier[pot] ):
literal[string]
keyword[return] [ identifier[y] [ literal[int] ], identifier[_evaluatelinearForces] ( identifier[pot] , identifier[y] [ literal[int] ], identifier[t] = identifier[t] )] | def _linearEOM(y, t, pot):
"""
NAME:
linearEOM
PURPOSE:
the one-dimensional equation-of-motion
INPUT:
y - current phase-space position
t - current time
pot - (list of) linearPotential instance(s)
OUTPUT:
dy/dt
HISTORY:
2010-07-13 - Bovy (NYU)
"""
return [y[1], _evaluatelinearForces(pot, y[0], t=t)] |
def _rlfunc(rl, lz, pot):
    """Function that gives rvc-lz"""
    # Zero of rl * vcirc(rl) - lz locates the radius of the circular orbit
    # with angular momentum lz (internal units: use_physical=False).
    circular_speed = vcirc(pot, rl, use_physical=False)
    return rl * circular_speed - lz
constant[Function that gives rvc-lz]
variable[thisvcirc] assign[=] call[name[vcirc], parameter[name[pot], name[rl]]]
return[binary_operation[binary_operation[name[rl] * name[thisvcirc]] - name[lz]]] | keyword[def] identifier[_rlfunc] ( identifier[rl] , identifier[lz] , identifier[pot] ):
literal[string]
identifier[thisvcirc] = identifier[vcirc] ( identifier[pot] , identifier[rl] , identifier[use_physical] = keyword[False] )
keyword[return] identifier[rl] * identifier[thisvcirc] - identifier[lz] | def _rlfunc(rl, lz, pot):
"""Function that gives rvc-lz"""
thisvcirc = vcirc(pot, rl, use_physical=False)
return rl * thisvcirc - lz |
def write(self,
          fout=None,
          fmt=SPARSE,
          schema_only=False,
          data_only=False):
    """
    Write an arff structure to a string.
    """
    assert not (schema_only and data_only), 'Make up your mind.'
    assert fmt in FORMATS, 'Invalid format "%s". Should be one of: %s' % (fmt, ', '.join(FORMATS))
    # Track whether *we* created the buffer; only then is the rendered
    # string returned to the caller.
    created_buffer = fout is None
    if created_buffer:
        fout = StringIO()
    if not data_only:
        # Header: comment lines, relation name, attribute declarations.
        print('% ' + re.sub("\n", "\n% ", '\n'.join(self.comment)), file=fout)
        print("@relation " + self.relation, file=fout)
        self.write_attributes(fout=fout)
    if not schema_only:
        print("@data", file=fout)
        for row in self.data:
            rendered = self.write_line(row, fmt=fmt)
            if rendered:
                print(rendered, file=fout)
    if created_buffer and isinstance(fout, StringIO):
        return fout.getvalue()
constant[
Write an arff structure to a string.
]
assert[<ast.UnaryOp object at 0x7da1b101ce20>]
assert[compare[name[fmt] in name[FORMATS]]]
variable[close] assign[=] constant[False]
if compare[name[fout] is constant[None]] begin[:]
variable[close] assign[=] constant[True]
variable[fout] assign[=] call[name[StringIO], parameter[]]
if <ast.UnaryOp object at 0x7da1b101d9f0> begin[:]
call[name[print], parameter[binary_operation[constant[% ] + call[name[re].sub, parameter[constant[
], constant[
% ], call[constant[
].join, parameter[name[self].comment]]]]]]]
call[name[print], parameter[binary_operation[constant[@relation ] + name[self].relation]]]
call[name[self].write_attributes, parameter[]]
if <ast.UnaryOp object at 0x7da1b1107130> begin[:]
call[name[print], parameter[constant[@data]]]
for taget[name[d]] in starred[name[self].data] begin[:]
variable[line_str] assign[=] call[name[self].write_line, parameter[name[d]]]
if name[line_str] begin[:]
call[name[print], parameter[name[line_str]]]
if <ast.BoolOp object at 0x7da1b11073a0> begin[:]
return[call[name[fout].getvalue, parameter[]]] | keyword[def] identifier[write] ( identifier[self] ,
identifier[fout] = keyword[None] ,
identifier[fmt] = identifier[SPARSE] ,
identifier[schema_only] = keyword[False] ,
identifier[data_only] = keyword[False] ):
literal[string]
keyword[assert] keyword[not] ( identifier[schema_only] keyword[and] identifier[data_only] ), literal[string]
keyword[assert] identifier[fmt] keyword[in] identifier[FORMATS] , literal[string] %( identifier[fmt] , literal[string] . identifier[join] ( identifier[FORMATS] ))
identifier[close] = keyword[False]
keyword[if] identifier[fout] keyword[is] keyword[None] :
identifier[close] = keyword[True]
identifier[fout] = identifier[StringIO] ()
keyword[if] keyword[not] identifier[data_only] :
identifier[print] ( literal[string] + identifier[re] . identifier[sub] ( literal[string] , literal[string] , literal[string] . identifier[join] ( identifier[self] . identifier[comment] )), identifier[file] = identifier[fout] )
identifier[print] ( literal[string] + identifier[self] . identifier[relation] , identifier[file] = identifier[fout] )
identifier[self] . identifier[write_attributes] ( identifier[fout] = identifier[fout] )
keyword[if] keyword[not] identifier[schema_only] :
identifier[print] ( literal[string] , identifier[file] = identifier[fout] )
keyword[for] identifier[d] keyword[in] identifier[self] . identifier[data] :
identifier[line_str] = identifier[self] . identifier[write_line] ( identifier[d] , identifier[fmt] = identifier[fmt] )
keyword[if] identifier[line_str] :
identifier[print] ( identifier[line_str] , identifier[file] = identifier[fout] )
keyword[if] identifier[isinstance] ( identifier[fout] , identifier[StringIO] ) keyword[and] identifier[close] :
keyword[return] identifier[fout] . identifier[getvalue] () | def write(self, fout=None, fmt=SPARSE, schema_only=False, data_only=False):
"""
Write an arff structure to a string.
"""
assert not (schema_only and data_only), 'Make up your mind.'
assert fmt in FORMATS, 'Invalid format "%s". Should be one of: %s' % (fmt, ', '.join(FORMATS))
close = False
if fout is None:
close = True
fout = StringIO() # depends on [control=['if'], data=['fout']]
if not data_only:
print('% ' + re.sub('\n', '\n% ', '\n'.join(self.comment)), file=fout)
print('@relation ' + self.relation, file=fout)
self.write_attributes(fout=fout) # depends on [control=['if'], data=[]]
if not schema_only:
print('@data', file=fout)
for d in self.data:
line_str = self.write_line(d, fmt=fmt)
if line_str:
print(line_str, file=fout) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] # depends on [control=['if'], data=[]]
if isinstance(fout, StringIO) and close:
return fout.getvalue() # depends on [control=['if'], data=[]] |
def do_macro_kwarg(parser, token):
    """ Function taking a parsed template tag
    to a MacroKwargNode.
    """
    bits = token.split_contents()
    if len(bits) != 2:
        # Exactly <tag_name> <keyword> is expected.
        raise template.TemplateSyntaxError(
            "{0} tag requires exactly one argument, a keyword".format(
                token.contents.split()[0]))
    tag_name, keyword = bits
    # add some validation of the keyword argument here.
    nodelist = parser.parse(('endmacro_kwarg',))
    parser.delete_first_token()
    return MacroKwargNode(keyword, nodelist)
constant[ Function taking a parsed template tag
to a MacroKwargNode.
]
<ast.Try object at 0x7da207f98d90>
variable[nodelist] assign[=] call[name[parser].parse, parameter[tuple[[<ast.Constant object at 0x7da207f9a6b0>]]]]
call[name[parser].delete_first_token, parameter[]]
return[call[name[MacroKwargNode], parameter[name[keyword], name[nodelist]]]] | keyword[def] identifier[do_macro_kwarg] ( identifier[parser] , identifier[token] ):
literal[string]
keyword[try] :
identifier[tag_name] , identifier[keyword] = identifier[token] . identifier[split_contents] ()
keyword[except] identifier[ValueError] :
keyword[raise] identifier[template] . identifier[TemplateSyntaxError] (
literal[string] . identifier[format] (
identifier[token] . identifier[contents] . identifier[split] ()[ literal[int] ]))
identifier[nodelist] = identifier[parser] . identifier[parse] (( literal[string] ,))
identifier[parser] . identifier[delete_first_token] ()
keyword[return] identifier[MacroKwargNode] ( identifier[keyword] , identifier[nodelist] ) | def do_macro_kwarg(parser, token):
""" Function taking a parsed template tag
to a MacroKwargNode.
"""
try:
(tag_name, keyword) = token.split_contents() # depends on [control=['try'], data=[]]
except ValueError:
raise template.TemplateSyntaxError('{0} tag requires exactly one argument, a keyword'.format(token.contents.split()[0])) # depends on [control=['except'], data=[]]
# add some validation of the keyword argument here.
nodelist = parser.parse(('endmacro_kwarg',))
parser.delete_first_token()
return MacroKwargNode(keyword, nodelist) |
def available_state(self, state: State) -> Tuple[State, ...]:
    """ Return the state reachable from a given state. """
    # Collect the states reachable by toggling each gene in turn.
    reachable = []
    for gene in self.genes:
        reachable += self.available_state_for_gene(gene, state)
    # Drop the current state itself, unless it is the only reachable one.
    if state in reachable and len(reachable) > 1:
        reachable.remove(state)
    return tuple(reachable)
constant[ Return the state reachable from a given state. ]
variable[result] assign[=] list[[]]
for taget[name[gene]] in starred[name[self].genes] begin[:]
call[name[result].extend, parameter[call[name[self].available_state_for_gene, parameter[name[gene], name[state]]]]]
if <ast.BoolOp object at 0x7da207f00b80> begin[:]
call[name[result].remove, parameter[name[state]]]
return[call[name[tuple], parameter[name[result]]]] | keyword[def] identifier[available_state] ( identifier[self] , identifier[state] : identifier[State] )-> identifier[Tuple] [ identifier[State] ,...]:
literal[string]
identifier[result] =[]
keyword[for] identifier[gene] keyword[in] identifier[self] . identifier[genes] :
identifier[result] . identifier[extend] ( identifier[self] . identifier[available_state_for_gene] ( identifier[gene] , identifier[state] ))
keyword[if] identifier[len] ( identifier[result] )> literal[int] keyword[and] identifier[state] keyword[in] identifier[result] :
identifier[result] . identifier[remove] ( identifier[state] )
keyword[return] identifier[tuple] ( identifier[result] ) | def available_state(self, state: State) -> Tuple[State, ...]:
""" Return the state reachable from a given state. """
result = []
for gene in self.genes:
result.extend(self.available_state_for_gene(gene, state)) # depends on [control=['for'], data=['gene']]
if len(result) > 1 and state in result:
result.remove(state) # depends on [control=['if'], data=[]]
return tuple(result) |
def learn_q(self, predicted_q_arr, real_q_arr):
    '''
    Inference/learning of Q-Values: back-propagate the loss between the
    predicted and real Q-Values through the CNN and take one optimization
    step.  (Fixes the "Infernce" typo and removes a dead commented-out
    block that was left behind as a no-op string literal.)

    Args:
        predicted_q_arr:    `np.ndarray` of predicted Q-Values.
        real_q_arr:         `np.ndarray` of real Q-Values.
    '''
    # Loss between prediction and target, and its gradient w.r.t. output.
    loss = self.__computable_loss.compute_loss(predicted_q_arr, real_q_arr)
    delta_arr = self.__computable_loss.compute_delta(predicted_q_arr, real_q_arr)
    # Back-propagate the gradient and update the CNN parameters once.
    delta_arr = self.__cnn.back_propagation(delta_arr)
    self.__cnn.optimize(self.__learning_rate, 1)
    # Keep the loss history so convergence can be monitored.
    self.__loss_list.append(loss)
constant[
Infernce Q-Value.
Args:
predicted_q_arr: `np.ndarray` of predicted Q-Values.
real_q_arr: `np.ndarray` of real Q-Values.
]
constant[
if self.__q_shape is None:
raise ValueError("Before learning, You should execute `__inference_q`.")
]
variable[loss] assign[=] call[name[self].__computable_loss.compute_loss, parameter[name[predicted_q_arr], name[real_q_arr]]]
variable[delta_arr] assign[=] call[name[self].__computable_loss.compute_delta, parameter[name[predicted_q_arr], name[real_q_arr]]]
variable[delta_arr] assign[=] call[name[self].__cnn.back_propagation, parameter[name[delta_arr]]]
call[name[self].__cnn.optimize, parameter[name[self].__learning_rate, constant[1]]]
call[name[self].__loss_list.append, parameter[name[loss]]] | keyword[def] identifier[learn_q] ( identifier[self] , identifier[predicted_q_arr] , identifier[real_q_arr] ):
literal[string]
literal[string]
identifier[loss] = identifier[self] . identifier[__computable_loss] . identifier[compute_loss] ( identifier[predicted_q_arr] , identifier[real_q_arr] )
identifier[delta_arr] = identifier[self] . identifier[__computable_loss] . identifier[compute_delta] ( identifier[predicted_q_arr] , identifier[real_q_arr] )
identifier[delta_arr] = identifier[self] . identifier[__cnn] . identifier[back_propagation] ( identifier[delta_arr] )
identifier[self] . identifier[__cnn] . identifier[optimize] ( identifier[self] . identifier[__learning_rate] , literal[int] )
identifier[self] . identifier[__loss_list] . identifier[append] ( identifier[loss] ) | def learn_q(self, predicted_q_arr, real_q_arr):
"""
Infernce Q-Value.
Args:
predicted_q_arr: `np.ndarray` of predicted Q-Values.
real_q_arr: `np.ndarray` of real Q-Values.
"""
'\n if self.__q_shape is None:\n raise ValueError("Before learning, You should execute `__inference_q`.")\n '
loss = self.__computable_loss.compute_loss(predicted_q_arr, real_q_arr)
delta_arr = self.__computable_loss.compute_delta(predicted_q_arr, real_q_arr)
delta_arr = self.__cnn.back_propagation(delta_arr)
self.__cnn.optimize(self.__learning_rate, 1)
self.__loss_list.append(loss) |
def format(self, fmt, locale=None):
    """
    Formats the instance using the given format.

    :param fmt: The format to use
    :type fmt: str

    :param locale: The locale to use
    :type locale: str or None

    :rtype: str
    """
    # All formatting work is delegated to the attached formatter object.
    formatter = self._formatter
    return formatter.format(self, fmt, locale)
constant[
Formats the instance using the given format.
:param fmt: The format to use
:type fmt: str
:param locale: The locale to use
:type locale: str or None
:rtype: str
]
return[call[name[self]._formatter.format, parameter[name[self], name[fmt], name[locale]]]] | keyword[def] identifier[format] ( identifier[self] , identifier[fmt] , identifier[locale] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_formatter] . identifier[format] ( identifier[self] , identifier[fmt] , identifier[locale] ) | def format(self, fmt, locale=None):
"""
Formats the instance using the given format.
:param fmt: The format to use
:type fmt: str
:param locale: The locale to use
:type locale: str or None
:rtype: str
"""
return self._formatter.format(self, fmt, locale) |
def pulse_magnitude(time, magnitude, start, repeat_time=0):
    """ Implements xmile's PULSE function

    PULSE: Generate a one-DT wide pulse at the given time
    Parameters: 2 or 3: (magnitude, first time[, interval])
    Without interval or when interval = 0, the PULSE is generated only once

    Example: PULSE(20, 12, 5) generates a pulse at time 12, 17, 22, etc.
    (NOTE(review): the original docstring claimed a value of 20/DT, but the
    code returns magnitude * DT; the code's behavior is preserved here.)

    In range [-inf, start) returns 0
    In range [start + n * repeat_time, start + n * repeat_time + dt)
    returns magnitude * dt
    In range [start + n * repeat_time + dt, start + (n + 1) * repeat_time)
    returns 0
    """
    t = time()
    dt = time.step()
    small = 1e-6  # What is considered zero according to Vensim Help

    if repeat_time <= small:
        # Single pulse: fires only in the one step containing `start`
        # (abs() tolerates float drift of t around the pulse time).
        return magnitude * dt if abs(t - start) < dt else 0

    # Bug fix: Python's % always yields a non-negative result, so the
    # modulo test below also fired for t < start, generating pulses
    # *before* the first pulse time in violation of the documented
    # "[-inf, start) returns 0" contract.  Guard against that here.
    if t < start - small:
        return 0

    # abs() around the modulo was redundant (the result is already >= 0)
    # and has been dropped; behavior at t >= start is unchanged.
    return magnitude * dt if (t - start) % repeat_time < dt else 0
constant[ Implements xmile's PULSE function
PULSE: Generate a one-DT wide pulse at the given time
Parameters: 2 or 3: (magnitude, first time[, interval])
Without interval or when interval = 0, the PULSE is generated only once
Example: PULSE(20, 12, 5) generates a pulse value of 20/DT at time 12, 17, 22, etc.
In rage [-inf, start) returns 0
In range [start + n * repeat_time, start + n * repeat_time + dt) return magnitude/dt
In rage [start + n * repeat_time + dt, start + (n + 1) * repeat_time) return 0
]
variable[t] assign[=] call[name[time], parameter[]]
variable[small] assign[=] constant[1e-06]
if compare[name[repeat_time] less_or_equal[<=] name[small]] begin[:]
if compare[call[name[abs], parameter[binary_operation[name[t] - name[start]]]] less[<] call[name[time].step, parameter[]]] begin[:]
return[binary_operation[name[magnitude] * call[name[time].step, parameter[]]]] | keyword[def] identifier[pulse_magnitude] ( identifier[time] , identifier[magnitude] , identifier[start] , identifier[repeat_time] = literal[int] ):
literal[string]
identifier[t] = identifier[time] ()
identifier[small] = literal[int]
keyword[if] identifier[repeat_time] <= identifier[small] :
keyword[if] identifier[abs] ( identifier[t] - identifier[start] )< identifier[time] . identifier[step] ():
keyword[return] identifier[magnitude] * identifier[time] . identifier[step] ()
keyword[else] :
keyword[return] literal[int]
keyword[else] :
keyword[if] identifier[abs] (( identifier[t] - identifier[start] )% identifier[repeat_time] )< identifier[time] . identifier[step] ():
keyword[return] identifier[magnitude] * identifier[time] . identifier[step] ()
keyword[else] :
keyword[return] literal[int] | def pulse_magnitude(time, magnitude, start, repeat_time=0):
""" Implements xmile's PULSE function
PULSE: Generate a one-DT wide pulse at the given time
Parameters: 2 or 3: (magnitude, first time[, interval])
Without interval or when interval = 0, the PULSE is generated only once
Example: PULSE(20, 12, 5) generates a pulse value of 20/DT at time 12, 17, 22, etc.
In rage [-inf, start) returns 0
In range [start + n * repeat_time, start + n * repeat_time + dt) return magnitude/dt
In rage [start + n * repeat_time + dt, start + (n + 1) * repeat_time) return 0
"""
t = time()
small = 1e-06 # What is considered zero according to Vensim Help
if repeat_time <= small:
if abs(t - start) < time.step():
return magnitude * time.step() # depends on [control=['if'], data=[]]
else:
return 0 # depends on [control=['if'], data=[]]
elif abs((t - start) % repeat_time) < time.step():
return magnitude * time.step() # depends on [control=['if'], data=[]]
else:
return 0 |
def set_requestable(self, requestable=True):
    # type: (bool) -> None
    """Set the dataset to be of type requestable or not

    Args:
        requestable (bool): Set whether dataset is requestable. Defaults to True.

    Returns:
        None
    """
    dataset = self.data
    dataset['is_requestdata_type'] = requestable
    if requestable:
        # A requestable dataset is forced to be public.
        dataset['private'] = False
constant[Set the dataset to be of type requestable or not
Args:
requestable (bool): Set whether dataset is requestable. Defaults to True.
Returns:
None
]
call[name[self].data][constant[is_requestdata_type]] assign[=] name[requestable]
if name[requestable] begin[:]
call[name[self].data][constant[private]] assign[=] constant[False] | keyword[def] identifier[set_requestable] ( identifier[self] , identifier[requestable] = keyword[True] ):
literal[string]
identifier[self] . identifier[data] [ literal[string] ]= identifier[requestable]
keyword[if] identifier[requestable] :
identifier[self] . identifier[data] [ literal[string] ]= keyword[False] | def set_requestable(self, requestable=True):
# type: (bool) -> None
'Set the dataset to be of type requestable or not\n\n Args:\n requestable (bool): Set whether dataset is requestable. Defaults to True.\n\n Returns:\n None\n '
self.data['is_requestdata_type'] = requestable
if requestable:
self.data['private'] = False # depends on [control=['if'], data=[]] |
def union_sql(view_name, *tables):
    """Return SQL that creates one big VIEW as the UNION of many SELECTs.

    The original doctest omitted the ``AS`` keyword that the code actually
    emits; it is fixed below.

    >>> union_sql('global', 'foo', 'bar', 'baz')
    'CREATE VIEW global AS SELECT * FROM foo UNION SELECT * FROM bar UNION SELECT * FROM baz'

    Raises:
        ValueError: if no table names are given.  (ValueError subclasses
        Exception, so existing ``except Exception`` callers still work.)
    """
    if not tables:
        raise ValueError("no tables given")
    # str.join replaces the original manual concatenation loop.
    return ("CREATE VIEW %s AS SELECT * FROM " % view_name
            + " UNION SELECT * FROM ".join(tables))
constant[This function generates string containing SQL code, that creates
a big VIEW, that consists of many SELECTs.
>>> utils.union_sql('global', 'foo', 'bar', 'baz')
'CREATE VIEW global SELECT * FROM foo UNION SELECT * FROM bar UNION SELECT * FROM baz'
]
if <ast.UnaryOp object at 0x7da1b0aa4370> begin[:]
<ast.Raise object at 0x7da1b0aa45e0>
variable[ret] assign[=] constant[]
variable[pre] assign[=] binary_operation[constant[CREATE VIEW %s AS SELECT * FROM ] <ast.Mod object at 0x7da2590d6920> name[view_name]]
for taget[name[table]] in starred[name[tables]] begin[:]
<ast.AugAssign object at 0x7da1b0a06890>
variable[pre] assign[=] constant[ UNION SELECT * FROM ]
return[name[ret]] | keyword[def] identifier[union_sql] ( identifier[view_name] ,* identifier[tables] ):
literal[string]
keyword[if] keyword[not] identifier[tables] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[ret] = literal[string]
identifier[pre] = literal[string] % identifier[view_name]
keyword[for] identifier[table] keyword[in] identifier[tables] :
identifier[ret] += identifier[pre] + identifier[table]
identifier[pre] = literal[string]
keyword[return] identifier[ret] | def union_sql(view_name, *tables):
"""This function generates string containing SQL code, that creates
a big VIEW, that consists of many SELECTs.
>>> utils.union_sql('global', 'foo', 'bar', 'baz')
'CREATE VIEW global SELECT * FROM foo UNION SELECT * FROM bar UNION SELECT * FROM baz'
"""
if not tables:
raise Exception('no tables given') # depends on [control=['if'], data=[]]
ret = ''
pre = 'CREATE VIEW %s AS SELECT * FROM ' % view_name
for table in tables:
ret += pre + table
pre = ' UNION SELECT * FROM ' # depends on [control=['for'], data=['table']]
return ret |
def non_parallel(self, vector):
"""Return True if vectors are non-parallel.
Non-parallel vectors are vectors which are neither parallel
nor perpendicular to each other.
"""
if (self.is_parallel(vector) is not True and
self.is_perpendicular(vector) is not True):
return True
return False | def function[non_parallel, parameter[self, vector]]:
constant[Return True if vectors are non-parallel.
Non-parallel vectors are vectors which are neither parallel
nor perpendicular to each other.
]
if <ast.BoolOp object at 0x7da1b0fac280> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[non_parallel] ( identifier[self] , identifier[vector] ):
literal[string]
keyword[if] ( identifier[self] . identifier[is_parallel] ( identifier[vector] ) keyword[is] keyword[not] keyword[True] keyword[and]
identifier[self] . identifier[is_perpendicular] ( identifier[vector] ) keyword[is] keyword[not] keyword[True] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def non_parallel(self, vector):
"""Return True if vectors are non-parallel.
Non-parallel vectors are vectors which are neither parallel
nor perpendicular to each other.
"""
if self.is_parallel(vector) is not True and self.is_perpendicular(vector) is not True:
return True # depends on [control=['if'], data=[]]
return False |
def add_effect(effect_id, *args, **kwargs):
'''If inside a side-effect, adds an effect to it.'''
effect = fiber.get_stack_var(SIDE_EFFECT_TAG)
if effect is None:
return False
effect.add_effect(effect_id, *args, **kwargs)
return True | def function[add_effect, parameter[effect_id]]:
constant[If inside a side-effect, adds an effect to it.]
variable[effect] assign[=] call[name[fiber].get_stack_var, parameter[name[SIDE_EFFECT_TAG]]]
if compare[name[effect] is constant[None]] begin[:]
return[constant[False]]
call[name[effect].add_effect, parameter[name[effect_id], <ast.Starred object at 0x7da18bcca1d0>]]
return[constant[True]] | keyword[def] identifier[add_effect] ( identifier[effect_id] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[effect] = identifier[fiber] . identifier[get_stack_var] ( identifier[SIDE_EFFECT_TAG] )
keyword[if] identifier[effect] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[effect] . identifier[add_effect] ( identifier[effect_id] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] keyword[True] | def add_effect(effect_id, *args, **kwargs):
"""If inside a side-effect, adds an effect to it."""
effect = fiber.get_stack_var(SIDE_EFFECT_TAG)
if effect is None:
return False # depends on [control=['if'], data=[]]
effect.add_effect(effect_id, *args, **kwargs)
return True |
def encode(codec, stream):
"""Wraps openjp2 library function opj_encode.
Encode an image into a JPEG 2000 codestream.
Parameters
----------
codec : CODEC_TYPE
The jpeg2000 codec.
stream : STREAM_TYPE_P
The stream to which data is written.
Raises
------
RuntimeError
If the OpenJPEG library routine opj_encode fails.
"""
OPENJP2.opj_encode.argtypes = [CODEC_TYPE, STREAM_TYPE_P]
OPENJP2.opj_encode.restype = check_error
OPENJP2.opj_encode(codec, stream) | def function[encode, parameter[codec, stream]]:
constant[Wraps openjp2 library function opj_encode.
Encode an image into a JPEG 2000 codestream.
Parameters
----------
codec : CODEC_TYPE
The jpeg2000 codec.
stream : STREAM_TYPE_P
The stream to which data is written.
Raises
------
RuntimeError
If the OpenJPEG library routine opj_encode fails.
]
name[OPENJP2].opj_encode.argtypes assign[=] list[[<ast.Name object at 0x7da204622140>, <ast.Name object at 0x7da204621990>]]
name[OPENJP2].opj_encode.restype assign[=] name[check_error]
call[name[OPENJP2].opj_encode, parameter[name[codec], name[stream]]] | keyword[def] identifier[encode] ( identifier[codec] , identifier[stream] ):
literal[string]
identifier[OPENJP2] . identifier[opj_encode] . identifier[argtypes] =[ identifier[CODEC_TYPE] , identifier[STREAM_TYPE_P] ]
identifier[OPENJP2] . identifier[opj_encode] . identifier[restype] = identifier[check_error]
identifier[OPENJP2] . identifier[opj_encode] ( identifier[codec] , identifier[stream] ) | def encode(codec, stream):
"""Wraps openjp2 library function opj_encode.
Encode an image into a JPEG 2000 codestream.
Parameters
----------
codec : CODEC_TYPE
The jpeg2000 codec.
stream : STREAM_TYPE_P
The stream to which data is written.
Raises
------
RuntimeError
If the OpenJPEG library routine opj_encode fails.
"""
OPENJP2.opj_encode.argtypes = [CODEC_TYPE, STREAM_TYPE_P]
OPENJP2.opj_encode.restype = check_error
OPENJP2.opj_encode(codec, stream) |
def CreateMock(self, class_to_mock):
"""Create a new mock object.
Args:
# class_to_mock: the class to be mocked
class_to_mock: class
Returns:
MockObject that can be used as the class_to_mock would be.
"""
new_mock = MockObject(class_to_mock)
self._mock_objects.append(new_mock)
return new_mock | def function[CreateMock, parameter[self, class_to_mock]]:
constant[Create a new mock object.
Args:
# class_to_mock: the class to be mocked
class_to_mock: class
Returns:
MockObject that can be used as the class_to_mock would be.
]
variable[new_mock] assign[=] call[name[MockObject], parameter[name[class_to_mock]]]
call[name[self]._mock_objects.append, parameter[name[new_mock]]]
return[name[new_mock]] | keyword[def] identifier[CreateMock] ( identifier[self] , identifier[class_to_mock] ):
literal[string]
identifier[new_mock] = identifier[MockObject] ( identifier[class_to_mock] )
identifier[self] . identifier[_mock_objects] . identifier[append] ( identifier[new_mock] )
keyword[return] identifier[new_mock] | def CreateMock(self, class_to_mock):
"""Create a new mock object.
Args:
# class_to_mock: the class to be mocked
class_to_mock: class
Returns:
MockObject that can be used as the class_to_mock would be.
"""
new_mock = MockObject(class_to_mock)
self._mock_objects.append(new_mock)
return new_mock |
def ip_hide_community_list_holder_community_list_standard_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def")
hide_community_list_holder = ET.SubElement(ip, "hide-community-list-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
community_list = ET.SubElement(hide_community_list_holder, "community-list")
standard = ET.SubElement(community_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
seq_keyword_key = ET.SubElement(standard, "seq-keyword")
seq_keyword_key.text = kwargs.pop('seq_keyword')
instance = ET.SubElement(standard, "instance")
instance.text = kwargs.pop('instance')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[ip_hide_community_list_holder_community_list_standard_instance, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[ip] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ip]]]
variable[hide_community_list_holder] assign[=] call[name[ET].SubElement, parameter[name[ip], constant[hide-community-list-holder]]]
variable[community_list] assign[=] call[name[ET].SubElement, parameter[name[hide_community_list_holder], constant[community-list]]]
variable[standard] assign[=] call[name[ET].SubElement, parameter[name[community_list], constant[standard]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[seq_keyword_key] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[seq-keyword]]]
name[seq_keyword_key].text assign[=] call[name[kwargs].pop, parameter[constant[seq_keyword]]]
variable[instance] assign[=] call[name[ET].SubElement, parameter[name[standard], constant[instance]]]
name[instance].text assign[=] call[name[kwargs].pop, parameter[constant[instance]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[ip_hide_community_list_holder_community_list_standard_instance] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[hide_community_list_holder] = identifier[ET] . identifier[SubElement] ( identifier[ip] , literal[string] , identifier[xmlns] = literal[string] )
identifier[community_list] = identifier[ET] . identifier[SubElement] ( identifier[hide_community_list_holder] , literal[string] )
identifier[standard] = identifier[ET] . identifier[SubElement] ( identifier[community_list] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[seq_keyword_key] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[seq_keyword_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[instance] = identifier[ET] . identifier[SubElement] ( identifier[standard] , literal[string] )
identifier[instance] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def ip_hide_community_list_holder_community_list_standard_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
ip = ET.SubElement(config, 'ip', xmlns='urn:brocade.com:mgmt:brocade-common-def')
hide_community_list_holder = ET.SubElement(ip, 'hide-community-list-holder', xmlns='urn:brocade.com:mgmt:brocade-ip-policy')
community_list = ET.SubElement(hide_community_list_holder, 'community-list')
standard = ET.SubElement(community_list, 'standard')
name_key = ET.SubElement(standard, 'name')
name_key.text = kwargs.pop('name')
seq_keyword_key = ET.SubElement(standard, 'seq-keyword')
seq_keyword_key.text = kwargs.pop('seq_keyword')
instance = ET.SubElement(standard, 'instance')
instance.text = kwargs.pop('instance')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def _font_name(self, ufo):
"""Generate a postscript-style font name."""
family_name = (
ufo.info.familyName.replace(" ", "")
if ufo.info.familyName is not None
else "None"
)
style_name = (
ufo.info.styleName.replace(" ", "")
if ufo.info.styleName is not None
else "None"
)
return "{}-{}".format(family_name, style_name) | def function[_font_name, parameter[self, ufo]]:
constant[Generate a postscript-style font name.]
variable[family_name] assign[=] <ast.IfExp object at 0x7da1b1112d70>
variable[style_name] assign[=] <ast.IfExp object at 0x7da1b12c4f10>
return[call[constant[{}-{}].format, parameter[name[family_name], name[style_name]]]] | keyword[def] identifier[_font_name] ( identifier[self] , identifier[ufo] ):
literal[string]
identifier[family_name] =(
identifier[ufo] . identifier[info] . identifier[familyName] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[ufo] . identifier[info] . identifier[familyName] keyword[is] keyword[not] keyword[None]
keyword[else] literal[string]
)
identifier[style_name] =(
identifier[ufo] . identifier[info] . identifier[styleName] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[ufo] . identifier[info] . identifier[styleName] keyword[is] keyword[not] keyword[None]
keyword[else] literal[string]
)
keyword[return] literal[string] . identifier[format] ( identifier[family_name] , identifier[style_name] ) | def _font_name(self, ufo):
"""Generate a postscript-style font name."""
family_name = ufo.info.familyName.replace(' ', '') if ufo.info.familyName is not None else 'None'
style_name = ufo.info.styleName.replace(' ', '') if ufo.info.styleName is not None else 'None'
return '{}-{}'.format(family_name, style_name) |
def cmd_queue_peaks(self):
"""Generate a list of the requests peaks on the queue.
A queue peak is defined by the biggest value on the backend queue
on a series of log lines that are between log lines without being
queued.
.. warning::
Allow to configure up to which peak can be ignored. Currently
set to 1.
"""
threshold = 1
peaks = []
current_peak = 0
current_queue = 0
current_span = 0
first_on_queue = None
for line in self._valid_lines:
current_queue = line.queue_backend
if current_queue > 0:
current_span += 1
if first_on_queue is None:
first_on_queue = line.accept_date
if current_queue == 0 and current_peak > threshold:
data = {
'peak': current_peak,
'span': current_span,
'first': first_on_queue,
'last': line.accept_date,
}
peaks.append(data)
current_peak = 0
current_span = 0
first_on_queue = None
if current_queue > current_peak:
current_peak = current_queue
# case of a series that does not end
if current_queue > 0 and current_peak > threshold:
data = {
'peak': current_peak,
'span': current_span,
'first': first_on_queue,
'last': line.accept_date,
}
peaks.append(data)
return peaks | def function[cmd_queue_peaks, parameter[self]]:
constant[Generate a list of the requests peaks on the queue.
A queue peak is defined by the biggest value on the backend queue
on a series of log lines that are between log lines without being
queued.
.. warning::
Allow to configure up to which peak can be ignored. Currently
set to 1.
]
variable[threshold] assign[=] constant[1]
variable[peaks] assign[=] list[[]]
variable[current_peak] assign[=] constant[0]
variable[current_queue] assign[=] constant[0]
variable[current_span] assign[=] constant[0]
variable[first_on_queue] assign[=] constant[None]
for taget[name[line]] in starred[name[self]._valid_lines] begin[:]
variable[current_queue] assign[=] name[line].queue_backend
if compare[name[current_queue] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b10413c0>
if compare[name[first_on_queue] is constant[None]] begin[:]
variable[first_on_queue] assign[=] name[line].accept_date
if <ast.BoolOp object at 0x7da1b10431f0> begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1043070>, <ast.Constant object at 0x7da1b10421a0>, <ast.Constant object at 0x7da1b1040ac0>, <ast.Constant object at 0x7da1b10424a0>], [<ast.Name object at 0x7da1b1041a50>, <ast.Name object at 0x7da1b10412a0>, <ast.Name object at 0x7da1b10405b0>, <ast.Attribute object at 0x7da1b10415d0>]]
call[name[peaks].append, parameter[name[data]]]
variable[current_peak] assign[=] constant[0]
variable[current_span] assign[=] constant[0]
variable[first_on_queue] assign[=] constant[None]
if compare[name[current_queue] greater[>] name[current_peak]] begin[:]
variable[current_peak] assign[=] name[current_queue]
if <ast.BoolOp object at 0x7da1b1041c90> begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1040f40>, <ast.Constant object at 0x7da1b1042fb0>, <ast.Constant object at 0x7da1b1040280>, <ast.Constant object at 0x7da1b1040580>], [<ast.Name object at 0x7da1b1042d10>, <ast.Name object at 0x7da1b1040b80>, <ast.Name object at 0x7da1b10420e0>, <ast.Attribute object at 0x7da1b1042c80>]]
call[name[peaks].append, parameter[name[data]]]
return[name[peaks]] | keyword[def] identifier[cmd_queue_peaks] ( identifier[self] ):
literal[string]
identifier[threshold] = literal[int]
identifier[peaks] =[]
identifier[current_peak] = literal[int]
identifier[current_queue] = literal[int]
identifier[current_span] = literal[int]
identifier[first_on_queue] = keyword[None]
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[_valid_lines] :
identifier[current_queue] = identifier[line] . identifier[queue_backend]
keyword[if] identifier[current_queue] > literal[int] :
identifier[current_span] += literal[int]
keyword[if] identifier[first_on_queue] keyword[is] keyword[None] :
identifier[first_on_queue] = identifier[line] . identifier[accept_date]
keyword[if] identifier[current_queue] == literal[int] keyword[and] identifier[current_peak] > identifier[threshold] :
identifier[data] ={
literal[string] : identifier[current_peak] ,
literal[string] : identifier[current_span] ,
literal[string] : identifier[first_on_queue] ,
literal[string] : identifier[line] . identifier[accept_date] ,
}
identifier[peaks] . identifier[append] ( identifier[data] )
identifier[current_peak] = literal[int]
identifier[current_span] = literal[int]
identifier[first_on_queue] = keyword[None]
keyword[if] identifier[current_queue] > identifier[current_peak] :
identifier[current_peak] = identifier[current_queue]
keyword[if] identifier[current_queue] > literal[int] keyword[and] identifier[current_peak] > identifier[threshold] :
identifier[data] ={
literal[string] : identifier[current_peak] ,
literal[string] : identifier[current_span] ,
literal[string] : identifier[first_on_queue] ,
literal[string] : identifier[line] . identifier[accept_date] ,
}
identifier[peaks] . identifier[append] ( identifier[data] )
keyword[return] identifier[peaks] | def cmd_queue_peaks(self):
"""Generate a list of the requests peaks on the queue.
A queue peak is defined by the biggest value on the backend queue
on a series of log lines that are between log lines without being
queued.
.. warning::
Allow to configure up to which peak can be ignored. Currently
set to 1.
"""
threshold = 1
peaks = []
current_peak = 0
current_queue = 0
current_span = 0
first_on_queue = None
for line in self._valid_lines:
current_queue = line.queue_backend
if current_queue > 0:
current_span += 1
if first_on_queue is None:
first_on_queue = line.accept_date # depends on [control=['if'], data=['first_on_queue']] # depends on [control=['if'], data=[]]
if current_queue == 0 and current_peak > threshold:
data = {'peak': current_peak, 'span': current_span, 'first': first_on_queue, 'last': line.accept_date}
peaks.append(data)
current_peak = 0
current_span = 0
first_on_queue = None # depends on [control=['if'], data=[]]
if current_queue > current_peak:
current_peak = current_queue # depends on [control=['if'], data=['current_queue', 'current_peak']] # depends on [control=['for'], data=['line']]
# case of a series that does not end
if current_queue > 0 and current_peak > threshold:
data = {'peak': current_peak, 'span': current_span, 'first': first_on_queue, 'last': line.accept_date}
peaks.append(data) # depends on [control=['if'], data=[]]
return peaks |
def ManagedCreate(super_cls):
"""Dynamically creates a `create` method for a `ObjectSet.Managed` class
that calls the `super_cls.create`.
The first positional argument that is passed to the `super_cls.create` is
the `_manager` that was set using `ObjectSet.Managed`. The created object
is added to the `ObjectSet.Managed` also placed in the correct
`_data[field]` and `_orig_data[field]` for the `_manager` object.
"""
@wraps(super_cls.create)
async def _create(self, *args, **kwargs):
cls = type(self)
manager = getattr(cls, '_manager', None)
manager_field = getattr(cls, '_manager_field', None)
if manager is not None and manager_field is not None:
args = (manager,) + args
new_obj = await super_cls.create(*args, **kwargs)
self._items = self._items + [new_obj]
manager._data[manager_field.name] = (
manager._data[manager_field.name] +
[new_obj._data])
manager._orig_data[manager_field.name] = (
manager._orig_data[manager_field.name] +
[new_obj._data])
return new_obj
else:
raise AttributeError(
'create is not supported; %s is not a managed set' % (
super_cls.__name__))
return _create | def function[ManagedCreate, parameter[super_cls]]:
constant[Dynamically creates a `create` method for a `ObjectSet.Managed` class
that calls the `super_cls.create`.
The first positional argument that is passed to the `super_cls.create` is
the `_manager` that was set using `ObjectSet.Managed`. The created object
is added to the `ObjectSet.Managed` also placed in the correct
`_data[field]` and `_orig_data[field]` for the `_manager` object.
]
<ast.AsyncFunctionDef object at 0x7da18eb56950>
return[name[_create]] | keyword[def] identifier[ManagedCreate] ( identifier[super_cls] ):
literal[string]
@ identifier[wraps] ( identifier[super_cls] . identifier[create] )
keyword[async] keyword[def] identifier[_create] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[cls] = identifier[type] ( identifier[self] )
identifier[manager] = identifier[getattr] ( identifier[cls] , literal[string] , keyword[None] )
identifier[manager_field] = identifier[getattr] ( identifier[cls] , literal[string] , keyword[None] )
keyword[if] identifier[manager] keyword[is] keyword[not] keyword[None] keyword[and] identifier[manager_field] keyword[is] keyword[not] keyword[None] :
identifier[args] =( identifier[manager] ,)+ identifier[args]
identifier[new_obj] = keyword[await] identifier[super_cls] . identifier[create] (* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[_items] = identifier[self] . identifier[_items] +[ identifier[new_obj] ]
identifier[manager] . identifier[_data] [ identifier[manager_field] . identifier[name] ]=(
identifier[manager] . identifier[_data] [ identifier[manager_field] . identifier[name] ]+
[ identifier[new_obj] . identifier[_data] ])
identifier[manager] . identifier[_orig_data] [ identifier[manager_field] . identifier[name] ]=(
identifier[manager] . identifier[_orig_data] [ identifier[manager_field] . identifier[name] ]+
[ identifier[new_obj] . identifier[_data] ])
keyword[return] identifier[new_obj]
keyword[else] :
keyword[raise] identifier[AttributeError] (
literal[string] %(
identifier[super_cls] . identifier[__name__] ))
keyword[return] identifier[_create] | def ManagedCreate(super_cls):
"""Dynamically creates a `create` method for a `ObjectSet.Managed` class
that calls the `super_cls.create`.
The first positional argument that is passed to the `super_cls.create` is
the `_manager` that was set using `ObjectSet.Managed`. The created object
is added to the `ObjectSet.Managed` also placed in the correct
`_data[field]` and `_orig_data[field]` for the `_manager` object.
"""
@wraps(super_cls.create)
async def _create(self, *args, **kwargs):
cls = type(self)
manager = getattr(cls, '_manager', None)
manager_field = getattr(cls, '_manager_field', None)
if manager is not None and manager_field is not None:
args = (manager,) + args
new_obj = await super_cls.create(*args, **kwargs)
self._items = self._items + [new_obj]
manager._data[manager_field.name] = manager._data[manager_field.name] + [new_obj._data]
manager._orig_data[manager_field.name] = manager._orig_data[manager_field.name] + [new_obj._data]
return new_obj # depends on [control=['if'], data=[]]
else:
raise AttributeError('create is not supported; %s is not a managed set' % super_cls.__name__)
return _create |
def queue_async_stats_job(klass, account, ids, metric_groups, **kwargs):
"""
Queues a list of metrics for a specified set of object IDs asynchronously
"""
params = klass._standard_params(ids, metric_groups, **kwargs)
params['platform'] = kwargs.get('platform', None)
params['country'] = kwargs.get('country', None)
params['segmentation_type'] = kwargs.get('segmentation_type', None)
resource = klass.RESOURCE_ASYNC.format(account_id=account.id)
response = Request(account.client, 'post', resource, params=params).perform()
return response.body['data'] | def function[queue_async_stats_job, parameter[klass, account, ids, metric_groups]]:
constant[
Queues a list of metrics for a specified set of object IDs asynchronously
]
variable[params] assign[=] call[name[klass]._standard_params, parameter[name[ids], name[metric_groups]]]
call[name[params]][constant[platform]] assign[=] call[name[kwargs].get, parameter[constant[platform], constant[None]]]
call[name[params]][constant[country]] assign[=] call[name[kwargs].get, parameter[constant[country], constant[None]]]
call[name[params]][constant[segmentation_type]] assign[=] call[name[kwargs].get, parameter[constant[segmentation_type], constant[None]]]
variable[resource] assign[=] call[name[klass].RESOURCE_ASYNC.format, parameter[]]
variable[response] assign[=] call[call[name[Request], parameter[name[account].client, constant[post], name[resource]]].perform, parameter[]]
return[call[name[response].body][constant[data]]] | keyword[def] identifier[queue_async_stats_job] ( identifier[klass] , identifier[account] , identifier[ids] , identifier[metric_groups] ,** identifier[kwargs] ):
literal[string]
identifier[params] = identifier[klass] . identifier[_standard_params] ( identifier[ids] , identifier[metric_groups] ,** identifier[kwargs] )
identifier[params] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[params] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[params] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[resource] = identifier[klass] . identifier[RESOURCE_ASYNC] . identifier[format] ( identifier[account_id] = identifier[account] . identifier[id] )
identifier[response] = identifier[Request] ( identifier[account] . identifier[client] , literal[string] , identifier[resource] , identifier[params] = identifier[params] ). identifier[perform] ()
keyword[return] identifier[response] . identifier[body] [ literal[string] ] | def queue_async_stats_job(klass, account, ids, metric_groups, **kwargs):
"""
Queues a list of metrics for a specified set of object IDs asynchronously
"""
params = klass._standard_params(ids, metric_groups, **kwargs)
params['platform'] = kwargs.get('platform', None)
params['country'] = kwargs.get('country', None)
params['segmentation_type'] = kwargs.get('segmentation_type', None)
resource = klass.RESOURCE_ASYNC.format(account_id=account.id)
response = Request(account.client, 'post', resource, params=params).perform()
return response.body['data'] |
def on_remove_row(self, event, row_num=-1):
"""
Remove specified grid row.
If no row number is given, remove the last row.
"""
text = "Are you sure? If you select delete you won't be able to retrieve these rows..."
dia = pw.ChooseOne(self, "Yes, delete rows", "Leave rows for now", text)
dia.Centre()
result = dia.ShowModal()
if result == wx.ID_NO:
return
default = (255, 255, 255, 255)
if row_num == -1:
# unhighlight any selected rows:
for row in self.selected_rows:
attr = wx.grid.GridCellAttr()
attr.SetBackgroundColour(default)
self.grid.SetRowAttr(row, attr)
row_num = self.grid.GetNumberRows() - 1
self.deleteRowButton.Disable()
self.selected_rows = {row_num}
# remove row(s) from the contribution
df = self.contribution.tables[self.grid_type].df
row_nums = list(range(len(df)))
df = df.iloc[[i for i in row_nums if i not in self.selected_rows]]
self.contribution.tables[self.grid_type].df = df
# now remove row(s) from grid
# delete rows, adjusting the row # appropriately as you delete
for num, row in enumerate(self.selected_rows):
row -= num
if row < 0:
row = 0
self.grid.remove_row(row)
attr = wx.grid.GridCellAttr()
attr.SetBackgroundColour(default)
self.grid.SetRowAttr(row, attr)
# reset the grid
self.selected_rows = set()
self.deleteRowButton.Disable()
self.grid.Refresh()
self.main_sizer.Fit(self) | def function[on_remove_row, parameter[self, event, row_num]]:
constant[
Remove specified grid row.
If no row number is given, remove the last row.
]
variable[text] assign[=] constant[Are you sure? If you select delete you won't be able to retrieve these rows...]
variable[dia] assign[=] call[name[pw].ChooseOne, parameter[name[self], constant[Yes, delete rows], constant[Leave rows for now], name[text]]]
call[name[dia].Centre, parameter[]]
variable[result] assign[=] call[name[dia].ShowModal, parameter[]]
if compare[name[result] equal[==] name[wx].ID_NO] begin[:]
return[None]
variable[default] assign[=] tuple[[<ast.Constant object at 0x7da1b04fd3f0>, <ast.Constant object at 0x7da1b04fe920>, <ast.Constant object at 0x7da1b04fda80>, <ast.Constant object at 0x7da1b04fe6e0>]]
if compare[name[row_num] equal[==] <ast.UnaryOp object at 0x7da1b04fe8c0>] begin[:]
for taget[name[row]] in starred[name[self].selected_rows] begin[:]
variable[attr] assign[=] call[name[wx].grid.GridCellAttr, parameter[]]
call[name[attr].SetBackgroundColour, parameter[name[default]]]
call[name[self].grid.SetRowAttr, parameter[name[row], name[attr]]]
variable[row_num] assign[=] binary_operation[call[name[self].grid.GetNumberRows, parameter[]] - constant[1]]
call[name[self].deleteRowButton.Disable, parameter[]]
name[self].selected_rows assign[=] <ast.Set object at 0x7da1b04ff220>
variable[df] assign[=] call[name[self].contribution.tables][name[self].grid_type].df
variable[row_nums] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[df]]]]]]]
variable[df] assign[=] call[name[df].iloc][<ast.ListComp object at 0x7da1b04fe140>]
call[name[self].contribution.tables][name[self].grid_type].df assign[=] name[df]
for taget[tuple[[<ast.Name object at 0x7da1b04fd240>, <ast.Name object at 0x7da1b04fc310>]]] in starred[call[name[enumerate], parameter[name[self].selected_rows]]] begin[:]
<ast.AugAssign object at 0x7da1b04fcc70>
if compare[name[row] less[<] constant[0]] begin[:]
variable[row] assign[=] constant[0]
call[name[self].grid.remove_row, parameter[name[row]]]
variable[attr] assign[=] call[name[wx].grid.GridCellAttr, parameter[]]
call[name[attr].SetBackgroundColour, parameter[name[default]]]
call[name[self].grid.SetRowAttr, parameter[name[row], name[attr]]]
name[self].selected_rows assign[=] call[name[set], parameter[]]
call[name[self].deleteRowButton.Disable, parameter[]]
call[name[self].grid.Refresh, parameter[]]
call[name[self].main_sizer.Fit, parameter[name[self]]] | keyword[def] identifier[on_remove_row] ( identifier[self] , identifier[event] , identifier[row_num] =- literal[int] ):
literal[string]
identifier[text] = literal[string]
identifier[dia] = identifier[pw] . identifier[ChooseOne] ( identifier[self] , literal[string] , literal[string] , identifier[text] )
identifier[dia] . identifier[Centre] ()
identifier[result] = identifier[dia] . identifier[ShowModal] ()
keyword[if] identifier[result] == identifier[wx] . identifier[ID_NO] :
keyword[return]
identifier[default] =( literal[int] , literal[int] , literal[int] , literal[int] )
keyword[if] identifier[row_num] ==- literal[int] :
keyword[for] identifier[row] keyword[in] identifier[self] . identifier[selected_rows] :
identifier[attr] = identifier[wx] . identifier[grid] . identifier[GridCellAttr] ()
identifier[attr] . identifier[SetBackgroundColour] ( identifier[default] )
identifier[self] . identifier[grid] . identifier[SetRowAttr] ( identifier[row] , identifier[attr] )
identifier[row_num] = identifier[self] . identifier[grid] . identifier[GetNumberRows] ()- literal[int]
identifier[self] . identifier[deleteRowButton] . identifier[Disable] ()
identifier[self] . identifier[selected_rows] ={ identifier[row_num] }
identifier[df] = identifier[self] . identifier[contribution] . identifier[tables] [ identifier[self] . identifier[grid_type] ]. identifier[df]
identifier[row_nums] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[df] )))
identifier[df] = identifier[df] . identifier[iloc] [[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[row_nums] keyword[if] identifier[i] keyword[not] keyword[in] identifier[self] . identifier[selected_rows] ]]
identifier[self] . identifier[contribution] . identifier[tables] [ identifier[self] . identifier[grid_type] ]. identifier[df] = identifier[df]
keyword[for] identifier[num] , identifier[row] keyword[in] identifier[enumerate] ( identifier[self] . identifier[selected_rows] ):
identifier[row] -= identifier[num]
keyword[if] identifier[row] < literal[int] :
identifier[row] = literal[int]
identifier[self] . identifier[grid] . identifier[remove_row] ( identifier[row] )
identifier[attr] = identifier[wx] . identifier[grid] . identifier[GridCellAttr] ()
identifier[attr] . identifier[SetBackgroundColour] ( identifier[default] )
identifier[self] . identifier[grid] . identifier[SetRowAttr] ( identifier[row] , identifier[attr] )
identifier[self] . identifier[selected_rows] = identifier[set] ()
identifier[self] . identifier[deleteRowButton] . identifier[Disable] ()
identifier[self] . identifier[grid] . identifier[Refresh] ()
identifier[self] . identifier[main_sizer] . identifier[Fit] ( identifier[self] ) | def on_remove_row(self, event, row_num=-1):
"""
Remove specified grid row.
If no row number is given, remove the last row.
"""
text = "Are you sure? If you select delete you won't be able to retrieve these rows..."
dia = pw.ChooseOne(self, 'Yes, delete rows', 'Leave rows for now', text)
dia.Centre()
result = dia.ShowModal()
if result == wx.ID_NO:
return # depends on [control=['if'], data=[]]
default = (255, 255, 255, 255)
if row_num == -1:
# unhighlight any selected rows:
for row in self.selected_rows:
attr = wx.grid.GridCellAttr()
attr.SetBackgroundColour(default)
self.grid.SetRowAttr(row, attr) # depends on [control=['for'], data=['row']]
row_num = self.grid.GetNumberRows() - 1
self.deleteRowButton.Disable()
self.selected_rows = {row_num} # depends on [control=['if'], data=['row_num']]
# remove row(s) from the contribution
df = self.contribution.tables[self.grid_type].df
row_nums = list(range(len(df)))
df = df.iloc[[i for i in row_nums if i not in self.selected_rows]]
self.contribution.tables[self.grid_type].df = df
# now remove row(s) from grid
# delete rows, adjusting the row # appropriately as you delete
for (num, row) in enumerate(self.selected_rows):
row -= num
if row < 0:
row = 0 # depends on [control=['if'], data=['row']]
self.grid.remove_row(row)
attr = wx.grid.GridCellAttr()
attr.SetBackgroundColour(default)
self.grid.SetRowAttr(row, attr) # depends on [control=['for'], data=[]]
# reset the grid
self.selected_rows = set()
self.deleteRowButton.Disable()
self.grid.Refresh()
self.main_sizer.Fit(self) |
def set_home(self, new_home):
    """Set the user's home location and invalidate cached data.

    ``new_home`` may be a ready-made ``Position``, a tuple of location
    data, or (anything else) an antenna object.  Exact-type dispatch is
    intentional here: subclasses of ``Position``/``tuple`` fall through
    to the antenna branch, matching historical behaviour.
    """
    kind = type(new_home)
    if kind is Position:
        home = new_home
    elif kind is tuple:
        home = Position(location=new_home)
    else:
        home = Position(antenna=new_home)
    self.home = home
    self.reset_cache()
constant[
Sets the user's home. The argument can be a Position object or a
tuple containing location data.
]
if compare[call[name[type], parameter[name[new_home]]] is name[Position]] begin[:]
name[self].home assign[=] name[new_home]
call[name[self].reset_cache, parameter[]] | keyword[def] identifier[set_home] ( identifier[self] , identifier[new_home] ):
literal[string]
keyword[if] identifier[type] ( identifier[new_home] ) keyword[is] identifier[Position] :
identifier[self] . identifier[home] = identifier[new_home]
keyword[elif] identifier[type] ( identifier[new_home] ) keyword[is] identifier[tuple] :
identifier[self] . identifier[home] = identifier[Position] ( identifier[location] = identifier[new_home] )
keyword[else] :
identifier[self] . identifier[home] = identifier[Position] ( identifier[antenna] = identifier[new_home] )
identifier[self] . identifier[reset_cache] () | def set_home(self, new_home):
"""
Sets the user's home. The argument can be a Position object or a
tuple containing location data.
"""
if type(new_home) is Position:
self.home = new_home # depends on [control=['if'], data=[]]
elif type(new_home) is tuple:
self.home = Position(location=new_home) # depends on [control=['if'], data=[]]
else:
self.home = Position(antenna=new_home)
self.reset_cache() |
def remove_child(self, idx=None, *, name=None, node=None):
    """Remove a child node from the current node instance.

    Exactly one selector should be supplied; they are tried in the
    order ``idx``, ``name``, ``node``.

    :param idx: Index of child node to be removed.
    :type idx: int
    :param name: The first child node found with «name» will be removed.
    :type name: str
    :param node: Child node to be removed.
    :type node: Node
    :returns: The node that has been removed, or False if not successful.
    :rtype: Node or False
    """
    # Compare against None instead of truthiness so that idx == 0 (the
    # first child) is a usable index; the old `if idx and ...` guard
    # silently ignored index 0 and fell through to the other selectors.
    if (idx is not None and isinstance(idx, int) and
            -len(self.childs) <= idx < len(self.childs)):
        return self.childs.pop(idx)
    if name and isinstance(name, str):
        # Remove (and return) the first child whose name matches.
        for child in self.childs:
            if child.name == name:
                self.childs.remove(child)
                return child
    if node and node in self.childs:
        self.childs.remove(node)
        return node
    return False
constant[Remove a child node from the current node instance.
:param idx: Index of child node to be removed.
:type idx: int
:param name: The first child node found with «name» will be removed.
:type name: str
:param node: Child node to be removed.
:type node: Node
:returns: The node that has been removed, or False if not successful.
:rtype: Node or False
]
if <ast.BoolOp object at 0x7da18fe92e00> begin[:]
return[call[name[self].childs.pop, parameter[name[idx]]]]
if <ast.BoolOp object at 0x7da18fe91e10> begin[:]
variable[found_node] assign[=] constant[None]
for taget[name[_n]] in starred[name[self].childs] begin[:]
if compare[name[_n].name equal[==] name[name]] begin[:]
variable[found_node] assign[=] name[_n]
break
if name[found_node] begin[:]
call[name[self].childs.remove, parameter[name[found_node]]]
return[name[found_node]]
if <ast.BoolOp object at 0x7da18fe918a0> begin[:]
call[name[self].childs.remove, parameter[name[node]]]
return[name[node]]
return[constant[False]] | keyword[def] identifier[remove_child] ( identifier[self] , identifier[idx] = keyword[None] ,*, identifier[name] = keyword[None] , identifier[node] = keyword[None] ):
literal[string]
keyword[if] ( identifier[idx] keyword[and] identifier[isinstance] ( identifier[idx] , identifier[int] ) keyword[and]
- identifier[len] ( identifier[self] . identifier[childs] )<= identifier[idx] < identifier[len] ( identifier[self] . identifier[childs] )):
keyword[return] identifier[self] . identifier[childs] . identifier[pop] ( identifier[idx] )
keyword[if] identifier[name] keyword[and] identifier[isinstance] ( identifier[name] , identifier[str] ):
identifier[found_node] = keyword[None]
keyword[for] identifier[_n] keyword[in] identifier[self] . identifier[childs] :
keyword[if] identifier[_n] . identifier[name] == identifier[name] :
identifier[found_node] = identifier[_n]
keyword[break]
keyword[if] identifier[found_node] :
identifier[self] . identifier[childs] . identifier[remove] ( identifier[found_node] )
keyword[return] identifier[found_node]
keyword[if] identifier[node] keyword[and] identifier[node] keyword[in] identifier[self] . identifier[childs] :
identifier[self] . identifier[childs] . identifier[remove] ( identifier[node] )
keyword[return] identifier[node]
keyword[return] keyword[False] | def remove_child(self, idx=None, *, name=None, node=None):
"""Remove a child node from the current node instance.
:param idx: Index of child node to be removed.
:type idx: int
:param name: The first child node found with «name» will be removed.
:type name: str
:param node: Child node to be removed.
:type node: Node
:returns: The node that has been removed, or False if not successful.
:rtype: Node or False
"""
if idx and isinstance(idx, int) and (-len(self.childs) <= idx < len(self.childs)):
return self.childs.pop(idx) # depends on [control=['if'], data=[]]
if name and isinstance(name, str):
found_node = None
for _n in self.childs:
if _n.name == name:
found_node = _n
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_n']]
if found_node:
self.childs.remove(found_node)
return found_node # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if node and node in self.childs:
self.childs.remove(node)
return node # depends on [control=['if'], data=[]]
return False |
def verbose(self):
    """Return a shallow copy of this logger flagged as verbose.

    Messages emitted through the returned logger are only shown when
    the user has asked for extra output, e.g.::

        log.verbose.warn('this is a verbose warn')
        log.verbose.info('this is a verbose info')
    """
    verbose_log = copy.copy(self)
    verbose_log._is_verbose = True
    return verbose_log
constant[
Make it the verbose log.
A verbose log can be only shown when user want to see more logs.
It works as::
log.verbose.warn('this is a verbose warn')
log.verbose.info('this is a verbose info')
]
variable[log] assign[=] call[name[copy].copy, parameter[name[self]]]
name[log]._is_verbose assign[=] constant[True]
return[name[log]] | keyword[def] identifier[verbose] ( identifier[self] ):
literal[string]
identifier[log] = identifier[copy] . identifier[copy] ( identifier[self] )
identifier[log] . identifier[_is_verbose] = keyword[True]
keyword[return] identifier[log] | def verbose(self):
"""
Make it the verbose log.
A verbose log can be only shown when user want to see more logs.
It works as::
log.verbose.warn('this is a verbose warn')
log.verbose.info('this is a verbose info')
"""
log = copy.copy(self)
log._is_verbose = True
return log |
def _get_hanging_wall_coeffs_mag(self, C, mag):
"""
Returns the hanging wall magnitude term defined in equation 14
"""
if mag < 5.5:
return 0.0
elif mag > 6.5:
return 1.0 + C["a2"] * (mag - 6.5)
else:
return (mag - 5.5) * (1.0 + C["a2"] * (mag - 6.5)) | def function[_get_hanging_wall_coeffs_mag, parameter[self, C, mag]]:
constant[
Returns the hanging wall magnitude term defined in equation 14
]
if compare[name[mag] less[<] constant[5.5]] begin[:]
return[constant[0.0]] | keyword[def] identifier[_get_hanging_wall_coeffs_mag] ( identifier[self] , identifier[C] , identifier[mag] ):
literal[string]
keyword[if] identifier[mag] < literal[int] :
keyword[return] literal[int]
keyword[elif] identifier[mag] > literal[int] :
keyword[return] literal[int] + identifier[C] [ literal[string] ]*( identifier[mag] - literal[int] )
keyword[else] :
keyword[return] ( identifier[mag] - literal[int] )*( literal[int] + identifier[C] [ literal[string] ]*( identifier[mag] - literal[int] )) | def _get_hanging_wall_coeffs_mag(self, C, mag):
"""
Returns the hanging wall magnitude term defined in equation 14
"""
if mag < 5.5:
return 0.0 # depends on [control=['if'], data=[]]
elif mag > 6.5:
return 1.0 + C['a2'] * (mag - 6.5) # depends on [control=['if'], data=['mag']]
else:
return (mag - 5.5) * (1.0 + C['a2'] * (mag - 6.5)) |
def template_filter(self, name: Optional[str]=None) -> Callable:
    """Register a template filter via decoration.

    Example usage::

        @app.template_filter('name')
        def to_upper(value):
            return value.upper()

    Arguments:
        name: The filter name (defaults to function name).
    """
    def register(filter_func: Callable) -> Callable:
        # Delegate the actual registration, then hand the function back
        # unchanged so the decorator is transparent.
        self.add_template_filter(filter_func, name=name)
        return filter_func

    return register
constant[Add a template filter.
This is designed to be used as a decorator. An example usage,
.. code-block:: python
@app.template_filter('name')
def to_upper(value):
return value.upper()
Arguments:
name: The filter name (defaults to function name).
]
def function[decorator, parameter[func]]:
call[name[self].add_template_filter, parameter[name[func]]]
return[name[func]]
return[name[decorator]] | keyword[def] identifier[template_filter] ( identifier[self] , identifier[name] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[Callable] :
literal[string]
keyword[def] identifier[decorator] ( identifier[func] : identifier[Callable] )-> identifier[Callable] :
identifier[self] . identifier[add_template_filter] ( identifier[func] , identifier[name] = identifier[name] )
keyword[return] identifier[func]
keyword[return] identifier[decorator] | def template_filter(self, name: Optional[str]=None) -> Callable:
"""Add a template filter.
This is designed to be used as a decorator. An example usage,
.. code-block:: python
@app.template_filter('name')
def to_upper(value):
return value.upper()
Arguments:
name: The filter name (defaults to function name).
"""
def decorator(func: Callable) -> Callable:
self.add_template_filter(func, name=name)
return func
return decorator |
def _number_shapes_ancestors(self, topoTypeA, topoTypeB, topologicalEntity):
    '''returns the number of shape ancestors
    If you want to know how many edges a faces has:
    _number_shapes_ancestors(self, TopAbs_EDGE, TopAbs_FACE, edg)
    will return the number of edges a faces has
    @param topoTypeA:
    @param topoTypeB:
    @param topologicalEntity:
    '''
    # Build the shape -> ancestor-list map once for the whole shape.
    shape_to_ancestors = TopTools_IndexedDataMapOfShapeListOfShape()
    topexp_MapShapesAndAncestors(self.myShape, topoTypeA, topoTypeB,
                                 shape_to_ancestors)
    ancestors = shape_to_ancestors.FindFromKey(topologicalEntity)
    if ancestors.IsEmpty():
        return None
    # Deduplicate via a set; the OCC list may repeat ancestors.
    unique_ancestors = set()
    it = TopTools_ListIteratorOfListOfShape(ancestors)
    while it.More():
        unique_ancestors.add(it.Value())
        it.Next()
    return len(unique_ancestors)
constant[returns the number of shape ancestors
If you want to know how many edges a faces has:
_number_shapes_ancestors(self, TopAbs_EDGE, TopAbs_FACE, edg)
will return the number of edges a faces has
@param topoTypeA:
@param topoTypeB:
@param topologicalEntity:
]
variable[topo_set] assign[=] call[name[set], parameter[]]
variable[_map] assign[=] call[name[TopTools_IndexedDataMapOfShapeListOfShape], parameter[]]
call[name[topexp_MapShapesAndAncestors], parameter[name[self].myShape, name[topoTypeA], name[topoTypeB], name[_map]]]
variable[results] assign[=] call[name[_map].FindFromKey, parameter[name[topologicalEntity]]]
if call[name[results].IsEmpty, parameter[]] begin[:]
return[constant[None]]
variable[topology_iterator] assign[=] call[name[TopTools_ListIteratorOfListOfShape], parameter[name[results]]]
while call[name[topology_iterator].More, parameter[]] begin[:]
call[name[topo_set].add, parameter[call[name[topology_iterator].Value, parameter[]]]]
call[name[topology_iterator].Next, parameter[]]
return[call[name[len], parameter[name[topo_set]]]] | keyword[def] identifier[_number_shapes_ancestors] ( identifier[self] , identifier[topoTypeA] , identifier[topoTypeB] , identifier[topologicalEntity] ):
literal[string]
identifier[topo_set] = identifier[set] ()
identifier[_map] = identifier[TopTools_IndexedDataMapOfShapeListOfShape] ()
identifier[topexp_MapShapesAndAncestors] ( identifier[self] . identifier[myShape] , identifier[topoTypeA] , identifier[topoTypeB] , identifier[_map] )
identifier[results] = identifier[_map] . identifier[FindFromKey] ( identifier[topologicalEntity] )
keyword[if] identifier[results] . identifier[IsEmpty] ():
keyword[return] keyword[None]
identifier[topology_iterator] = identifier[TopTools_ListIteratorOfListOfShape] ( identifier[results] )
keyword[while] identifier[topology_iterator] . identifier[More] ():
identifier[topo_set] . identifier[add] ( identifier[topology_iterator] . identifier[Value] ())
identifier[topology_iterator] . identifier[Next] ()
keyword[return] identifier[len] ( identifier[topo_set] ) | def _number_shapes_ancestors(self, topoTypeA, topoTypeB, topologicalEntity):
"""returns the number of shape ancestors
If you want to know how many edges a faces has:
_number_shapes_ancestors(self, TopAbs_EDGE, TopAbs_FACE, edg)
will return the number of edges a faces has
@param topoTypeA:
@param topoTypeB:
@param topologicalEntity:
"""
topo_set = set()
_map = TopTools_IndexedDataMapOfShapeListOfShape()
topexp_MapShapesAndAncestors(self.myShape, topoTypeA, topoTypeB, _map)
results = _map.FindFromKey(topologicalEntity)
if results.IsEmpty():
return None # depends on [control=['if'], data=[]]
topology_iterator = TopTools_ListIteratorOfListOfShape(results)
while topology_iterator.More():
topo_set.add(topology_iterator.Value())
topology_iterator.Next() # depends on [control=['while'], data=[]]
return len(topo_set) |
def gisland(self, dae):
    """Reset g(x) for islanded buses and areas"""
    # Nothing to do when no island information has been recorded.
    if not self.islanded_buses and not self.island_sets:
        return
    # for islanded areas without a slack bus
    # TODO: fix for islanded sets without sw
    # for island in self.island_sets:
    #     nosw = 1
    #     for item in self.system.SW.bus:
    #         if self.uid[item] in island:
    #             nosw = 0
    #             break
    #     if nosw:
    #         self.islanded_buses += island
    #         self.island_sets.remove(island)
    bus_idx = self.islanded_buses
    # Voltage equations are offset by the bus count n.
    volt_idx = [self.n + i for i in bus_idx]
    dae.g[bus_idx] = 0
    dae.g[volt_idx] = 0
constant[Reset g(x) for islanded buses and areas]
if <ast.BoolOp object at 0x7da18dc9abc0> begin[:]
return[None]
<ast.Tuple object at 0x7da18dc9baf0> assign[=] tuple[[<ast.Call object at 0x7da18dc9b670>, <ast.Call object at 0x7da18dc99450>]]
variable[a] assign[=] name[self].islanded_buses
variable[v] assign[=] <ast.ListComp object at 0x7da18dc9a2c0>
call[name[dae].g][name[a]] assign[=] constant[0]
call[name[dae].g][name[v]] assign[=] constant[0] | keyword[def] identifier[gisland] ( identifier[self] , identifier[dae] ):
literal[string]
keyword[if] ( keyword[not] identifier[self] . identifier[islanded_buses] ) keyword[and] ( keyword[not] identifier[self] . identifier[island_sets] ):
keyword[return]
identifier[a] , identifier[v] = identifier[list] (), identifier[list] ()
identifier[a] = identifier[self] . identifier[islanded_buses]
identifier[v] =[ identifier[self] . identifier[n] + identifier[item] keyword[for] identifier[item] keyword[in] identifier[a] ]
identifier[dae] . identifier[g] [ identifier[a] ]= literal[int]
identifier[dae] . identifier[g] [ identifier[v] ]= literal[int] | def gisland(self, dae):
"""Reset g(x) for islanded buses and areas"""
if not self.islanded_buses and (not self.island_sets):
return # depends on [control=['if'], data=[]]
(a, v) = (list(), list())
# for islanded areas without a slack bus
# TODO: fix for islanded sets without sw
# for island in self.island_sets:
# nosw = 1
# for item in self.system.SW.bus:
# if self.uid[item] in island:
# nosw = 0
# break
# if nosw:
# self.islanded_buses += island
# self.island_sets.remove(island)
a = self.islanded_buses
v = [self.n + item for item in a]
dae.g[a] = 0
dae.g[v] = 0 |
def redirect_to_assignment_override_for_section(self, assignment_id, course_section_id):
    """
    Redirect to the assignment override for a section.
    Responds with a redirect to the override for the given section, if any
    (404 otherwise).
    """
    # This endpoint takes no query parameters or form data.
    params = {}
    data = {}
    # Both identifiers are required path components.
    path = {
        "course_section_id": course_section_id,
        "assignment_id": assignment_id,
    }
    self.logger.debug("GET /api/v1/sections/{course_section_id}/assignments/{assignment_id}/override with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/sections/{course_section_id}/assignments/{assignment_id}/override".format(**path), data=data, params=params, no_data=True)
constant[
Redirect to the assignment override for a section.
Responds with a redirect to the override for the given section, if any
(404 otherwise).
]
variable[path] assign[=] dictionary[[], []]
variable[data] assign[=] dictionary[[], []]
variable[params] assign[=] dictionary[[], []]
constant[ID]
call[name[path]][constant[course_section_id]] assign[=] name[course_section_id]
constant[ID]
call[name[path]][constant[assignment_id]] assign[=] name[assignment_id]
call[name[self].logger.debug, parameter[call[constant[GET /api/v1/sections/{course_section_id}/assignments/{assignment_id}/override with query params: {params} and form data: {data}].format, parameter[]]]]
return[call[name[self].generic_request, parameter[constant[GET], call[constant[/api/v1/sections/{course_section_id}/assignments/{assignment_id}/override].format, parameter[]]]]] | keyword[def] identifier[redirect_to_assignment_override_for_section] ( identifier[self] , identifier[assignment_id] , identifier[course_section_id] ):
literal[string]
identifier[path] ={}
identifier[data] ={}
identifier[params] ={}
literal[string]
identifier[path] [ literal[string] ]= identifier[course_section_id]
literal[string]
identifier[path] [ literal[string] ]= identifier[assignment_id]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[params] = identifier[params] , identifier[data] = identifier[data] ,** identifier[path] ))
keyword[return] identifier[self] . identifier[generic_request] ( literal[string] , literal[string] . identifier[format] (** identifier[path] ), identifier[data] = identifier[data] , identifier[params] = identifier[params] , identifier[no_data] = keyword[True] ) | def redirect_to_assignment_override_for_section(self, assignment_id, course_section_id):
"""
Redirect to the assignment override for a section.
Responds with a redirect to the override for the given section, if any
(404 otherwise).
"""
path = {}
data = {}
params = {} # REQUIRED - PATH - course_section_id
'ID'
path['course_section_id'] = course_section_id # REQUIRED - PATH - assignment_id
'ID'
path['assignment_id'] = assignment_id
self.logger.debug('GET /api/v1/sections/{course_section_id}/assignments/{assignment_id}/override with query params: {params} and form data: {data}'.format(params=params, data=data, **path))
return self.generic_request('GET', '/api/v1/sections/{course_section_id}/assignments/{assignment_id}/override'.format(**path), data=data, params=params, no_data=True) |
def find(self, id, columns=None):
    """
    Find a model by its primary key
    :param id: The primary key value
    :type id: mixed
    :param columns: The columns to retrieve
    :type columns: list
    :return: The found model
    :rtype: orator.Model
    """
    # Default to all columns only when the caller passed nothing at all
    # (an explicit empty list is respected as-is).
    selected = ["*"] if columns is None else columns
    if isinstance(id, list):
        # A list of keys means a batch lookup.
        return self.find_many(id, selected)
    self._query.where(self._model.get_qualified_key_name(), "=", id)
    return self.first(selected)
constant[
Find a model by its primary key
:param id: The primary key value
:type id: mixed
:param columns: The columns to retrieve
:type columns: list
:return: The found model
:rtype: orator.Model
]
if compare[name[columns] is constant[None]] begin[:]
variable[columns] assign[=] list[[<ast.Constant object at 0x7da20c7c86a0>]]
if call[name[isinstance], parameter[name[id], name[list]]] begin[:]
return[call[name[self].find_many, parameter[name[id], name[columns]]]]
call[name[self]._query.where, parameter[call[name[self]._model.get_qualified_key_name, parameter[]], constant[=], name[id]]]
return[call[name[self].first, parameter[name[columns]]]] | keyword[def] identifier[find] ( identifier[self] , identifier[id] , identifier[columns] = keyword[None] ):
literal[string]
keyword[if] identifier[columns] keyword[is] keyword[None] :
identifier[columns] =[ literal[string] ]
keyword[if] identifier[isinstance] ( identifier[id] , identifier[list] ):
keyword[return] identifier[self] . identifier[find_many] ( identifier[id] , identifier[columns] )
identifier[self] . identifier[_query] . identifier[where] ( identifier[self] . identifier[_model] . identifier[get_qualified_key_name] (), literal[string] , identifier[id] )
keyword[return] identifier[self] . identifier[first] ( identifier[columns] ) | def find(self, id, columns=None):
"""
Find a model by its primary key
:param id: The primary key value
:type id: mixed
:param columns: The columns to retrieve
:type columns: list
:return: The found model
:rtype: orator.Model
"""
if columns is None:
columns = ['*'] # depends on [control=['if'], data=['columns']]
if isinstance(id, list):
return self.find_many(id, columns) # depends on [control=['if'], data=[]]
self._query.where(self._model.get_qualified_key_name(), '=', id)
return self.first(columns) |
def update_recurring(self, recurring_id, recurring_dict):
    """
    Updates a recurring
    :param recurring_id: the recurring id
    :param recurring_dict: dict
    :return: dict
    """
    # Thin wrapper: issue a PUT against the recurrings resource.
    return self._create_put_request(
        resource=RECURRINGS,
        billomat_id=recurring_id,
        send_data=recurring_dict,
    )
constant[
Updates a recurring
:param recurring_id: the recurring id
:param recurring_dict: dict
:return: dict
]
return[call[name[self]._create_put_request, parameter[]]] | keyword[def] identifier[update_recurring] ( identifier[self] , identifier[recurring_id] , identifier[recurring_dict] ):
literal[string]
keyword[return] identifier[self] . identifier[_create_put_request] ( identifier[resource] = identifier[RECURRINGS] , identifier[billomat_id] = identifier[recurring_id] , identifier[send_data] = identifier[recurring_dict] ) | def update_recurring(self, recurring_id, recurring_dict):
"""
Updates a recurring
:param recurring_id: the recurring id
:param recurring_dict: dict
:return: dict
"""
return self._create_put_request(resource=RECURRINGS, billomat_id=recurring_id, send_data=recurring_dict) |
def iterscan(self, match="*", count=1000):
    """ Much slower than iter(), but much more memory efficient if
    k/v's retrieved are one-offs
    @match: matches member names in the sorted set
    @count: the user specified the amount of work that should be done
    at every call in order to retrieve elements from the collection
    -> iterator of |(member, score)| pairs
    """
    raw_pairs = self._client.zscan_iter(
        self.key_prefix, match=match, count=count)
    # Members come back serialized or merely encoded; pick the matching
    # converter once, then stream the converted pairs lazily.
    member_of = self._loads if self.serialized else self._decode
    return ((member_of(member), self.cast(score))
            for member, score in raw_pairs)
constant[ Much slower than iter(), but much more memory efficient if
k/v's retrieved are one-offs
@match: matches member names in the sorted set
@count: the user specified the amount of work that should be done
at every call in order to retrieve elements from the collection
-> iterator of |(member, score)| pairs
]
if name[self].serialized begin[:]
return[call[name[map], parameter[<ast.Lambda object at 0x7da1b28f47c0>, call[name[self]._client.zscan_iter, parameter[name[self].key_prefix]]]]] | keyword[def] identifier[iterscan] ( identifier[self] , identifier[match] = literal[string] , identifier[count] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[serialized] :
keyword[return] identifier[map] (
keyword[lambda] identifier[x] :( identifier[self] . identifier[_loads] ( identifier[x] [ literal[int] ]), identifier[self] . identifier[cast] ( identifier[x] [ literal[int] ])),
identifier[self] . identifier[_client] . identifier[zscan_iter] (
identifier[self] . identifier[key_prefix] , identifier[match] = identifier[match] , identifier[count] = identifier[count] ))
keyword[else] :
keyword[return] identifier[map] (
keyword[lambda] identifier[x] :( identifier[self] . identifier[_decode] ( identifier[x] [ literal[int] ]), identifier[self] . identifier[cast] ( identifier[x] [ literal[int] ])),
identifier[self] . identifier[_client] . identifier[zscan_iter] (
identifier[self] . identifier[key_prefix] , identifier[match] = identifier[match] , identifier[count] = identifier[count] )) | def iterscan(self, match='*', count=1000):
""" Much slower than iter(), but much more memory efficient if
k/v's retrieved are one-offs
@match: matches member names in the sorted set
@count: the user specified the amount of work that should be done
at every call in order to retrieve elements from the collection
-> iterator of |(member, score)| pairs
"""
if self.serialized:
return map(lambda x: (self._loads(x[0]), self.cast(x[1])), self._client.zscan_iter(self.key_prefix, match=match, count=count)) # depends on [control=['if'], data=[]]
else:
return map(lambda x: (self._decode(x[0]), self.cast(x[1])), self._client.zscan_iter(self.key_prefix, match=match, count=count)) |
def override_tab_value(data, headers, new_value=' ', **_):
    """Override tab values in the *data* with *new_value*.
    :param iterable data: An :term:`iterable` (e.g. list) of rows.
    :param iterable headers: The column headers.
    :param new_value: The new value to use for tab.
    :return: The processed data and headers.
    :rtype: tuple
    """
    def _sub_tabs(row):
        # Only text cells are rewritten; every other cell type passes
        # through untouched.
        return [cell.replace('\t', new_value)
                if isinstance(cell, text_type) else cell
                for cell in row]

    # Rows are processed lazily, one list per row, headers unchanged.
    return ((_sub_tabs(row) for row in data), headers)
constant[Override tab values in the *data* with *new_value*.
:param iterable data: An :term:`iterable` (e.g. list) of rows.
:param iterable headers: The column headers.
:param new_value: The new value to use for tab.
:return: The processed data and headers.
:rtype: tuple
]
return[tuple[[<ast.GeneratorExp object at 0x7da2054a79d0>, <ast.Name object at 0x7da2054a5f60>]]] | keyword[def] identifier[override_tab_value] ( identifier[data] , identifier[headers] , identifier[new_value] = literal[string] ,** identifier[_] ):
literal[string]
keyword[return] (([ identifier[v] . identifier[replace] ( literal[string] , identifier[new_value] ) keyword[if] identifier[isinstance] ( identifier[v] , identifier[text_type] ) keyword[else] identifier[v]
keyword[for] identifier[v] keyword[in] identifier[row] ] keyword[for] identifier[row] keyword[in] identifier[data] ),
identifier[headers] ) | def override_tab_value(data, headers, new_value=' ', **_):
"""Override tab values in the *data* with *new_value*.
:param iterable data: An :term:`iterable` (e.g. list) of rows.
:param iterable headers: The column headers.
:param new_value: The new value to use for tab.
:return: The processed data and headers.
:rtype: tuple
"""
return (([v.replace('\t', new_value) if isinstance(v, text_type) else v for v in row] for row in data), headers) |
def _set_dst_vtep_ip_any(self, v, load=False):
    """
    Setter method for dst_vtep_ip_any, mapped from YANG variable /overlay/access_list/type/vxlan/standard/seq/dst_vtep_ip_any (empty)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dst_vtep_ip_any is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dst_vtep_ip_any() directly.
    """
    # NOTE: auto-generated (pyangbind-style) setter; edit the YANG
    # model, not this code, when the leaf definition changes.
    # Values wrapped with their own YANG type helper coerce themselves
    # before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/coerce the value into the YANG 'empty' leaf type; the
        # keyword soup mirrors the leaf's definition in the YANG module.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="dst-vtep-ip-any", rest_name="dst-vtep-ip-any", parent=self, choice=(u'choice-dst-vtep-ip', u'case-dst-vtep-ip-any'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'dst vtep ip address: any', u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='empty', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with a structured payload describing the expected type.
        raise ValueError({
            'error-string': """dst_vtep_ip_any must be of a type compatible with empty""",
            'defined-type': "empty",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="dst-vtep-ip-any", rest_name="dst-vtep-ip-any", parent=self, choice=(u'choice-dst-vtep-ip', u'case-dst-vtep-ip-any'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'dst vtep ip address: any', u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='empty', is_config=True)""",
        })
    self.__dst_vtep_ip_any = t
    # Notify the containing object, if it tracks changes, that a leaf was set.
    if hasattr(self, '_set'):
        self._set()
constant[
Setter method for dst_vtep_ip_any, mapped from YANG variable /overlay/access_list/type/vxlan/standard/seq/dst_vtep_ip_any (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_dst_vtep_ip_any is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_dst_vtep_ip_any() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b26ac220>
name[self].__dst_vtep_ip_any assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_dst_vtep_ip_any] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGBool] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[choice] =( literal[string] , literal[string] ), identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__dst_vtep_ip_any] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_dst_vtep_ip_any(self, v, load=False):
"""
Setter method for dst_vtep_ip_any, mapped from YANG variable /overlay/access_list/type/vxlan/standard/seq/dst_vtep_ip_any (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_dst_vtep_ip_any is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_dst_vtep_ip_any() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGBool, is_leaf=True, yang_name='dst-vtep-ip-any', rest_name='dst-vtep-ip-any', parent=self, choice=(u'choice-dst-vtep-ip', u'case-dst-vtep-ip-any'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'dst vtep ip address: any', u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='empty', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'dst_vtep_ip_any must be of a type compatible with empty', 'defined-type': 'empty', 'generated-type': 'YANGDynClass(base=YANGBool, is_leaf=True, yang_name="dst-vtep-ip-any", rest_name="dst-vtep-ip-any", parent=self, choice=(u\'choice-dst-vtep-ip\', u\'case-dst-vtep-ip-any\'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'dst vtep ip address: any\', u\'cli-incomplete-command\': None, u\'cli-suppress-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-vxlan-visibility\', defining_module=\'brocade-vxlan-visibility\', yang_type=\'empty\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__dst_vtep_ip_any = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def decode(buff):
    """
    Transforms the raw buffer data read in into a list of bytes.

    ``buff`` is expected to be an iterable of single characters (each is
    converted with ``ord``) — presumably a string read from a device;
    verify against the caller.
    """
    # Map every character of the buffer to its integer code point.
    pp = list(map(ord, buff))
    # NOTE(review): the chained comparison ``0 == len(pp) == 1`` can never
    # be true (a length cannot equal both 0 and 1), so this branch is dead.
    # Presumably only one of the two comparisons was intended — TODO confirm.
    if 0 == len(pp) == 1:
        pp = []
return pp | def function[decode, parameter[buff]]:
constant[
Transforms the raw buffer data read in into a list of bytes
]
variable[pp] assign[=] call[name[list], parameter[call[name[map], parameter[name[ord], name[buff]]]]]
if compare[constant[0] equal[==] call[name[len], parameter[name[pp]]]] begin[:]
variable[pp] assign[=] list[[]]
return[name[pp]] | keyword[def] identifier[decode] ( identifier[buff] ):
literal[string]
identifier[pp] = identifier[list] ( identifier[map] ( identifier[ord] , identifier[buff] ))
keyword[if] literal[int] == identifier[len] ( identifier[pp] )== literal[int] :
identifier[pp] =[]
keyword[return] identifier[pp] | def decode(buff):
"""
Transforms the raw buffer data read in into a list of bytes
"""
pp = list(map(ord, buff))
if 0 == len(pp) == 1:
pp = [] # depends on [control=['if'], data=[]]
return pp |
def internal_error (out=stderr, etype=None, evalue=None, tb=None):
    """Print internal error message (output defaults to stderr).

    ``etype``, ``evalue`` and ``tb`` each default to the corresponding
    element of ``sys.exc_info()`` (the exception currently being handled)
    when not supplied by the caller.
    """
    print(os.linesep, file=out)
    # Bug-report banner; the support URL is substituted from the
    # application configuration.  The text is user-facing output.
    print(_("""********** Oops, I did it again. *************
You have found an internal error in LinkChecker. Please write a bug report
at %s
and include the following information:
- the URL or file you are testing
- the system information below
When using the commandline client:
- your commandline arguments and any custom configuration files.
- the output of a debug run with option "-Dall"
Not disclosing some of the information above due to privacy reasons is ok.
I will try to help you nonetheless, but you have to give me something
I can work with ;) .
""") % configuration.SupportUrl, file=out)
    # Fill in any missing piece of the (etype, evalue, tb) triple from
    # the exception that is currently being handled.
    if etype is None:
        etype = sys.exc_info()[0]
    if evalue is None:
        evalue = sys.exc_info()[1]
    if tb is None:
        tb = sys.exc_info()[2]
    # Print the traceback through the better_exchook helper.
    better_exchook2.better_exchook(etype, evalue, tb, out=out)
    # Append environment diagnostics that are useful in a bug report.
    print_app_info(out=out)
    print_proxy_info(out=out)
    print_locale_info(out=out)
    print(os.linesep,
_("******** LinkChecker internal error, over and out ********"), file=out) | def function[internal_error, parameter[out, etype, evalue, tb]]:
constant[Print internal error message (output defaults to stderr).]
call[name[print], parameter[name[os].linesep]]
call[name[print], parameter[binary_operation[call[name[_], parameter[constant[********** Oops, I did it again. *************
You have found an internal error in LinkChecker. Please write a bug report
at %s
and include the following information:
- the URL or file you are testing
- the system information below
When using the commandline client:
- your commandline arguments and any custom configuration files.
- the output of a debug run with option "-Dall"
Not disclosing some of the information above due to privacy reasons is ok.
I will try to help you nonetheless, but you have to give me something
I can work with ;) .
]]] <ast.Mod object at 0x7da2590d6920> name[configuration].SupportUrl]]]
if compare[name[etype] is constant[None]] begin[:]
variable[etype] assign[=] call[call[name[sys].exc_info, parameter[]]][constant[0]]
if compare[name[evalue] is constant[None]] begin[:]
variable[evalue] assign[=] call[call[name[sys].exc_info, parameter[]]][constant[1]]
if compare[name[tb] is constant[None]] begin[:]
variable[tb] assign[=] call[call[name[sys].exc_info, parameter[]]][constant[2]]
call[name[better_exchook2].better_exchook, parameter[name[etype], name[evalue], name[tb]]]
call[name[print_app_info], parameter[]]
call[name[print_proxy_info], parameter[]]
call[name[print_locale_info], parameter[]]
call[name[print], parameter[name[os].linesep, call[name[_], parameter[constant[******** LinkChecker internal error, over and out ********]]]]] | keyword[def] identifier[internal_error] ( identifier[out] = identifier[stderr] , identifier[etype] = keyword[None] , identifier[evalue] = keyword[None] , identifier[tb] = keyword[None] ):
literal[string]
identifier[print] ( identifier[os] . identifier[linesep] , identifier[file] = identifier[out] )
identifier[print] ( identifier[_] ( literal[string] )% identifier[configuration] . identifier[SupportUrl] , identifier[file] = identifier[out] )
keyword[if] identifier[etype] keyword[is] keyword[None] :
identifier[etype] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]
keyword[if] identifier[evalue] keyword[is] keyword[None] :
identifier[evalue] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]
keyword[if] identifier[tb] keyword[is] keyword[None] :
identifier[tb] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]
identifier[better_exchook2] . identifier[better_exchook] ( identifier[etype] , identifier[evalue] , identifier[tb] , identifier[out] = identifier[out] )
identifier[print_app_info] ( identifier[out] = identifier[out] )
identifier[print_proxy_info] ( identifier[out] = identifier[out] )
identifier[print_locale_info] ( identifier[out] = identifier[out] )
identifier[print] ( identifier[os] . identifier[linesep] ,
identifier[_] ( literal[string] ), identifier[file] = identifier[out] ) | def internal_error(out=stderr, etype=None, evalue=None, tb=None):
"""Print internal error message (output defaults to stderr)."""
print(os.linesep, file=out)
print(_('********** Oops, I did it again. *************\n\nYou have found an internal error in LinkChecker. Please write a bug report\nat %s\nand include the following information:\n- the URL or file you are testing\n- the system information below\n\nWhen using the commandline client:\n- your commandline arguments and any custom configuration files.\n- the output of a debug run with option "-Dall"\n\nNot disclosing some of the information above due to privacy reasons is ok.\nI will try to help you nonetheless, but you have to give me something\nI can work with ;) .\n') % configuration.SupportUrl, file=out)
if etype is None:
etype = sys.exc_info()[0] # depends on [control=['if'], data=['etype']]
if evalue is None:
evalue = sys.exc_info()[1] # depends on [control=['if'], data=['evalue']]
if tb is None:
tb = sys.exc_info()[2] # depends on [control=['if'], data=['tb']]
better_exchook2.better_exchook(etype, evalue, tb, out=out)
print_app_info(out=out)
print_proxy_info(out=out)
print_locale_info(out=out)
print(os.linesep, _('******** LinkChecker internal error, over and out ********'), file=out) |
def auth_get_token(self, check_scope=True):
    '''Refresh or acquire access_token.

    The raw token response is also cached on
    ``self.auth_access_data_raw`` before being processed.
    '''
    # Chained assignment: keep the raw response for later inspection
    # while also passing it on for processing.
    res = self.auth_access_data_raw = self._auth_token_request()
return self._auth_token_process(res, check_scope=check_scope) | def function[auth_get_token, parameter[self, check_scope]]:
constant[Refresh or acquire access_token.]
variable[res] assign[=] call[name[self]._auth_token_request, parameter[]]
return[call[name[self]._auth_token_process, parameter[name[res]]]] | keyword[def] identifier[auth_get_token] ( identifier[self] , identifier[check_scope] = keyword[True] ):
literal[string]
identifier[res] = identifier[self] . identifier[auth_access_data_raw] = identifier[self] . identifier[_auth_token_request] ()
keyword[return] identifier[self] . identifier[_auth_token_process] ( identifier[res] , identifier[check_scope] = identifier[check_scope] ) | def auth_get_token(self, check_scope=True):
"""Refresh or acquire access_token."""
res = self.auth_access_data_raw = self._auth_token_request()
return self._auth_token_process(res, check_scope=check_scope) |
def modified_environ(*remove: str, **update: str) -> Iterator[None]:
    """
    Temporarily updates the ``os.environ`` dictionary in-place and resets it to the original state
    when finished.
    (https://stackoverflow.com/questions/2059482/
    python-temporarily-modify-the-current-processs-environment/34333710#34333710)
    The ``os.environ`` dictionary is updated in-place so that the modification is sure to work in
    all situations.
    Args:
        remove: Environment variables to remove.
        update: Dictionary of environment variables and values to add/update.
    Examples:
        >>> with modified_environ(Test='abc'):
        ...    import os
        ...    print(os.environ.get('Test'))
        abc
        >>> print(os.environ.get('Test'))
        None
    """
    env = os.environ
    # Normalize the argument containers so the set algebra below is safe.
    update = update or {}
    remove = remove or ()
    # List of environment variables being updated or removed.
    stomped = (set(update.keys()) | set(remove)) & set(env.keys())
    # Environment variables and values to restore on exit.
    update_after = {k: env[k] for k in stomped}
    # Environment variables and values to remove on exit (keys that were
    # newly added by this call, i.e. absent from the original env).
    remove_after = frozenset(k for k in update if k not in env)
    try:
        env.update(update)
        [env.pop(k, None) for k in remove]  # pylint: disable=expression-not-assigned
        yield
    finally:
        # Undo all changes even if the with-body raised.
        env.update(update_after)
[env.pop(k) for k in remove_after] | def function[modified_environ, parameter[]]:
constant[
Temporarily updates the ``os.environ`` dictionary in-place and resets it to the original state
when finished.
(https://stackoverflow.com/questions/2059482/
python-temporarily-modify-the-current-processs-environment/34333710#34333710)
The ``os.environ`` dictionary is updated in-place so that the modification is sure to work in
all situations.
Args:
remove: Environment variables to remove.
update: Dictionary of environment variables and values to add/update.
Examples:
>>> with modified_environ(Test='abc'):
... import os
... print(os.environ.get('Test'))
abc
>>> print(os.environ.get('Test'))
None
]
variable[env] assign[=] name[os].environ
variable[update] assign[=] <ast.BoolOp object at 0x7da20c6c64a0>
variable[remove] assign[=] <ast.BoolOp object at 0x7da20c6c6fb0>
variable[stomped] assign[=] binary_operation[binary_operation[call[name[set], parameter[call[name[update].keys, parameter[]]]] <ast.BitOr object at 0x7da2590d6aa0> call[name[set], parameter[name[remove]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[env].keys, parameter[]]]]]
variable[update_after] assign[=] <ast.DictComp object at 0x7da20c6c4c70>
variable[remove_after] assign[=] call[name[frozenset], parameter[<ast.GeneratorExp object at 0x7da20c6c7130>]]
<ast.Try object at 0x7da20c6c7340> | keyword[def] identifier[modified_environ] (* identifier[remove] : identifier[str] ,** identifier[update] : identifier[str] )-> identifier[Iterator] [ keyword[None] ]:
literal[string]
identifier[env] = identifier[os] . identifier[environ]
identifier[update] = identifier[update] keyword[or] {}
identifier[remove] = identifier[remove] keyword[or] ()
identifier[stomped] =( identifier[set] ( identifier[update] . identifier[keys] ())| identifier[set] ( identifier[remove] ))& identifier[set] ( identifier[env] . identifier[keys] ())
identifier[update_after] ={ identifier[k] : identifier[env] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[stomped] }
identifier[remove_after] = identifier[frozenset] ( identifier[k] keyword[for] identifier[k] keyword[in] identifier[update] keyword[if] identifier[k] keyword[not] keyword[in] identifier[env] )
keyword[try] :
identifier[env] . identifier[update] ( identifier[update] )
[ identifier[env] . identifier[pop] ( identifier[k] , keyword[None] ) keyword[for] identifier[k] keyword[in] identifier[remove] ]
keyword[yield]
keyword[finally] :
identifier[env] . identifier[update] ( identifier[update_after] )
[ identifier[env] . identifier[pop] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[remove_after] ] | def modified_environ(*remove: str, **update: str) -> Iterator[None]:
"""
Temporarily updates the ``os.environ`` dictionary in-place and resets it to the original state
when finished.
(https://stackoverflow.com/questions/2059482/
python-temporarily-modify-the-current-processs-environment/34333710#34333710)
The ``os.environ`` dictionary is updated in-place so that the modification is sure to work in
all situations.
Args:
remove: Environment variables to remove.
update: Dictionary of environment variables and values to add/update.
Examples:
>>> with modified_environ(Test='abc'):
... import os
... print(os.environ.get('Test'))
abc
>>> print(os.environ.get('Test'))
None
"""
env = os.environ
update = update or {}
remove = remove or ()
# List of environment variables being updated or removed.
stomped = (set(update.keys()) | set(remove)) & set(env.keys())
# Environment variables and values to restore on exit.
update_after = {k: env[k] for k in stomped}
# Environment variables and values to remove on exit.
remove_after = frozenset((k for k in update if k not in env))
try:
env.update(update)
[env.pop(k, None) for k in remove] # pylint: disable=expression-not-assigned
yield # depends on [control=['try'], data=[]]
finally:
env.update(update_after)
[env.pop(k) for k in remove_after] |
def get_output_shapes(self):
    """Get the shapes of the outputs.

    Returns a list of ``(name, shape)`` tuples, one per symbol output.
    Where an output declares a batch axis (``output_layouts`` entry
    >= 0), that dimension is replaced with ``self.batch_size``.
    """
    # Shapes as produced by the first executor.
    outputs = self.execs[0].outputs
    shapes = [out.shape for out in outputs]
    concat_shapes = []
    for key, the_shape, axis in zip(self.symbol.list_outputs(), shapes, self.output_layouts):
        the_shape = list(the_shape)
        # A non-negative layout axis marks the batch dimension; widen it
        # to the overall batch size.
        if axis >= 0:
            the_shape[axis] = self.batch_size
        concat_shapes.append((key, tuple(the_shape)))
return concat_shapes | def function[get_output_shapes, parameter[self]]:
constant[Get the shapes of the outputs.]
variable[outputs] assign[=] call[name[self].execs][constant[0]].outputs
variable[shapes] assign[=] <ast.ListComp object at 0x7da1b2066530>
variable[concat_shapes] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b2064ee0>, <ast.Name object at 0x7da1b2064ac0>, <ast.Name object at 0x7da1b2067dc0>]]] in starred[call[name[zip], parameter[call[name[self].symbol.list_outputs, parameter[]], name[shapes], name[self].output_layouts]]] begin[:]
variable[the_shape] assign[=] call[name[list], parameter[name[the_shape]]]
if compare[name[axis] greater_or_equal[>=] constant[0]] begin[:]
call[name[the_shape]][name[axis]] assign[=] name[self].batch_size
call[name[concat_shapes].append, parameter[tuple[[<ast.Name object at 0x7da1b2065ea0>, <ast.Call object at 0x7da1b2066740>]]]]
return[name[concat_shapes]] | keyword[def] identifier[get_output_shapes] ( identifier[self] ):
literal[string]
identifier[outputs] = identifier[self] . identifier[execs] [ literal[int] ]. identifier[outputs]
identifier[shapes] =[ identifier[out] . identifier[shape] keyword[for] identifier[out] keyword[in] identifier[outputs] ]
identifier[concat_shapes] =[]
keyword[for] identifier[key] , identifier[the_shape] , identifier[axis] keyword[in] identifier[zip] ( identifier[self] . identifier[symbol] . identifier[list_outputs] (), identifier[shapes] , identifier[self] . identifier[output_layouts] ):
identifier[the_shape] = identifier[list] ( identifier[the_shape] )
keyword[if] identifier[axis] >= literal[int] :
identifier[the_shape] [ identifier[axis] ]= identifier[self] . identifier[batch_size]
identifier[concat_shapes] . identifier[append] (( identifier[key] , identifier[tuple] ( identifier[the_shape] )))
keyword[return] identifier[concat_shapes] | def get_output_shapes(self):
"""Get the shapes of the outputs."""
outputs = self.execs[0].outputs
shapes = [out.shape for out in outputs]
concat_shapes = []
for (key, the_shape, axis) in zip(self.symbol.list_outputs(), shapes, self.output_layouts):
the_shape = list(the_shape)
if axis >= 0:
the_shape[axis] = self.batch_size # depends on [control=['if'], data=['axis']]
concat_shapes.append((key, tuple(the_shape))) # depends on [control=['for'], data=[]]
return concat_shapes |
def dirtool(operation, directory):
    """
    Tools For Directories (If Exists, Make And Delete)

    :param operation: One of ``'exists'``, ``'create'`` or ``'delete'``
        (case-insensitive).
    :param directory: Path of the directory to operate on.
    :raises ValueError: If an unknown operation is provided.
    """
    operation = operation.lower()
    # 'exists' is a query and returns a bool; the mutating operations
    # below return None implicitly.
    if operation == 'exists':
        return bool(os.path.exists(directory))
    if operation == 'create':
        os.makedirs(directory)
    elif operation == 'delete':
        # Only removes an *empty* directory (os.rmdir semantics).
        os.rmdir(directory)
    else:
raise ValueError('Invalid operation provided.') | def function[dirtool, parameter[operation, directory]]:
constant[
Tools For Directories (If Exists, Make And Delete)
:raises ValueError: Nor a string or a list was provided.
]
variable[operation] assign[=] call[name[operation].lower, parameter[]]
if compare[name[operation] equal[==] constant[exists]] begin[:]
return[call[name[bool], parameter[call[name[os].path.exists, parameter[name[directory]]]]]]
if compare[name[operation] equal[==] constant[create]] begin[:]
call[name[os].makedirs, parameter[name[directory]]] | keyword[def] identifier[dirtool] ( identifier[operation] , identifier[directory] ):
literal[string]
identifier[operation] = identifier[operation] . identifier[lower] ()
keyword[if] identifier[operation] == literal[string] :
keyword[return] identifier[bool] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ))
keyword[if] identifier[operation] == literal[string] :
identifier[os] . identifier[makedirs] ( identifier[directory] )
keyword[elif] identifier[operation] == literal[string] :
identifier[os] . identifier[rmdir] ( identifier[directory] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def dirtool(operation, directory):
"""
Tools For Directories (If Exists, Make And Delete)
:raises ValueError: Nor a string or a list was provided.
"""
operation = operation.lower()
if operation == 'exists':
return bool(os.path.exists(directory)) # depends on [control=['if'], data=[]]
if operation == 'create':
os.makedirs(directory) # depends on [control=['if'], data=[]]
elif operation == 'delete':
os.rmdir(directory) # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid operation provided.') |
def find_source_files_from_list(self, file_names):
    """
    Finds all source files that actually exists from a list of file names.

    Existing files are registered in ``self._source_file_names`` keyed by
    their basename without extension; missing files and basename clashes
    are reported via ``self._io.error`` and recorded in
    ``self.error_file_names``.

    :param list[str] file_names: The list of file names.
    """
    for file_name in file_names:
        if os.path.exists(file_name):
            # The routine name is the file's basename without extension.
            routine_name = os.path.splitext(os.path.basename(file_name))[0]
            if routine_name not in self._source_file_names:
                self._source_file_names[routine_name] = file_name
            else:
                # Two files map to the same routine name: report the
                # clash and remember the offending file.
                self._io.error("Files '{0}' and '{1}' have the same basename.".
                               format(self._source_file_names[routine_name], file_name))
                self.error_file_names.add(file_name)
        else:
            # Missing file: report and record it as an error.
            self._io.error("File not exists: '{0}'".format(file_name))
self.error_file_names.add(file_name) | def function[find_source_files_from_list, parameter[self, file_names]]:
constant[
Finds all source files that actually exists from a list of file names.
:param list[str] file_names: The list of file names.
]
for taget[name[file_name]] in starred[name[file_names]] begin[:]
if call[name[os].path.exists, parameter[name[file_name]]] begin[:]
variable[routine_name] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[file_name]]]]]][constant[0]]
if compare[name[routine_name] <ast.NotIn object at 0x7da2590d7190> name[self]._source_file_names] begin[:]
call[name[self]._source_file_names][name[routine_name]] assign[=] name[file_name] | keyword[def] identifier[find_source_files_from_list] ( identifier[self] , identifier[file_names] ):
literal[string]
keyword[for] identifier[file_name] keyword[in] identifier[file_names] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[file_name] ):
identifier[routine_name] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[file_name] ))[ literal[int] ]
keyword[if] identifier[routine_name] keyword[not] keyword[in] identifier[self] . identifier[_source_file_names] :
identifier[self] . identifier[_source_file_names] [ identifier[routine_name] ]= identifier[file_name]
keyword[else] :
identifier[self] . identifier[_io] . identifier[error] ( literal[string] .
identifier[format] ( identifier[self] . identifier[_source_file_names] [ identifier[routine_name] ], identifier[file_name] ))
identifier[self] . identifier[error_file_names] . identifier[add] ( identifier[file_name] )
keyword[else] :
identifier[self] . identifier[_io] . identifier[error] ( literal[string] . identifier[format] ( identifier[file_name] ))
identifier[self] . identifier[error_file_names] . identifier[add] ( identifier[file_name] ) | def find_source_files_from_list(self, file_names):
"""
Finds all source files that actually exists from a list of file names.
:param list[str] file_names: The list of file names.
"""
for file_name in file_names:
if os.path.exists(file_name):
routine_name = os.path.splitext(os.path.basename(file_name))[0]
if routine_name not in self._source_file_names:
self._source_file_names[routine_name] = file_name # depends on [control=['if'], data=['routine_name']]
else:
self._io.error("Files '{0}' and '{1}' have the same basename.".format(self._source_file_names[routine_name], file_name))
self.error_file_names.add(file_name) # depends on [control=['if'], data=[]]
else:
self._io.error("File not exists: '{0}'".format(file_name))
self.error_file_names.add(file_name) # depends on [control=['for'], data=['file_name']] |
def date_decoder(dic):
    """Add python types decoding. See JsonEncoder.

    Presumably used as an ``object_hook`` for ``json.loads`` — confirm
    with the caller.  Dictionaries carrying a ``'__date__'``
    (resp. ``'__datetime__'``) marker key are rebuilt into
    ``datetime.date`` (resp. ``datetime.datetime``) objects; any other
    dictionary is returned unchanged.
    """
    if '__date__' in dic:
        try:
            # All keys except the marker are forwarded as date(...) kwargs.
            d = datetime.date(**{c: v for c, v in dic.items() if not c == "__date__"})
        except (TypeError, ValueError):
            raise json.JSONDecodeError("Corrupted date format !", str(dic), 1)
    elif '__datetime__' in dic:
        try:
            # Same scheme for datetime payloads.
            d = datetime.datetime(**{c: v for c, v in dic.items() if not c == "__datetime__"})
        except (TypeError, ValueError):
            raise json.JSONDecodeError("Corrupted datetime format !", str(dic), 1)
    else:
        return dic
return d | def function[date_decoder, parameter[dic]]:
constant[Add python types decoding. See JsonEncoder]
if compare[constant[__date__] in name[dic]] begin[:]
<ast.Try object at 0x7da1b1110940>
return[name[d]] | keyword[def] identifier[date_decoder] ( identifier[dic] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[dic] :
keyword[try] :
identifier[d] = identifier[datetime] . identifier[date] (**{ identifier[c] : identifier[v] keyword[for] identifier[c] , identifier[v] keyword[in] identifier[dic] . identifier[items] () keyword[if] keyword[not] identifier[c] == literal[string] })
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[json] . identifier[JSONDecodeError] ( literal[string] , identifier[str] ( identifier[dic] ), literal[int] )
keyword[elif] literal[string] keyword[in] identifier[dic] :
keyword[try] :
identifier[d] = identifier[datetime] . identifier[datetime] (**{ identifier[c] : identifier[v] keyword[for] identifier[c] , identifier[v] keyword[in] identifier[dic] . identifier[items] () keyword[if] keyword[not] identifier[c] == literal[string] })
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[json] . identifier[JSONDecodeError] ( literal[string] , identifier[str] ( identifier[dic] ), literal[int] )
keyword[else] :
keyword[return] identifier[dic]
keyword[return] identifier[d] | def date_decoder(dic):
"""Add python types decoding. See JsonEncoder"""
if '__date__' in dic:
try:
d = datetime.date(**{c: v for (c, v) in dic.items() if not c == '__date__'}) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise json.JSONDecodeError('Corrupted date format !', str(dic), 1) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['dic']]
elif '__datetime__' in dic:
try:
d = datetime.datetime(**{c: v for (c, v) in dic.items() if not c == '__datetime__'}) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise json.JSONDecodeError('Corrupted datetime format !', str(dic), 1) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['dic']]
else:
return dic
return d |
def add(self, name, session, **kwargs):
    '''taobao.postage.add — add a postage (shipping-fee) template.

    The new template belongs to the user of the current session.

    The fields ``postage_mode_types``, ``postage_mode_dests``,
    ``postage_mode_prices`` and ``postage_mode_increases`` together
    describe the list of postage sub-templates.  Each sub-template has a
    type (``post``, ``express`` or ``ems``), a destination region list
    (region codes separated by half-width commas), a base price and a
    price increment.  Multiple sub-templates are encoded positionally:
    the i-th element of each of the four fields belongs to sub-template
    i, elements separated by half-width semicolons.
    '''
    request = TOPRequest('taobao.postage.add')
    request['name'] = name
    # Copy optional keyword arguments into the request; a None value is
    # skipped only for keys outside the known optional-parameter list.
    # NOTE(review): ``iteritems`` and ``v==None`` are Python-2 idioms.
    for k, v in kwargs.iteritems():
        if k not in ('post_price', 'post_increase', 'express_price', 'express_increase', 'ems_price', 'ems_increase', 'memo', 'postage_mode_types', 'postage_mode_dests', 'postage_mode_prices', 'postage_mode_increases') and v==None: continue
        request[k] = v
    # Execute the call and populate this object from the returned template.
    self.create(self.execute(request, session)['postage'])
return self | def function[add, parameter[self, name, session]]:
constant[taobao.postage.add 添加邮费模板
添加邮费模板
新增的邮费模板属于当前会话用户
postage_mode_types、postage_mode_dests、postage_mode_prices、 postage_mode_increases四个字段组合起来表示邮费的子模板列表。每个邮费子模板都包含了type(邮费类型,有post、 express、ems可以选择)、dest(邮费模板应用地区,每个模板可以使用于多个地区,每个地区填入他的代码,地区与地区之间用半角逗号分隔)、 price(邮费基价)、increment(邮费增价)四个部分。如果有多个子模板,则将他们的4个部分分别组合,之间用半角分号隔开(注意每个模板的每个部分的位置要一样。即,子模板1号的type、dest、price、increment都要排在这四个参数的第一位;子模板2号要排在第二位……以此类推)]
variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.postage.add]]]
call[name[request]][constant[name]] assign[=] name[name]
for taget[tuple[[<ast.Name object at 0x7da1b26166b0>, <ast.Name object at 0x7da1b2617280>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b2617a30> begin[:]
continue
call[name[request]][name[k]] assign[=] name[v]
call[name[self].create, parameter[call[call[name[self].execute, parameter[name[request], name[session]]]][constant[postage]]]]
return[name[self]] | keyword[def] identifier[add] ( identifier[self] , identifier[name] , identifier[session] ,** identifier[kwargs] ):
literal[string]
identifier[request] = identifier[TOPRequest] ( literal[string] )
identifier[request] [ literal[string] ]= identifier[name]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[iteritems] ():
keyword[if] identifier[k] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[and] identifier[v] == keyword[None] : keyword[continue]
identifier[request] [ identifier[k] ]= identifier[v]
identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] )[ literal[string] ])
keyword[return] identifier[self] | def add(self, name, session, **kwargs):
"""taobao.postage.add 添加邮费模板
添加邮费模板
新增的邮费模板属于当前会话用户
postage_mode_types、postage_mode_dests、postage_mode_prices、 postage_mode_increases四个字段组合起来表示邮费的子模板列表。每个邮费子模板都包含了type(邮费类型,有post、 express、ems可以选择)、dest(邮费模板应用地区,每个模板可以使用于多个地区,每个地区填入他的代码,地区与地区之间用半角逗号分隔)、 price(邮费基价)、increment(邮费增价)四个部分。如果有多个子模板,则将他们的4个部分分别组合,之间用半角分号隔开(注意每个模板的每个部分的位置要一样。即,子模板1号的type、dest、price、increment都要排在这四个参数的第一位;子模板2号要排在第二位……以此类推)"""
request = TOPRequest('taobao.postage.add')
request['name'] = name
for (k, v) in kwargs.iteritems():
if k not in ('post_price', 'post_increase', 'express_price', 'express_increase', 'ems_price', 'ems_increase', 'memo', 'postage_mode_types', 'postage_mode_dests', 'postage_mode_prices', 'postage_mode_increases') and v == None:
continue # depends on [control=['if'], data=[]]
request[k] = v # depends on [control=['for'], data=[]]
self.create(self.execute(request, session)['postage'])
return self |
async def stream(self,
                 event_type: Type[TStreamEvent],
                 num_events: Optional[int] = None) -> AsyncGenerator[TStreamEvent, None]:
    """
    Stream all events that match the specified event type. This returns an
    ``AsyncIterable[BaseEvent]`` which can be consumed through an ``async for`` loop.
    An optional ``num_events`` parameter can be passed to stop streaming after a maximum amount
    of events was received.
    """
    # Private queue for this subscriber; registering it under the event
    # type presumably lets the dispatcher deliver matching events into
    # it — confirm against the enclosing class.
    queue: asyncio.Queue = asyncio.Queue()
    if event_type not in self._queues:
        self._queues[event_type] = []
    self._queues[event_type].append(queue)
    # ``i`` counts delivered events; None means "no limit".
    i = None if num_events is None else 0
    while True:
        try:
            yield await queue.get()
        except GeneratorExit:
            # Consumer stopped iterating: unsubscribe and stop.
            self._queues[event_type].remove(queue)
            break
        except asyncio.CancelledError:
            # Task cancelled: unsubscribe and stop.
            self._queues[event_type].remove(queue)
            break
        else:
            if i is None:
                continue
            i += 1
            if i >= cast(int, num_events):
                # Requested number of events delivered: unsubscribe.
                self._queues[event_type].remove(queue)
                break
break | <ast.AsyncFunctionDef object at 0x7da1b0e2c370> | keyword[async] keyword[def] identifier[stream] ( identifier[self] ,
identifier[event_type] : identifier[Type] [ identifier[TStreamEvent] ],
identifier[num_events] : identifier[Optional] [ identifier[int] ]= keyword[None] )-> identifier[AsyncGenerator] [ identifier[TStreamEvent] , keyword[None] ]:
literal[string]
identifier[queue] : identifier[asyncio] . identifier[Queue] = identifier[asyncio] . identifier[Queue] ()
keyword[if] identifier[event_type] keyword[not] keyword[in] identifier[self] . identifier[_queues] :
identifier[self] . identifier[_queues] [ identifier[event_type] ]=[]
identifier[self] . identifier[_queues] [ identifier[event_type] ]. identifier[append] ( identifier[queue] )
identifier[i] = keyword[None] keyword[if] identifier[num_events] keyword[is] keyword[None] keyword[else] literal[int]
keyword[while] keyword[True] :
keyword[try] :
keyword[yield] keyword[await] identifier[queue] . identifier[get] ()
keyword[except] identifier[GeneratorExit] :
identifier[self] . identifier[_queues] [ identifier[event_type] ]. identifier[remove] ( identifier[queue] )
keyword[break]
keyword[except] identifier[asyncio] . identifier[CancelledError] :
identifier[self] . identifier[_queues] [ identifier[event_type] ]. identifier[remove] ( identifier[queue] )
keyword[break]
keyword[else] :
keyword[if] identifier[i] keyword[is] keyword[None] :
keyword[continue]
identifier[i] += literal[int]
keyword[if] identifier[i] >= identifier[cast] ( identifier[int] , identifier[num_events] ):
identifier[self] . identifier[_queues] [ identifier[event_type] ]. identifier[remove] ( identifier[queue] )
keyword[break] | async def stream(self, event_type: Type[TStreamEvent], num_events: Optional[int]=None) -> AsyncGenerator[TStreamEvent, None]:
"""
Stream all events that match the specified event type. This returns an
``AsyncIterable[BaseEvent]`` which can be consumed through an ``async for`` loop.
An optional ``num_events`` parameter can be passed to stop streaming after a maximum amount
of events was received.
"""
queue: asyncio.Queue = asyncio.Queue()
if event_type not in self._queues:
self._queues[event_type] = [] # depends on [control=['if'], data=['event_type']]
self._queues[event_type].append(queue)
i = None if num_events is None else 0
while True:
try:
yield (await queue.get()) # depends on [control=['try'], data=[]]
except GeneratorExit:
self._queues[event_type].remove(queue)
break # depends on [control=['except'], data=[]]
except asyncio.CancelledError:
self._queues[event_type].remove(queue)
break # depends on [control=['except'], data=[]]
else:
if i is None:
continue # depends on [control=['if'], data=[]]
i += 1
if i >= cast(int, num_events):
self._queues[event_type].remove(queue)
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def main(self, argv=None, loop=LOOP_NEVER):
    """A possible mainline handler for a script, like so:
        import cmdln
        class MyCmd(cmdln.Cmdln):
            name = "mycmd"
            ...
        if __name__ == "__main__":
            MyCmd().main()
    By default this will use sys.argv to issue a single command to
    'MyCmd', then exit. The 'loop' argument can be use to control
    interactive shell behaviour.
    Arguments:
        "argv" (optional, default sys.argv) is the command to run.
            It must be a sequence, where the first element is the
            command name and subsequent elements the args for that
            command.
        "loop" (optional, default LOOP_NEVER) is a constant
            indicating if a command loop should be started (i.e. an
            interactive shell). Valid values (constants on this module):
                LOOP_ALWAYS     start loop and run "argv", if any
                LOOP_NEVER      run "argv" (or .emptyline()) and exit
                LOOP_IF_EMPTY   run "argv", if given, and exit;
                                otherwise, start loop
    """
    if argv is None:
        import sys
        argv = sys.argv
    else:
        argv = argv[:] # don't modify caller's list
    self.optparser = self.get_optparser()
    if self.optparser: # i.e. optparser=None means don't process for opts
        try:
            self.options, args = self.optparser.parse_args(argv[1:])
        except CmdlnUserError as ex:
            # User-level error: print a short hint instead of a traceback.
            msg = "%s: %s\nTry '%s help' for info.\n" % (self.name, ex,
                                                         self.name)
            self.stderr.write(self._str(msg))
            self.stderr.flush()
            return 1
        except StopOptionProcessing as ex:
            # Option processing requested a clean stop (e.g. after
            # printing help/version); exit successfully.
            return 0
    else:
        self.options, args = None, argv[1:]
    # Hook for subclasses to react to parsed options; a truthy return
    # value becomes the exit code.
    retval = self.postoptparse()
    if retval:
        return retval
    # Dispatch according to the requested loop behaviour.
    if loop == LOOP_ALWAYS:
        if args:
            self.cmdqueue.append(args)
        return self.cmdloop()
    elif loop == LOOP_NEVER:
        if args:
            return self.cmd(args)
        else:
            return self.emptyline()
    elif loop == LOOP_IF_EMPTY:
        if args:
            return self.cmd(args)
        else:
return self.cmdloop() | def function[main, parameter[self, argv, loop]]:
constant[A possible mainline handler for a script, like so:
import cmdln
class MyCmd(cmdln.Cmdln):
name = "mycmd"
...
if __name__ == "__main__":
MyCmd().main()
By default this will use sys.argv to issue a single command to
'MyCmd', then exit. The 'loop' argument can be use to control
interactive shell behaviour.
Arguments:
"argv" (optional, default sys.argv) is the command to run.
It must be a sequence, where the first element is the
command name and subsequent elements the args for that
command.
"loop" (optional, default LOOP_NEVER) is a constant
indicating if a command loop should be started (i.e. an
interactive shell). Valid values (constants on this module):
LOOP_ALWAYS start loop and run "argv", if any
LOOP_NEVER run "argv" (or .emptyline()) and exit
LOOP_IF_EMPTY run "argv", if given, and exit;
otherwise, start loop
]
if compare[name[argv] is constant[None]] begin[:]
import module[sys]
variable[argv] assign[=] name[sys].argv
name[self].optparser assign[=] call[name[self].get_optparser, parameter[]]
if name[self].optparser begin[:]
<ast.Try object at 0x7da1b031b910>
variable[retval] assign[=] call[name[self].postoptparse, parameter[]]
if name[retval] begin[:]
return[name[retval]]
if compare[name[loop] equal[==] name[LOOP_ALWAYS]] begin[:]
if name[args] begin[:]
call[name[self].cmdqueue.append, parameter[name[args]]]
return[call[name[self].cmdloop, parameter[]]] | keyword[def] identifier[main] ( identifier[self] , identifier[argv] = keyword[None] , identifier[loop] = identifier[LOOP_NEVER] ):
literal[string]
keyword[if] identifier[argv] keyword[is] keyword[None] :
keyword[import] identifier[sys]
identifier[argv] = identifier[sys] . identifier[argv]
keyword[else] :
identifier[argv] = identifier[argv] [:]
identifier[self] . identifier[optparser] = identifier[self] . identifier[get_optparser] ()
keyword[if] identifier[self] . identifier[optparser] :
keyword[try] :
identifier[self] . identifier[options] , identifier[args] = identifier[self] . identifier[optparser] . identifier[parse_args] ( identifier[argv] [ literal[int] :])
keyword[except] identifier[CmdlnUserError] keyword[as] identifier[ex] :
identifier[msg] = literal[string] %( identifier[self] . identifier[name] , identifier[ex] ,
identifier[self] . identifier[name] )
identifier[self] . identifier[stderr] . identifier[write] ( identifier[self] . identifier[_str] ( identifier[msg] ))
identifier[self] . identifier[stderr] . identifier[flush] ()
keyword[return] literal[int]
keyword[except] identifier[StopOptionProcessing] keyword[as] identifier[ex] :
keyword[return] literal[int]
keyword[else] :
identifier[self] . identifier[options] , identifier[args] = keyword[None] , identifier[argv] [ literal[int] :]
identifier[retval] = identifier[self] . identifier[postoptparse] ()
keyword[if] identifier[retval] :
keyword[return] identifier[retval]
keyword[if] identifier[loop] == identifier[LOOP_ALWAYS] :
keyword[if] identifier[args] :
identifier[self] . identifier[cmdqueue] . identifier[append] ( identifier[args] )
keyword[return] identifier[self] . identifier[cmdloop] ()
keyword[elif] identifier[loop] == identifier[LOOP_NEVER] :
keyword[if] identifier[args] :
keyword[return] identifier[self] . identifier[cmd] ( identifier[args] )
keyword[else] :
keyword[return] identifier[self] . identifier[emptyline] ()
keyword[elif] identifier[loop] == identifier[LOOP_IF_EMPTY] :
keyword[if] identifier[args] :
keyword[return] identifier[self] . identifier[cmd] ( identifier[args] )
keyword[else] :
keyword[return] identifier[self] . identifier[cmdloop] () | def main(self, argv=None, loop=LOOP_NEVER):
"""A possible mainline handler for a script, like so:
import cmdln
class MyCmd(cmdln.Cmdln):
name = "mycmd"
...
if __name__ == "__main__":
MyCmd().main()
By default this will use sys.argv to issue a single command to
'MyCmd', then exit. The 'loop' argument can be use to control
interactive shell behaviour.
Arguments:
"argv" (optional, default sys.argv) is the command to run.
It must be a sequence, where the first element is the
command name and subsequent elements the args for that
command.
"loop" (optional, default LOOP_NEVER) is a constant
indicating if a command loop should be started (i.e. an
interactive shell). Valid values (constants on this module):
LOOP_ALWAYS start loop and run "argv", if any
LOOP_NEVER run "argv" (or .emptyline()) and exit
LOOP_IF_EMPTY run "argv", if given, and exit;
otherwise, start loop
"""
if argv is None:
import sys
argv = sys.argv # depends on [control=['if'], data=['argv']]
else:
argv = argv[:] # don't modify caller's list
self.optparser = self.get_optparser()
if self.optparser: # i.e. optparser=None means don't process for opts
try:
(self.options, args) = self.optparser.parse_args(argv[1:]) # depends on [control=['try'], data=[]]
except CmdlnUserError as ex:
msg = "%s: %s\nTry '%s help' for info.\n" % (self.name, ex, self.name)
self.stderr.write(self._str(msg))
self.stderr.flush()
return 1 # depends on [control=['except'], data=['ex']]
except StopOptionProcessing as ex:
return 0 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
(self.options, args) = (None, argv[1:])
retval = self.postoptparse()
if retval:
return retval # depends on [control=['if'], data=[]]
if loop == LOOP_ALWAYS:
if args:
self.cmdqueue.append(args) # depends on [control=['if'], data=[]]
return self.cmdloop() # depends on [control=['if'], data=[]]
elif loop == LOOP_NEVER:
if args:
return self.cmd(args) # depends on [control=['if'], data=[]]
else:
return self.emptyline() # depends on [control=['if'], data=[]]
elif loop == LOOP_IF_EMPTY:
if args:
return self.cmd(args) # depends on [control=['if'], data=[]]
else:
return self.cmdloop() # depends on [control=['if'], data=[]] |
def _calc_xy(self, xxx_todo_changeme, angle, length):
"""
Calculates the coordinates after a specific saccade was made.
Parameters:
(x,y) : tuple of floats or ints
The coordinates before the saccade was made
angle : float or int
The angle that the next saccade encloses with the
horizontal display border
length: float or int
The length of the next saccade
"""
(x, y) = xxx_todo_changeme
return (x+(cos(radians(angle))*length),
y+(sin(radians(angle))*length)) | def function[_calc_xy, parameter[self, xxx_todo_changeme, angle, length]]:
constant[
Calculates the coordinates after a specific saccade was made.
Parameters:
(x,y) : tuple of floats or ints
The coordinates before the saccade was made
angle : float or int
The angle that the next saccade encloses with the
horizontal display border
length: float or int
The length of the next saccade
]
<ast.Tuple object at 0x7da18dc98f10> assign[=] name[xxx_todo_changeme]
return[tuple[[<ast.BinOp object at 0x7da18dc982b0>, <ast.BinOp object at 0x7da18f09f970>]]] | keyword[def] identifier[_calc_xy] ( identifier[self] , identifier[xxx_todo_changeme] , identifier[angle] , identifier[length] ):
literal[string]
( identifier[x] , identifier[y] )= identifier[xxx_todo_changeme]
keyword[return] ( identifier[x] +( identifier[cos] ( identifier[radians] ( identifier[angle] ))* identifier[length] ),
identifier[y] +( identifier[sin] ( identifier[radians] ( identifier[angle] ))* identifier[length] )) | def _calc_xy(self, xxx_todo_changeme, angle, length):
"""
Calculates the coordinates after a specific saccade was made.
Parameters:
(x,y) : tuple of floats or ints
The coordinates before the saccade was made
angle : float or int
The angle that the next saccade encloses with the
horizontal display border
length: float or int
The length of the next saccade
"""
(x, y) = xxx_todo_changeme
return (x + cos(radians(angle)) * length, y + sin(radians(angle)) * length) |
def toxml(self):
"""
Exports this object into a LEMS XML object
"""
return '<ComponentRequirement name="{0}"'.format(self.name) + '' + \
(' description = "{0}"'.format(self.description) if self.description else '') +\
'/>' | def function[toxml, parameter[self]]:
constant[
Exports this object into a LEMS XML object
]
return[binary_operation[binary_operation[binary_operation[call[constant[<ComponentRequirement name="{0}"].format, parameter[name[self].name]] + constant[]] + <ast.IfExp object at 0x7da1b24aed70>] + constant[/>]]] | keyword[def] identifier[toxml] ( identifier[self] ):
literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[name] )+ literal[string] +( literal[string] . identifier[format] ( identifier[self] . identifier[description] ) keyword[if] identifier[self] . identifier[description] keyword[else] literal[string] )+ literal[string] | def toxml(self):
"""
Exports this object into a LEMS XML object
"""
return '<ComponentRequirement name="{0}"'.format(self.name) + '' + (' description = "{0}"'.format(self.description) if self.description else '') + '/>' |
def flush(self):
"""Flush all streams."""
if self.__logFileStream is not None:
try:
self.__logFileStream.flush()
except:
pass
try:
os.fsync(self.__logFileStream.fileno())
except:
pass
if self.__stdout is not None:
try:
self.__stdout.flush()
except:
pass
try:
os.fsync(self.__stdout.fileno())
except:
pass | def function[flush, parameter[self]]:
constant[Flush all streams.]
if compare[name[self].__logFileStream is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0973520>
<ast.Try object at 0x7da1b09709a0>
if compare[name[self].__stdout is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0973370>
<ast.Try object at 0x7da1b0971870> | keyword[def] identifier[flush] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__logFileStream] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[self] . identifier[__logFileStream] . identifier[flush] ()
keyword[except] :
keyword[pass]
keyword[try] :
identifier[os] . identifier[fsync] ( identifier[self] . identifier[__logFileStream] . identifier[fileno] ())
keyword[except] :
keyword[pass]
keyword[if] identifier[self] . identifier[__stdout] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[self] . identifier[__stdout] . identifier[flush] ()
keyword[except] :
keyword[pass]
keyword[try] :
identifier[os] . identifier[fsync] ( identifier[self] . identifier[__stdout] . identifier[fileno] ())
keyword[except] :
keyword[pass] | def flush(self):
"""Flush all streams."""
if self.__logFileStream is not None:
try:
self.__logFileStream.flush() # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
try:
os.fsync(self.__logFileStream.fileno()) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if self.__stdout is not None:
try:
self.__stdout.flush() # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
try:
os.fsync(self.__stdout.fileno()) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def parse_form_request(api_secret, request):
"""
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
"""
if not check_sign(api_secret, request):
raise SignError(u"message sign error")
return Storage(request) | def function[parse_form_request, parameter[api_secret, request]]:
constant[
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
]
if <ast.UnaryOp object at 0x7da1b09eb100> begin[:]
<ast.Raise object at 0x7da1b09eabf0>
return[call[name[Storage], parameter[name[request]]]] | keyword[def] identifier[parse_form_request] ( identifier[api_secret] , identifier[request] ):
literal[string]
keyword[if] keyword[not] identifier[check_sign] ( identifier[api_secret] , identifier[request] ):
keyword[raise] identifier[SignError] ( literal[string] )
keyword[return] identifier[Storage] ( identifier[request] ) | def parse_form_request(api_secret, request):
"""
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
"""
if not check_sign(api_secret, request):
raise SignError(u'message sign error') # depends on [control=['if'], data=[]]
return Storage(request) |
def _select_binary_stream(self, name, urls):
"""Download a file from a list of urls, yielding a stream after downloading the file.
URLs are tried in order until they succeed.
:raises: :class:`BinaryToolFetcher.BinaryNotFound` if requests to all the given urls fail.
"""
downloaded_successfully = False
accumulated_errors = []
for url in OrderedSet(urls): # De-dup URLS: we only want to try each URL once.
logger.info('Attempting to fetch {name} binary from: {url} ...'.format(name=name, url=url))
try:
with temporary_file() as dest:
logger.debug("in BinaryToolFetcher: url={}, timeout_secs={}"
.format(url, self._timeout_secs))
self._fetcher.download(url,
listener=Fetcher.ProgressListener(),
path_or_fd=dest,
timeout_secs=self._timeout_secs)
logger.info('Fetched {name} binary from: {url} .'.format(name=name, url=url))
downloaded_successfully = True
dest.seek(0)
yield dest
break
except (IOError, Fetcher.Error, ValueError) as e:
accumulated_errors.append('Failed to fetch binary from {url}: {error}'
.format(url=url, error=e))
if not downloaded_successfully:
raise self.BinaryNotFound(name, accumulated_errors) | def function[_select_binary_stream, parameter[self, name, urls]]:
constant[Download a file from a list of urls, yielding a stream after downloading the file.
URLs are tried in order until they succeed.
:raises: :class:`BinaryToolFetcher.BinaryNotFound` if requests to all the given urls fail.
]
variable[downloaded_successfully] assign[=] constant[False]
variable[accumulated_errors] assign[=] list[[]]
for taget[name[url]] in starred[call[name[OrderedSet], parameter[name[urls]]]] begin[:]
call[name[logger].info, parameter[call[constant[Attempting to fetch {name} binary from: {url} ...].format, parameter[]]]]
<ast.Try object at 0x7da1b2248f70>
if <ast.UnaryOp object at 0x7da1b22495d0> begin[:]
<ast.Raise object at 0x7da1b224a6b0> | keyword[def] identifier[_select_binary_stream] ( identifier[self] , identifier[name] , identifier[urls] ):
literal[string]
identifier[downloaded_successfully] = keyword[False]
identifier[accumulated_errors] =[]
keyword[for] identifier[url] keyword[in] identifier[OrderedSet] ( identifier[urls] ):
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[name] , identifier[url] = identifier[url] ))
keyword[try] :
keyword[with] identifier[temporary_file] () keyword[as] identifier[dest] :
identifier[logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[url] , identifier[self] . identifier[_timeout_secs] ))
identifier[self] . identifier[_fetcher] . identifier[download] ( identifier[url] ,
identifier[listener] = identifier[Fetcher] . identifier[ProgressListener] (),
identifier[path_or_fd] = identifier[dest] ,
identifier[timeout_secs] = identifier[self] . identifier[_timeout_secs] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[name] , identifier[url] = identifier[url] ))
identifier[downloaded_successfully] = keyword[True]
identifier[dest] . identifier[seek] ( literal[int] )
keyword[yield] identifier[dest]
keyword[break]
keyword[except] ( identifier[IOError] , identifier[Fetcher] . identifier[Error] , identifier[ValueError] ) keyword[as] identifier[e] :
identifier[accumulated_errors] . identifier[append] ( literal[string]
. identifier[format] ( identifier[url] = identifier[url] , identifier[error] = identifier[e] ))
keyword[if] keyword[not] identifier[downloaded_successfully] :
keyword[raise] identifier[self] . identifier[BinaryNotFound] ( identifier[name] , identifier[accumulated_errors] ) | def _select_binary_stream(self, name, urls):
"""Download a file from a list of urls, yielding a stream after downloading the file.
URLs are tried in order until they succeed.
:raises: :class:`BinaryToolFetcher.BinaryNotFound` if requests to all the given urls fail.
"""
downloaded_successfully = False
accumulated_errors = []
for url in OrderedSet(urls): # De-dup URLS: we only want to try each URL once.
logger.info('Attempting to fetch {name} binary from: {url} ...'.format(name=name, url=url))
try:
with temporary_file() as dest:
logger.debug('in BinaryToolFetcher: url={}, timeout_secs={}'.format(url, self._timeout_secs))
self._fetcher.download(url, listener=Fetcher.ProgressListener(), path_or_fd=dest, timeout_secs=self._timeout_secs)
logger.info('Fetched {name} binary from: {url} .'.format(name=name, url=url))
downloaded_successfully = True
dest.seek(0)
yield dest
break # depends on [control=['with'], data=['dest']] # depends on [control=['try'], data=[]]
except (IOError, Fetcher.Error, ValueError) as e:
accumulated_errors.append('Failed to fetch binary from {url}: {error}'.format(url=url, error=e)) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['url']]
if not downloaded_successfully:
raise self.BinaryNotFound(name, accumulated_errors) # depends on [control=['if'], data=[]] |
def size(self):
""" size in bytes """
if not self._size:
self._size = os.path.getsize(self._path)
return self._size | def function[size, parameter[self]]:
constant[ size in bytes ]
if <ast.UnaryOp object at 0x7da18bcc8d90> begin[:]
name[self]._size assign[=] call[name[os].path.getsize, parameter[name[self]._path]]
return[name[self]._size] | keyword[def] identifier[size] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_size] :
identifier[self] . identifier[_size] = identifier[os] . identifier[path] . identifier[getsize] ( identifier[self] . identifier[_path] )
keyword[return] identifier[self] . identifier[_size] | def size(self):
""" size in bytes """
if not self._size:
self._size = os.path.getsize(self._path) # depends on [control=['if'], data=[]]
return self._size |
def _append(self, signature, fields=(), response=None):
""" Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
"""
self.packer.pack_struct(signature, fields)
self.output_buffer.chunk()
self.output_buffer.chunk()
self.responses.append(response) | def function[_append, parameter[self, signature, fields, response]]:
constant[ Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
]
call[name[self].packer.pack_struct, parameter[name[signature], name[fields]]]
call[name[self].output_buffer.chunk, parameter[]]
call[name[self].output_buffer.chunk, parameter[]]
call[name[self].responses.append, parameter[name[response]]] | keyword[def] identifier[_append] ( identifier[self] , identifier[signature] , identifier[fields] =(), identifier[response] = keyword[None] ):
literal[string]
identifier[self] . identifier[packer] . identifier[pack_struct] ( identifier[signature] , identifier[fields] )
identifier[self] . identifier[output_buffer] . identifier[chunk] ()
identifier[self] . identifier[output_buffer] . identifier[chunk] ()
identifier[self] . identifier[responses] . identifier[append] ( identifier[response] ) | def _append(self, signature, fields=(), response=None):
""" Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
"""
self.packer.pack_struct(signature, fields)
self.output_buffer.chunk()
self.output_buffer.chunk()
self.responses.append(response) |
def check_house_number(self, token):
"""
Attempts to find a house number, generally the first thing in an address. If anything is in front of it,
we assume it is a building name.
"""
if self.street and self.house_number is None and re.match(street_num_regex, token.lower()):
if '/' in token:
token = token.split('/')[0]
if '-' in token:
token = token.split('-')[0]
self.house_number = self._clean(str(token))
return True
return False | def function[check_house_number, parameter[self, token]]:
constant[
Attempts to find a house number, generally the first thing in an address. If anything is in front of it,
we assume it is a building name.
]
if <ast.BoolOp object at 0x7da1b112b130> begin[:]
if compare[constant[/] in name[token]] begin[:]
variable[token] assign[=] call[call[name[token].split, parameter[constant[/]]]][constant[0]]
if compare[constant[-] in name[token]] begin[:]
variable[token] assign[=] call[call[name[token].split, parameter[constant[-]]]][constant[0]]
name[self].house_number assign[=] call[name[self]._clean, parameter[call[name[str], parameter[name[token]]]]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[check_house_number] ( identifier[self] , identifier[token] ):
literal[string]
keyword[if] identifier[self] . identifier[street] keyword[and] identifier[self] . identifier[house_number] keyword[is] keyword[None] keyword[and] identifier[re] . identifier[match] ( identifier[street_num_regex] , identifier[token] . identifier[lower] ()):
keyword[if] literal[string] keyword[in] identifier[token] :
identifier[token] = identifier[token] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[token] :
identifier[token] = identifier[token] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[self] . identifier[house_number] = identifier[self] . identifier[_clean] ( identifier[str] ( identifier[token] ))
keyword[return] keyword[True]
keyword[return] keyword[False] | def check_house_number(self, token):
"""
Attempts to find a house number, generally the first thing in an address. If anything is in front of it,
we assume it is a building name.
"""
if self.street and self.house_number is None and re.match(street_num_regex, token.lower()):
if '/' in token:
token = token.split('/')[0] # depends on [control=['if'], data=['token']]
if '-' in token:
token = token.split('-')[0] # depends on [control=['if'], data=['token']]
self.house_number = self._clean(str(token))
return True # depends on [control=['if'], data=[]]
return False |
def check_if_alive(self):
"""Check if the content is available on the host server. Returns `True` if available, else `False`.
This method is `lazy`-evaluated or only executes when called.
:rtype: bool
"""
try:
from urllib2 import urlopen, URLError, HTTPError
except ImportError:
from urllib.request import urlopen, URLError, HTTPError
if len(self.instance.STATUS_LINK):
check_url = self.instance.STATUS_LINK % ({'content_uid': self.get_content_uid()})
else:
# fallback
check_url = self.instance.url
try:
response = urlopen(check_url)
except (HTTPError, URLError):
return False
except ValueError:
raise URLError('Invalid URL: %s'.format(check_url))
else:
return True if response.code == 200 else False | def function[check_if_alive, parameter[self]]:
constant[Check if the content is available on the host server. Returns `True` if available, else `False`.
This method is `lazy`-evaluated or only executes when called.
:rtype: bool
]
<ast.Try object at 0x7da18f09e1d0>
if call[name[len], parameter[name[self].instance.STATUS_LINK]] begin[:]
variable[check_url] assign[=] binary_operation[name[self].instance.STATUS_LINK <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da18f09fa00>], [<ast.Call object at 0x7da18f09de10>]]]
<ast.Try object at 0x7da18f09c520> | keyword[def] identifier[check_if_alive] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[from] identifier[urllib2] keyword[import] identifier[urlopen] , identifier[URLError] , identifier[HTTPError]
keyword[except] identifier[ImportError] :
keyword[from] identifier[urllib] . identifier[request] keyword[import] identifier[urlopen] , identifier[URLError] , identifier[HTTPError]
keyword[if] identifier[len] ( identifier[self] . identifier[instance] . identifier[STATUS_LINK] ):
identifier[check_url] = identifier[self] . identifier[instance] . identifier[STATUS_LINK] %({ literal[string] : identifier[self] . identifier[get_content_uid] ()})
keyword[else] :
identifier[check_url] = identifier[self] . identifier[instance] . identifier[url]
keyword[try] :
identifier[response] = identifier[urlopen] ( identifier[check_url] )
keyword[except] ( identifier[HTTPError] , identifier[URLError] ):
keyword[return] keyword[False]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[URLError] ( literal[string] . identifier[format] ( identifier[check_url] ))
keyword[else] :
keyword[return] keyword[True] keyword[if] identifier[response] . identifier[code] == literal[int] keyword[else] keyword[False] | def check_if_alive(self):
"""Check if the content is available on the host server. Returns `True` if available, else `False`.
This method is `lazy`-evaluated or only executes when called.
:rtype: bool
"""
try:
from urllib2 import urlopen, URLError, HTTPError # depends on [control=['try'], data=[]]
except ImportError:
from urllib.request import urlopen, URLError, HTTPError # depends on [control=['except'], data=[]]
if len(self.instance.STATUS_LINK):
check_url = self.instance.STATUS_LINK % {'content_uid': self.get_content_uid()} # depends on [control=['if'], data=[]]
else:
# fallback
check_url = self.instance.url
try:
response = urlopen(check_url) # depends on [control=['try'], data=[]]
except (HTTPError, URLError):
return False # depends on [control=['except'], data=[]]
except ValueError:
raise URLError('Invalid URL: %s'.format(check_url)) # depends on [control=['except'], data=[]]
else:
return True if response.code == 200 else False |
def safe_unicode(self, buf):
"""
Safely return an unicode encoded string
"""
tmp = ""
buf = "".join(b for b in buf)
for character in buf:
tmp += character
return tmp | def function[safe_unicode, parameter[self, buf]]:
constant[
Safely return an unicode encoded string
]
variable[tmp] assign[=] constant[]
variable[buf] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da18f58f1c0>]]
for taget[name[character]] in starred[name[buf]] begin[:]
<ast.AugAssign object at 0x7da18f58e9e0>
return[name[tmp]] | keyword[def] identifier[safe_unicode] ( identifier[self] , identifier[buf] ):
literal[string]
identifier[tmp] = literal[string]
identifier[buf] = literal[string] . identifier[join] ( identifier[b] keyword[for] identifier[b] keyword[in] identifier[buf] )
keyword[for] identifier[character] keyword[in] identifier[buf] :
identifier[tmp] += identifier[character]
keyword[return] identifier[tmp] | def safe_unicode(self, buf):
"""
Safely return an unicode encoded string
"""
tmp = ''
buf = ''.join((b for b in buf))
for character in buf:
tmp += character # depends on [control=['for'], data=['character']]
return tmp |
def get_tmaster(self, topologyName, callback=None):
""" get tmaster """
isWatching = False
# Temp dict used to return result
# if callback is not provided.
ret = {
"result": None
}
if callback:
isWatching = True
else:
def callback(data):
"""
Custom callback to get the topologies right now.
"""
ret["result"] = data
self._get_tmaster_with_watch(topologyName, callback, isWatching)
# The topologies are now populated with the data.
return ret["result"] | def function[get_tmaster, parameter[self, topologyName, callback]]:
constant[ get tmaster ]
variable[isWatching] assign[=] constant[False]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da2054a75b0>], [<ast.Constant object at 0x7da2054a5210>]]
if name[callback] begin[:]
variable[isWatching] assign[=] constant[True]
call[name[self]._get_tmaster_with_watch, parameter[name[topologyName], name[callback], name[isWatching]]]
return[call[name[ret]][constant[result]]] | keyword[def] identifier[get_tmaster] ( identifier[self] , identifier[topologyName] , identifier[callback] = keyword[None] ):
literal[string]
identifier[isWatching] = keyword[False]
identifier[ret] ={
literal[string] : keyword[None]
}
keyword[if] identifier[callback] :
identifier[isWatching] = keyword[True]
keyword[else] :
keyword[def] identifier[callback] ( identifier[data] ):
literal[string]
identifier[ret] [ literal[string] ]= identifier[data]
identifier[self] . identifier[_get_tmaster_with_watch] ( identifier[topologyName] , identifier[callback] , identifier[isWatching] )
keyword[return] identifier[ret] [ literal[string] ] | def get_tmaster(self, topologyName, callback=None):
""" get tmaster """
isWatching = False
# Temp dict used to return result
# if callback is not provided.
ret = {'result': None}
if callback:
isWatching = True # depends on [control=['if'], data=[]]
else:
def callback(data):
"""
Custom callback to get the topologies right now.
"""
ret['result'] = data
self._get_tmaster_with_watch(topologyName, callback, isWatching)
# The topologies are now populated with the data.
return ret['result'] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.