def put_if_empty(self, key, value):
"""
Atomically write data only if the key is not already set.
:param bytes key: Key to check/set.
:param bytes value: Arbitrary data.
:return: Boolean whether key/value was set.
"""
if self.has_data_for_key(key):
return False
self.put_data(key, value)
    return True

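# A minimal sketch of put_if_empty in use, with a hypothetical in-memory
# backend (MemoryStore, has_data_for_key and put_data here are illustrative,
# not from the source). Note the check-then-set above is only atomic if the
# backing store serializes access, e.g. under a lock as sketched here:
import threading

class MemoryStore:
    def __init__(self):
        self._data = {}
        self._lock = threading.Lock()

    def has_data_for_key(self, key):
        return key in self._data

    def put_data(self, key, value):
        self._data[key] = value

    def put_if_empty(self, key, value):
        with self._lock:  # serialize the check-then-set
            if self.has_data_for_key(key):
                return False
            self.put_data(key, value)
            return True

store = MemoryStore()
assert store.put_if_empty(b"k", b"v") is True   # first write wins
assert store.put_if_empty(b"k", b"x") is False  # key already set
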
def uint32_gte(a: int, b: int) -> bool:
"""
Return a >= b.
"""
    return (a == b) or uint32_gt(a, b)

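# uint32_gt is referenced above but not shown in this snippet. A plausible
# wraparound-aware definition (RFC 1982-style serial-number comparison; an
# assumption, not necessarily the source's implementation):
def uint32_gt(a: int, b: int) -> bool:
    """Return a > b, treating both values as serial numbers modulo 2**32."""
    return ((a > b) and (a - b < 0x80000000)) or \
           ((a < b) and (b - a > 0x80000000))

assert uint32_gt(1, 0)
assert uint32_gt(0, 0xFFFFFFFF)  # 0 is "after" the wrap point
assert uint32_gte(5, 5)          # equality handled by uint32_gte itself
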
def match_value(expected_type, actual_value):
"""
Matches expected type to a type of a value.
The expected type can be specified by a type, type name or [[TypeCode]].
:param expected_type: an expected type to match.
:param actual_value: a value to match its type to the expected one.
    :return: True if the types match, False otherwise.
    """
    if expected_type is None:
        return True
    if actual_value is None:
        raise Exception("Actual value cannot be null")
    return TypeMatcher.match_type(expected_type, type(actual_value))

def _load_knownGene(filename):
""" Load UCSC knownGene table.
Parameters
----------
filename : str
path to knownGene file
Returns
-------
df : pandas.DataFrame
knownGene table if loading was successful, else None
"""
try:
df = pd.read_table(
filename,
names=[
"name",
"chrom",
"strand",
"txStart",
"txEnd",
"cdsStart",
"cdsEnd",
"exonCount",
"exonStarts",
"exonEnds",
"proteinID",
"alignID",
],
index_col=0,
)
df["chrom"] = df["chrom"].str[3:]
return df
except Exception as err:
print(err)
        return None

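# A minimal sketch of the expected input: a headerless, tab-separated UCSC
# knownGene dump (the file name and the sample row are illustrative):
import pandas as pd

sample = ("uc001aaa.3\tchr1\t+\t11873\t14409\t11873\t11873\t3\t"
          "11873,12612,13220,\t12227,12721,14409,\t\tuc001aaa.3\n")
with open("knownGene_sample.tsv", "w") as fh:
    fh.write(sample)

df = _load_knownGene("knownGene_sample.tsv")
# df.loc["uc001aaa.3", "chrom"] == "1"  -- the "chr" prefix is stripped
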
def get_dialog(self):
"""Return FormDialog instance"""
dialog = self.parent()
while not isinstance(dialog, QDialog):
dialog = dialog.parent()
    return dialog

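# The loop above assumes a QDialog ancestor exists; if parent() ever returns
# None, the next iteration raises AttributeError. A defensive variant
# (find_ancestor is a hypothetical helper, not part of the source):
def find_ancestor(widget, cls):
    """Walk widget.parent() upward; return the first cls instance or None."""
    node = widget.parent()
    while node is not None and not isinstance(node, cls):
        node = node.parent()
    return node
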
def check_max_filesize(chosen_file, max_size):
    """
    Return True if the file at ``chosen_file`` is at most ``max_size`` bytes.
    """
    return os.path.getsize(chosen_file) <= max_size

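# Illustrative usage with a throwaway temporary file (sizes are arbitrary):
import os
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"x" * 1024)
assert check_max_filesize(tmp.name, 2048) is True   # 1 KiB <= 2 KiB
assert check_max_filesize(tmp.name, 512) is False   # 1 KiB > 512 B
os.remove(tmp.name)
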
def parse(self):
"""
    Retrieve and parse the Event Summary report for the given :py:class:`nhlscrapi.games.game.GameKey`
:returns: ``self`` on success, ``None`` otherwise
"""
try:
return super(EventSummRep, self).parse() \
.parse_away_shots() \
.parse_home_shots() \
.parse_away_fo() \
.parse_home_fo() \
.parse_away_by_player() \
.parse_home_by_player()
    except Exception:
        return None

def _prefetch_items(self, change):
    """ When the current_row in the model changes (whether from scrolling
    or set by the application), make sure the results are loaded!
    """
    if self.is_initialized:
        view = self.item_view
        upper_limit = view.iterable_index + view.iterable_fetch_size - view.iterable_prefetch
        lower_limit = max(0, view.iterable_index + view.iterable_prefetch)
        offset = int(view.iterable_fetch_size / 2.0)
        upper_visible_row = view.visible_rect[2]
        lower_visible_row = view.visible_rect[0]
        print("Visible rect = %s" % view.visible_rect)
        if upper_visible_row >= upper_limit:
            next_index = max(0, upper_visible_row - offset)  # Center on current row
            # Scrolling down: only ever move the window forward
            if next_index > view.iterable_index:
                print("Auto prefetch upper limit %s!" % upper_limit)
                view.iterable_index = next_index
                #view.model().reset()
        elif view.iterable_index > 0 and lower_visible_row < lower_limit:
            next_index = max(0, lower_visible_row - offset)  # Center on current row
            # Scrolling up: only ever move the window backward
            if next_index < view.iterable_index:
                print("Auto prefetch lower limit=%s, iterable=%s, setting next=%s!"
                      % (lower_limit, view.iterable_index, next_index))
                view.iterable_index = next_index

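# The windowing arithmetic above can be factored into a pure helper, which
# makes the re-centering behavior easy to test in isolation (a sketch under
# the same semantics, not part of the source):
def next_prefetch_index(current, visible_low, visible_high,
                        fetch_size, prefetch):
    """Return the new iterable_index, or `current` if no fetch is needed."""
    upper_limit = current + fetch_size - prefetch
    lower_limit = max(0, current + prefetch)
    offset = int(fetch_size / 2.0)
    if visible_high >= upper_limit:
        # Scrolling down: re-center, but never move the window backward
        return max(current, max(0, visible_high - offset))
    if current > 0 and visible_low < lower_limit:
        # Scrolling up: re-center, but never move the window forward
        return min(current, max(0, visible_low - offset))
    return current
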
def send_message(self, msg):
""" Send a message to the client. This should not be used in
remote debugging mode.
"""
if not self.handlers:
return #: Client not connected
for h in self.handlers:
        h.write_message(msg)

def genl_register(ops):
"""Register Generic Netlink family backed cache.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/mngt.c#L241
Same as genl_register_family() but additionally registers the specified cache operations using
nl_cache_mngt_register() and associates it with the Generic Netlink family.
Positional arguments:
ops -- cache operations definition (nl_cache_ops class instance).
Returns:
0 on success or a negative error code.
"""
if ops.co_protocol != NETLINK_GENERIC:
return -NLE_PROTO_MISMATCH
if ops.co_hdrsize < GENL_HDRSIZE(0):
return -NLE_INVAL
if ops.co_genl is None:
return -NLE_INVAL
ops.co_genl.o_cache_ops = ops
ops.co_genl.o_hdrsize = ops.co_hdrsize - GENL_HDRLEN
ops.co_genl.o_name = ops.co_msgtypes[0].mt_name
ops.co_genl.o_id = ops.co_msgtypes[0].mt_id
ops.co_msg_parser = genl_msg_parser
err = genl_register_family(ops.co_genl)
if err < 0:
return err
    return nl_cache_mngt_register(ops)

def _register_handler(event, fun, external=False):
"""Register a function to be an event handler"""
registry = core.HANDLER_REGISTRY
if external:
registry = core.EXTERNAL_HANDLER_REGISTRY
if not isinstance(event, basestring):
# If not basestring, it is a BaseEvent subclass.
# This occurs when class methods are registered as handlers
event = core.parse_event_to_name(event)
if event in registry:
registry[event].append(fun)
else:
registry[event] = [fun]
    return fun

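# Hypothetical usage, assuming core.HANDLER_REGISTRY is a dict mapping event
# names to handler lists; because the function is returned unchanged, it can
# also back a decorator:
def on_commit(payload):
    print("commit:", payload)

_register_handler("repository.commit", on_commit)
# core.HANDLER_REGISTRY["repository.commit"] == [on_commit]
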
def long_click(self, x, y):
    '''Long click at arbitrary coordinates.'''
    return self.swipe(x, y, x + 1, y + 1)

def subscribe_agreement_created(self, agreement_id, timeout, callback, args, wait=False):
"""
Subscribe to an agreement created.
:param agreement_id: id of the agreement, hex str
:param timeout:
:param callback:
:param args:
:param wait: if true block the listener until get the event, bool
:return:
"""
logger.info(
f'Subscribing {self.AGREEMENT_CREATED_EVENT} event with agreement id {agreement_id}.')
return self.subscribe_to_event(
self.AGREEMENT_CREATED_EVENT,
timeout,
{'_agreementId': Web3Provider.get_web3().toBytes(hexstr=agreement_id)},
callback=callback,
args=args,
wait=wait
    )

def to_dict(self):
"""
Returns a dict representation of this instance suitable for
conversion to YAML.
"""
return {
'model_type': 'segmented_regression',
'name': self.name,
'segmentation_col': self.segmentation_col,
'fit_filters': self.fit_filters,
'predict_filters': self.predict_filters,
'min_segment_size': self.min_segment_size,
'default_config': {
'model_expression': self.default_model_expr,
'ytransform': YTRANSFORM_MAPPING[self.default_ytransform]
},
'fitted': self.fitted,
'models': {
yamlio.to_scalar_safe(name):
self._process_model_dict(m.to_dict())
for name, m in self._group.models.items()}
    }

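# Illustrative serialization of the returned dict, assuming it is YAML-safe
# as the docstring claims (`model` and the output path are hypothetical):
import yaml

def save_model_yaml(model, path):
    """Dump a segmented regression config to a YAML file."""
    with open(path, "w") as fh:
        yaml.safe_dump(model.to_dict(), fh, default_flow_style=False)
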
def integrate(self, wave=None):
"""Integrate the throughput over the specified wavelength set.
If no wavelength set is specified, the built-in one is used.
Integration is done using :meth:`~Integrator.trapezoidIntegration`
with ``x=wave`` and ``y=throughput``.
Also see :ref:`pysynphot-formula-equvw`.
Parameters
----------
wave : array_like or `None`
Wavelength set for integration.
Returns
-------
ans : float
Integrated sum.
"""
if wave is None:
wave = self.wave
ans = self.trapezoidIntegration(wave, self(wave))
    return ans

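# Numeric sanity check: trapezoidal integration of a flat throughput of 1.0
# over [1000, 2000] angstroms gives ~1000. The sum below spells out the
# trapezoid rule that trapezoidIntegration is assumed to implement:
import numpy as np

wave = np.linspace(1000.0, 2000.0, 11)
flat = np.ones_like(wave)
area = ((flat[1:] + flat[:-1]) / 2.0 * np.diff(wave)).sum()
assert abs(area - 1000.0) < 1e-9
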
def fetch_recent_submissions(self, max_duration):
"""Fetch recent submissions in subreddit with boundaries.
Does not include posts within the last day as their scores may not be
representative.
:param max_duration: When set, specifies the number of days to include
"""
if max_duration:
self.min_date = self.max_date - SECONDS_IN_A_DAY * max_duration
for submission in self.subreddit.new(limit=None):
if submission.created_utc <= self.min_date:
break
if submission.created_utc > self.max_date:
continue
        self.submissions[submission.id] = MiniSubmission(submission)

def render(self, context):
"""Output the content of the `PlaceholdeNode` in the template."""
content = mark_safe(self.get_content_from_context(context))
if not content:
return ''
if self.parsed:
try:
t = template.Template(content, name=self.name)
content = mark_safe(t.render(context))
except TemplateSyntaxError as error:
if global_settings.DEBUG:
content = PLACEHOLDER_ERROR % {
'name': self.name,
'error': error,
}
else:
content = ''
if self.as_varname is None:
return content
context[self.as_varname] = content
    return ''

def _load_tsv_variables(layout, suffix, dataset=None, columns=None,
prepend_type=False, scope='all', **selectors):
''' Reads variables from scans.tsv, sessions.tsv, and participants.tsv.
Args:
layout (BIDSLayout): The BIDSLayout to use.
suffix (str): The suffix of file to read from. Must be one of 'scans',
'sessions', or 'participants'.
dataset (NodeIndex): A BIDS NodeIndex container. If None, a new one is
initialized.
columns (list): Optional list of names specifying which columns in the
files to return. If None, all columns are returned.
prepend_type (bool): If True, variable names are prepended with the
type name (e.g., 'age' becomes 'participants.age').
scope (str, list): The scope of the space to search for variables. See
docstring for BIDSLayout for details and valid predefined values.
selectors (dict): Optional keyword arguments passed onto the
BIDSLayout instance's get() method; can be used to constrain
which data are loaded.
Returns: A NodeIndex instance.
'''
# Sanitize the selectors: only keep entities at current level or above
remap = {'scans': 'run', 'sessions': 'session', 'participants': 'subject'}
level = remap[suffix]
valid_entities = BASE_ENTITIES[:BASE_ENTITIES.index(level)]
layout_kwargs = {k: v for k, v in selectors.items() if k in valid_entities}
if dataset is None:
dataset = NodeIndex()
files = layout.get(extensions='.tsv', return_type='file', suffix=suffix,
scope=scope, **layout_kwargs)
for f in files:
f = layout.files[f]
_data = pd.read_csv(f.path, sep='\t')
# Entities can be defined either within the first column of the .tsv
# file (for entities that vary by row), or from the full file path
# (for entities constant over all rows in the file). We extract both
# and store them in the main DataFrame alongside other variables (as
# they'll be extracted when the BIDSVariable is initialized anyway).
for ent_name, ent_val in f.entities.items():
if ent_name in ALL_ENTITIES:
_data[ent_name] = ent_val
# Handling is a bit more convoluted for scans.tsv, because the first
# column contains the run filename, which we also need to parse.
if suffix == 'scans':
# Suffix is guaranteed to be present in each filename, so drop the
# constant column with value 'scans' to make way for it and prevent
# two 'suffix' columns.
_data.drop(columns='suffix', inplace=True)
image = _data['filename']
_data = _data.drop('filename', axis=1)
dn = f.dirname
paths = [join(dn, p) for p in image.values]
ent_recs = [layout.files[p].entities for p in paths
if p in layout.files]
ent_cols = pd.DataFrame.from_records(ent_recs)
_data = pd.concat([_data, ent_cols], axis=1, sort=True)
# It's possible to end up with duplicate entity columns this way
_data = _data.T.drop_duplicates().T
# The BIDS spec requires ID columns to be named 'session_id', 'run_id',
# etc., and IDs begin with entity prefixes (e.g., 'sub-01'). To ensure
# consistent internal handling, we strip these suffixes and prefixes.
elif suffix == 'sessions':
_data = _data.rename(columns={'session_id': 'session'})
_data['session'] = _data['session'].str.replace('ses-', '')
elif suffix == 'participants':
_data = _data.rename(columns={'participant_id': 'subject'})
_data['subject'] = _data['subject'].str.replace('sub-', '')
def make_patt(x, regex_search=False):
patt = '%s' % x
if isinstance(x, (int, float)):
# allow for leading zeros if a number was specified
# regardless of regex_search
patt = '0*' + patt
if not regex_search:
patt = '^%s$' % patt
return patt
# Filter rows on all selectors
comm_cols = list(set(_data.columns) & set(selectors.keys()))
for col in comm_cols:
ent_patts = [make_patt(x, regex_search=layout.regex_search)
for x in listify(selectors.get(col))]
patt = '|'.join(ent_patts)
_data = _data[_data[col].str.contains(patt)]
level = {'scans': 'session', 'sessions': 'subject',
'participants': 'dataset'}[suffix]
node = dataset.get_or_create_node(level, f.entities)
ent_cols = list(set(ALL_ENTITIES) & set(_data.columns))
amp_cols = list(set(_data.columns) - set(ent_cols))
if columns is not None:
amp_cols = list(set(amp_cols) & set(columns))
for col_name in amp_cols:
            # Rename columns: the values must live in the 'amplitude' column
df = _data.loc[:, [col_name] + ent_cols]
df.columns = ['amplitude'] + ent_cols
if prepend_type:
col_name = '%s.%s' % (suffix, col_name)
node.add_variable(SimpleVariable(name=col_name, data=df, source=suffix))
    return dataset

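# The inner make_patt helper pads numeric selectors with optional leading
# zeros, so a selector like run=1 matches both "1" and "01" in the .tsv.
# A standalone sketch of that behavior:
import re

def make_patt(x, regex_search=False):
    patt = '%s' % x
    if isinstance(x, (int, float)):
        patt = '0*' + patt      # allow leading zeros for numeric selectors
    if not regex_search:
        patt = '^%s$' % patt    # anchor for an exact (non-regex) match
    return patt

assert re.search(make_patt(1), "01")
assert re.search(make_patt(1), "1")
assert not re.search(make_patt(1), "11")
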
constant[ Reads variables from scans.tsv, sessions.tsv, and participants.tsv.
Args:
layout (BIDSLayout): The BIDSLayout to use.
suffix (str): The suffix of file to read from. Must be one of 'scans',
'sessions', or 'participants'.
dataset (NodeIndex): A BIDS NodeIndex container. If None, a new one is
initialized.
columns (list): Optional list of names specifying which columns in the
files to return. If None, all columns are returned.
prepend_type (bool): If True, variable names are prepended with the
type name (e.g., 'age' becomes 'participants.age').
scope (str, list): The scope of the space to search for variables. See
docstring for BIDSLayout for details and valid predefined values.
selectors (dict): Optional keyword arguments passed onto the
BIDSLayout instance's get() method; can be used to constrain
which data are loaded.
Returns: A NodeIndex instance.
]
variable[remap] assign[=] dictionary[[<ast.Constant object at 0x7da1b106ee00>, <ast.Constant object at 0x7da1b124e140>, <ast.Constant object at 0x7da1b124c9a0>], [<ast.Constant object at 0x7da1b124cd90>, <ast.Constant object at 0x7da1b124cd00>, <ast.Constant object at 0x7da1b124dd80>]]
variable[level] assign[=] call[name[remap]][name[suffix]]
variable[valid_entities] assign[=] call[name[BASE_ENTITIES]][<ast.Slice object at 0x7da1b124cf40>]
variable[layout_kwargs] assign[=] <ast.DictComp object at 0x7da1b1288a00>
if compare[name[dataset] is constant[None]] begin[:]
variable[dataset] assign[=] call[name[NodeIndex], parameter[]]
variable[files] assign[=] call[name[layout].get, parameter[]]
for taget[name[f]] in starred[name[files]] begin[:]
variable[f] assign[=] call[name[layout].files][name[f]]
variable[_data] assign[=] call[name[pd].read_csv, parameter[name[f].path]]
for taget[tuple[[<ast.Name object at 0x7da1b128a5c0>, <ast.Name object at 0x7da1b1288c10>]]] in starred[call[name[f].entities.items, parameter[]]] begin[:]
if compare[name[ent_name] in name[ALL_ENTITIES]] begin[:]
call[name[_data]][name[ent_name]] assign[=] name[ent_val]
if compare[name[suffix] equal[==] constant[scans]] begin[:]
call[name[_data].drop, parameter[]]
variable[image] assign[=] call[name[_data]][constant[filename]]
variable[_data] assign[=] call[name[_data].drop, parameter[constant[filename]]]
variable[dn] assign[=] name[f].dirname
variable[paths] assign[=] <ast.ListComp object at 0x7da1b1152a10>
variable[ent_recs] assign[=] <ast.ListComp object at 0x7da1b1151bd0>
variable[ent_cols] assign[=] call[name[pd].DataFrame.from_records, parameter[name[ent_recs]]]
variable[_data] assign[=] call[name[pd].concat, parameter[list[[<ast.Name object at 0x7da1b1151870>, <ast.Name object at 0x7da1b1152e60>]]]]
variable[_data] assign[=] call[name[_data].T.drop_duplicates, parameter[]].T
def function[make_patt, parameter[x, regex_search]]:
variable[patt] assign[=] binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[x]]
if call[name[isinstance], parameter[name[x], tuple[[<ast.Name object at 0x7da1b10835e0>, <ast.Name object at 0x7da1b1083640>]]]] begin[:]
variable[patt] assign[=] binary_operation[constant[0*] + name[patt]]
if <ast.UnaryOp object at 0x7da1b1083730> begin[:]
variable[patt] assign[=] binary_operation[constant[^%s$] <ast.Mod object at 0x7da2590d6920> name[patt]]
return[name[patt]]
variable[comm_cols] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[name[_data].columns]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[selectors].keys, parameter[]]]]]]]
for taget[name[col]] in starred[name[comm_cols]] begin[:]
variable[ent_patts] assign[=] <ast.ListComp object at 0x7da1b1082d40>
variable[patt] assign[=] call[constant[|].join, parameter[name[ent_patts]]]
variable[_data] assign[=] call[name[_data]][call[call[name[_data]][name[col]].str.contains, parameter[name[patt]]]]
variable[level] assign[=] call[dictionary[[<ast.Constant object at 0x7da1b10824a0>, <ast.Constant object at 0x7da1b1082530>, <ast.Constant object at 0x7da1b1082470>], [<ast.Constant object at 0x7da1b10823b0>, <ast.Constant object at 0x7da1b10823e0>, <ast.Constant object at 0x7da1b1082350>]]][name[suffix]]
variable[node] assign[=] call[name[dataset].get_or_create_node, parameter[name[level], name[f].entities]]
variable[ent_cols] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[name[ALL_ENTITIES]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[name[_data].columns]]]]]
variable[amp_cols] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[name[_data].columns]] - call[name[set], parameter[name[ent_cols]]]]]]
if compare[name[columns] is_not constant[None]] begin[:]
variable[amp_cols] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[name[amp_cols]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[name[columns]]]]]]
for taget[name[col_name]] in starred[name[amp_cols]] begin[:]
variable[df] assign[=] call[name[_data].loc][tuple[[<ast.Slice object at 0x7da1b1081b40>, <ast.BinOp object at 0x7da1b1081a20>]]]
name[df].columns assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b10819f0>]] + name[ent_cols]]
if name[prepend_type] begin[:]
variable[col_name] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1081540>, <ast.Name object at 0x7da1b10814e0>]]]
call[name[node].add_variable, parameter[call[name[SimpleVariable], parameter[]]]]
return[name[dataset]] | keyword[def] identifier[_load_tsv_variables] ( identifier[layout] , identifier[suffix] , identifier[dataset] = keyword[None] , identifier[columns] = keyword[None] ,
identifier[prepend_type] = keyword[False] , identifier[scope] = literal[string] ,** identifier[selectors] ):
literal[string]
identifier[remap] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
identifier[level] = identifier[remap] [ identifier[suffix] ]
identifier[valid_entities] = identifier[BASE_ENTITIES] [: identifier[BASE_ENTITIES] . identifier[index] ( identifier[level] )]
identifier[layout_kwargs] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[selectors] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[valid_entities] }
keyword[if] identifier[dataset] keyword[is] keyword[None] :
identifier[dataset] = identifier[NodeIndex] ()
identifier[files] = identifier[layout] . identifier[get] ( identifier[extensions] = literal[string] , identifier[return_type] = literal[string] , identifier[suffix] = identifier[suffix] ,
identifier[scope] = identifier[scope] ,** identifier[layout_kwargs] )
keyword[for] identifier[f] keyword[in] identifier[files] :
identifier[f] = identifier[layout] . identifier[files] [ identifier[f] ]
identifier[_data] = identifier[pd] . identifier[read_csv] ( identifier[f] . identifier[path] , identifier[sep] = literal[string] )
keyword[for] identifier[ent_name] , identifier[ent_val] keyword[in] identifier[f] . identifier[entities] . identifier[items] ():
keyword[if] identifier[ent_name] keyword[in] identifier[ALL_ENTITIES] :
identifier[_data] [ identifier[ent_name] ]= identifier[ent_val]
keyword[if] identifier[suffix] == literal[string] :
identifier[_data] . identifier[drop] ( identifier[columns] = literal[string] , identifier[inplace] = keyword[True] )
identifier[image] = identifier[_data] [ literal[string] ]
identifier[_data] = identifier[_data] . identifier[drop] ( literal[string] , identifier[axis] = literal[int] )
identifier[dn] = identifier[f] . identifier[dirname]
identifier[paths] =[ identifier[join] ( identifier[dn] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[image] . identifier[values] ]
identifier[ent_recs] =[ identifier[layout] . identifier[files] [ identifier[p] ]. identifier[entities] keyword[for] identifier[p] keyword[in] identifier[paths]
keyword[if] identifier[p] keyword[in] identifier[layout] . identifier[files] ]
identifier[ent_cols] = identifier[pd] . identifier[DataFrame] . identifier[from_records] ( identifier[ent_recs] )
identifier[_data] = identifier[pd] . identifier[concat] ([ identifier[_data] , identifier[ent_cols] ], identifier[axis] = literal[int] , identifier[sort] = keyword[True] )
identifier[_data] = identifier[_data] . identifier[T] . identifier[drop_duplicates] (). identifier[T]
keyword[elif] identifier[suffix] == literal[string] :
identifier[_data] = identifier[_data] . identifier[rename] ( identifier[columns] ={ literal[string] : literal[string] })
identifier[_data] [ literal[string] ]= identifier[_data] [ literal[string] ]. identifier[str] . identifier[replace] ( literal[string] , literal[string] )
keyword[elif] identifier[suffix] == literal[string] :
identifier[_data] = identifier[_data] . identifier[rename] ( identifier[columns] ={ literal[string] : literal[string] })
identifier[_data] [ literal[string] ]= identifier[_data] [ literal[string] ]. identifier[str] . identifier[replace] ( literal[string] , literal[string] )
keyword[def] identifier[make_patt] ( identifier[x] , identifier[regex_search] = keyword[False] ):
identifier[patt] = literal[string] % identifier[x]
keyword[if] identifier[isinstance] ( identifier[x] ,( identifier[int] , identifier[float] )):
identifier[patt] = literal[string] + identifier[patt]
keyword[if] keyword[not] identifier[regex_search] :
identifier[patt] = literal[string] % identifier[patt]
keyword[return] identifier[patt]
identifier[comm_cols] = identifier[list] ( identifier[set] ( identifier[_data] . identifier[columns] )& identifier[set] ( identifier[selectors] . identifier[keys] ()))
keyword[for] identifier[col] keyword[in] identifier[comm_cols] :
identifier[ent_patts] =[ identifier[make_patt] ( identifier[x] , identifier[regex_search] = identifier[layout] . identifier[regex_search] )
keyword[for] identifier[x] keyword[in] identifier[listify] ( identifier[selectors] . identifier[get] ( identifier[col] ))]
identifier[patt] = literal[string] . identifier[join] ( identifier[ent_patts] )
identifier[_data] = identifier[_data] [ identifier[_data] [ identifier[col] ]. identifier[str] . identifier[contains] ( identifier[patt] )]
identifier[level] ={ literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] : literal[string] }[ identifier[suffix] ]
identifier[node] = identifier[dataset] . identifier[get_or_create_node] ( identifier[level] , identifier[f] . identifier[entities] )
identifier[ent_cols] = identifier[list] ( identifier[set] ( identifier[ALL_ENTITIES] )& identifier[set] ( identifier[_data] . identifier[columns] ))
identifier[amp_cols] = identifier[list] ( identifier[set] ( identifier[_data] . identifier[columns] )- identifier[set] ( identifier[ent_cols] ))
keyword[if] identifier[columns] keyword[is] keyword[not] keyword[None] :
identifier[amp_cols] = identifier[list] ( identifier[set] ( identifier[amp_cols] )& identifier[set] ( identifier[columns] ))
keyword[for] identifier[col_name] keyword[in] identifier[amp_cols] :
identifier[df] = identifier[_data] . identifier[loc] [:,[ identifier[col_name] ]+ identifier[ent_cols] ]
identifier[df] . identifier[columns] =[ literal[string] ]+ identifier[ent_cols]
keyword[if] identifier[prepend_type] :
identifier[col_name] = literal[string] %( identifier[suffix] , identifier[col_name] )
identifier[node] . identifier[add_variable] ( identifier[SimpleVariable] ( identifier[name] = identifier[col_name] , identifier[data] = identifier[df] , identifier[source] = identifier[suffix] ))
keyword[return] identifier[dataset] | def _load_tsv_variables(layout, suffix, dataset=None, columns=None, prepend_type=False, scope='all', **selectors):
""" Reads variables from scans.tsv, sessions.tsv, and participants.tsv.
Args:
layout (BIDSLayout): The BIDSLayout to use.
suffix (str): The suffix of file to read from. Must be one of 'scans',
'sessions', or 'participants'.
dataset (NodeIndex): A BIDS NodeIndex container. If None, a new one is
initialized.
columns (list): Optional list of names specifying which columns in the
files to return. If None, all columns are returned.
prepend_type (bool): If True, variable names are prepended with the
type name (e.g., 'age' becomes 'participants.age').
scope (str, list): The scope of the space to search for variables. See
docstring for BIDSLayout for details and valid predefined values.
selectors (dict): Optional keyword arguments passed onto the
BIDSLayout instance's get() method; can be used to constrain
which data are loaded.
Returns: A NodeIndex instance.
"""
# Sanitize the selectors: only keep entities at current level or above
remap = {'scans': 'run', 'sessions': 'session', 'participants': 'subject'}
level = remap[suffix]
valid_entities = BASE_ENTITIES[:BASE_ENTITIES.index(level)]
layout_kwargs = {k: v for (k, v) in selectors.items() if k in valid_entities}
if dataset is None:
dataset = NodeIndex() # depends on [control=['if'], data=['dataset']]
files = layout.get(extensions='.tsv', return_type='file', suffix=suffix, scope=scope, **layout_kwargs)
for f in files:
f = layout.files[f]
_data = pd.read_csv(f.path, sep='\t')
# Entities can be defined either within the first column of the .tsv
# file (for entities that vary by row), or from the full file path
# (for entities constant over all rows in the file). We extract both
# and store them in the main DataFrame alongside other variables (as
# they'll be extracted when the BIDSVariable is initialized anyway).
for (ent_name, ent_val) in f.entities.items():
if ent_name in ALL_ENTITIES:
_data[ent_name] = ent_val # depends on [control=['if'], data=['ent_name']] # depends on [control=['for'], data=[]]
# Handling is a bit more convoluted for scans.tsv, because the first
# column contains the run filename, which we also need to parse.
if suffix == 'scans':
# Suffix is guaranteed to be present in each filename, so drop the
# constant column with value 'scans' to make way for it and prevent
# two 'suffix' columns.
_data.drop(columns='suffix', inplace=True)
image = _data['filename']
_data = _data.drop('filename', axis=1)
dn = f.dirname
paths = [join(dn, p) for p in image.values]
ent_recs = [layout.files[p].entities for p in paths if p in layout.files]
ent_cols = pd.DataFrame.from_records(ent_recs)
_data = pd.concat([_data, ent_cols], axis=1, sort=True)
# It's possible to end up with duplicate entity columns this way
_data = _data.T.drop_duplicates().T # depends on [control=['if'], data=[]]
# The BIDS spec requires ID columns to be named 'session_id', 'run_id',
# etc., and IDs begin with entity prefixes (e.g., 'sub-01'). To ensure
# consistent internal handling, we strip these suffixes and prefixes.
elif suffix == 'sessions':
_data = _data.rename(columns={'session_id': 'session'})
_data['session'] = _data['session'].str.replace('ses-', '') # depends on [control=['if'], data=[]]
elif suffix == 'participants':
_data = _data.rename(columns={'participant_id': 'subject'})
_data['subject'] = _data['subject'].str.replace('sub-', '') # depends on [control=['if'], data=[]]
def make_patt(x, regex_search=False):
patt = '%s' % x
if isinstance(x, (int, float)):
# allow for leading zeros if a number was specified
# regardless of regex_search
patt = '0*' + patt # depends on [control=['if'], data=[]]
if not regex_search:
patt = '^%s$' % patt # depends on [control=['if'], data=[]]
return patt
# Filter rows on all selectors
comm_cols = list(set(_data.columns) & set(selectors.keys()))
for col in comm_cols:
ent_patts = [make_patt(x, regex_search=layout.regex_search) for x in listify(selectors.get(col))]
patt = '|'.join(ent_patts)
_data = _data[_data[col].str.contains(patt)] # depends on [control=['for'], data=['col']]
level = {'scans': 'session', 'sessions': 'subject', 'participants': 'dataset'}[suffix]
node = dataset.get_or_create_node(level, f.entities)
ent_cols = list(set(ALL_ENTITIES) & set(_data.columns))
amp_cols = list(set(_data.columns) - set(ent_cols))
if columns is not None:
amp_cols = list(set(amp_cols) & set(columns)) # depends on [control=['if'], data=['columns']]
for col_name in amp_cols:
            # Rename columns: values must be in 'amplitude'
df = _data.loc[:, [col_name] + ent_cols]
df.columns = ['amplitude'] + ent_cols
if prepend_type:
col_name = '%s.%s' % (suffix, col_name) # depends on [control=['if'], data=[]]
node.add_variable(SimpleVariable(name=col_name, data=df, source=suffix)) # depends on [control=['for'], data=['col_name']] # depends on [control=['for'], data=['f']]
return dataset |
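A standalone sketch of the make_patt helper defined inside _load_tsv_variables above: numeric selectors tolerate leading zeros, and non-regex mode anchors the pattern. This re-implements the helper for illustration only; it does not import the surrounding module.

import re

def make_patt(x, regex_search=False):
    patt = '%s' % x
    if isinstance(x, (int, float)):
        # allow leading zeros when a number was specified
        patt = '0*' + patt
    if not regex_search:
        patt = '^%s$' % patt
    return patt

assert make_patt(1) == '^0*1$'
assert re.match(make_patt(1), '001')              # '001' still matches id 1
assert make_patt('task', regex_search=True) == 'task'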
def get_frame(self, idx):
"""Return the frame number of a contour"""
cont = self.data[idx]
frame = int(cont.strip().split(" ", 1)[0])
return frame | def function[get_frame, parameter[self, idx]]:
constant[Return the frame number of a contour]
variable[cont] assign[=] call[name[self].data][name[idx]]
variable[frame] assign[=] call[name[int], parameter[call[call[call[name[cont].strip, parameter[]].split, parameter[constant[ ], constant[1]]]][constant[0]]]]
return[name[frame]] | keyword[def] identifier[get_frame] ( identifier[self] , identifier[idx] ):
literal[string]
identifier[cont] = identifier[self] . identifier[data] [ identifier[idx] ]
identifier[frame] = identifier[int] ( identifier[cont] . identifier[strip] (). identifier[split] ( literal[string] , literal[int] )[ literal[int] ])
keyword[return] identifier[frame] | def get_frame(self, idx):
"""Return the frame number of a contour"""
cont = self.data[idx]
frame = int(cont.strip().split(' ', 1)[0])
return frame |
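A minimal standalone sketch of the parsing step in get_frame; the contour line layout (frame number as the first whitespace-separated token) is an assumption read off the split call, and the record below is hypothetical.

line = "42 12,34 13,35 14,36\n"             # hypothetical contour record
frame = int(line.strip().split(" ", 1)[0])  # keep only the leading token
assert frame == 42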
def PlayerSeasonFinder(**kwargs):
""" Docstring will be filled in by __init__.py """
if 'offset' not in kwargs:
kwargs['offset'] = 0
playerSeasons = []
while True:
querystring = _kwargs_to_qs(**kwargs)
url = '{}?{}'.format(PSF_URL, querystring)
if kwargs.get('verbose', False):
print(url)
html = utils.get_html(url)
doc = pq(html)
table = doc('table#results')
df = utils.parse_table(table)
if df.empty:
break
thisSeason = list(zip(df.player_id, df.year))
playerSeasons.extend(thisSeason)
if doc('*:contains("Next Page")'):
kwargs['offset'] += 100
else:
break
return playerSeasons | def function[PlayerSeasonFinder, parameter[]]:
constant[ Docstring will be filled in by __init__.py ]
if compare[constant[offset] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[offset]] assign[=] constant[0]
variable[playerSeasons] assign[=] list[[]]
while constant[True] begin[:]
variable[querystring] assign[=] call[name[_kwargs_to_qs], parameter[]]
variable[url] assign[=] call[constant[{}?{}].format, parameter[name[PSF_URL], name[querystring]]]
if call[name[kwargs].get, parameter[constant[verbose], constant[False]]] begin[:]
call[name[print], parameter[name[url]]]
variable[html] assign[=] call[name[utils].get_html, parameter[name[url]]]
variable[doc] assign[=] call[name[pq], parameter[name[html]]]
variable[table] assign[=] call[name[doc], parameter[constant[table#results]]]
variable[df] assign[=] call[name[utils].parse_table, parameter[name[table]]]
if name[df].empty begin[:]
break
variable[thisSeason] assign[=] call[name[list], parameter[call[name[zip], parameter[name[df].player_id, name[df].year]]]]
call[name[playerSeasons].extend, parameter[name[thisSeason]]]
if call[name[doc], parameter[constant[*:contains("Next Page")]]] begin[:]
<ast.AugAssign object at 0x7da1b026e560>
return[name[playerSeasons]] | keyword[def] identifier[PlayerSeasonFinder] (** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= literal[int]
identifier[playerSeasons] =[]
keyword[while] keyword[True] :
identifier[querystring] = identifier[_kwargs_to_qs] (** identifier[kwargs] )
identifier[url] = literal[string] . identifier[format] ( identifier[PSF_URL] , identifier[querystring] )
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ):
identifier[print] ( identifier[url] )
identifier[html] = identifier[utils] . identifier[get_html] ( identifier[url] )
identifier[doc] = identifier[pq] ( identifier[html] )
identifier[table] = identifier[doc] ( literal[string] )
identifier[df] = identifier[utils] . identifier[parse_table] ( identifier[table] )
keyword[if] identifier[df] . identifier[empty] :
keyword[break]
identifier[thisSeason] = identifier[list] ( identifier[zip] ( identifier[df] . identifier[player_id] , identifier[df] . identifier[year] ))
identifier[playerSeasons] . identifier[extend] ( identifier[thisSeason] )
keyword[if] identifier[doc] ( literal[string] ):
identifier[kwargs] [ literal[string] ]+= literal[int]
keyword[else] :
keyword[break]
keyword[return] identifier[playerSeasons] | def PlayerSeasonFinder(**kwargs):
""" Docstring will be filled in by __init__.py """
if 'offset' not in kwargs:
kwargs['offset'] = 0 # depends on [control=['if'], data=['kwargs']]
playerSeasons = []
while True:
querystring = _kwargs_to_qs(**kwargs)
url = '{}?{}'.format(PSF_URL, querystring)
if kwargs.get('verbose', False):
print(url) # depends on [control=['if'], data=[]]
html = utils.get_html(url)
doc = pq(html)
table = doc('table#results')
df = utils.parse_table(table)
if df.empty:
break # depends on [control=['if'], data=[]]
thisSeason = list(zip(df.player_id, df.year))
playerSeasons.extend(thisSeason)
if doc('*:contains("Next Page")'):
kwargs['offset'] += 100 # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
return playerSeasons |
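The loop above is a plain offset-pagination pattern: fetch a page, stop on an empty table, and advance the offset while a "Next Page" link exists. A self-contained sketch with a stubbed fetcher instead of real HTTP requests:

def fetch_page(offset, page_size=100, total=250):
    rows = list(range(total))                # pretend the server holds 250 rows
    return rows[offset:offset + page_size]

results, offset = [], 0
while True:
    page = fetch_page(offset)
    if not page:                             # mirrors the `df.empty` break
        break
    results.extend(page)
    offset += 100                            # mirrors kwargs['offset'] += 100
assert len(results) == 250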
def get_chat_member(self, user_id):
"""
Get information about a member of a chat.
:param int user_id: Unique identifier of the target user
"""
return self.bot.api_call(
"getChatMember", chat_id=str(self.id), user_id=str(user_id)
) | def function[get_chat_member, parameter[self, user_id]]:
constant[
Get information about a member of a chat.
:param int user_id: Unique identifier of the target user
]
return[call[name[self].bot.api_call, parameter[constant[getChatMember]]]] | keyword[def] identifier[get_chat_member] ( identifier[self] , identifier[user_id] ):
literal[string]
keyword[return] identifier[self] . identifier[bot] . identifier[api_call] (
literal[string] , identifier[chat_id] = identifier[str] ( identifier[self] . identifier[id] ), identifier[user_id] = identifier[str] ( identifier[user_id] )
) | def get_chat_member(self, user_id):
"""
Get information about a member of a chat.
:param int user_id: Unique identifier of the target user
"""
return self.bot.api_call('getChatMember', chat_id=str(self.id), user_id=str(user_id)) |
def fixed_width_binning(data=None, bin_width: Union[float, int] = 1, *, range=None, includes_right_edge=False, **kwargs) -> FixedWidthBinning:
"""Construct fixed-width binning schema.
Parameters
----------
bin_width: float
range: Optional[tuple]
(min, max)
align: Optional[float]
Must be multiple of bin_width
"""
result = FixedWidthBinning(bin_width=bin_width, includes_right_edge=includes_right_edge,
**kwargs)
if range:
result._force_bin_existence(range[0])
result._force_bin_existence(range[1], includes_right_edge=True)
if not kwargs.get("adaptive"):
return result # Otherwise we want to adapt to data
if data is not None and data.shape[0]:
# print("Jo, tady")
result._force_bin_existence([np.min(data), np.max(data)],
includes_right_edge=includes_right_edge)
return result | def function[fixed_width_binning, parameter[data, bin_width]]:
constant[Construct fixed-width binning schema.
Parameters
----------
bin_width: float
range: Optional[tuple]
(min, max)
align: Optional[float]
Must be multiple of bin_width
]
variable[result] assign[=] call[name[FixedWidthBinning], parameter[]]
if name[range] begin[:]
call[name[result]._force_bin_existence, parameter[call[name[range]][constant[0]]]]
call[name[result]._force_bin_existence, parameter[call[name[range]][constant[1]]]]
if <ast.UnaryOp object at 0x7da18fe904c0> begin[:]
return[name[result]]
if <ast.BoolOp object at 0x7da18fe924d0> begin[:]
call[name[result]._force_bin_existence, parameter[list[[<ast.Call object at 0x7da204564d90>, <ast.Call object at 0x7da20c795540>]]]]
return[name[result]] | keyword[def] identifier[fixed_width_binning] ( identifier[data] = keyword[None] , identifier[bin_width] : identifier[Union] [ identifier[float] , identifier[int] ]= literal[int] ,*, identifier[range] = keyword[None] , identifier[includes_right_edge] = keyword[False] ,** identifier[kwargs] )-> identifier[FixedWidthBinning] :
literal[string]
identifier[result] = identifier[FixedWidthBinning] ( identifier[bin_width] = identifier[bin_width] , identifier[includes_right_edge] = identifier[includes_right_edge] ,
** identifier[kwargs] )
keyword[if] identifier[range] :
identifier[result] . identifier[_force_bin_existence] ( identifier[range] [ literal[int] ])
identifier[result] . identifier[_force_bin_existence] ( identifier[range] [ literal[int] ], identifier[includes_right_edge] = keyword[True] )
keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[result]
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] keyword[and] identifier[data] . identifier[shape] [ literal[int] ]:
identifier[result] . identifier[_force_bin_existence] ([ identifier[np] . identifier[min] ( identifier[data] ), identifier[np] . identifier[max] ( identifier[data] )],
identifier[includes_right_edge] = identifier[includes_right_edge] )
keyword[return] identifier[result] | def fixed_width_binning(data=None, bin_width: Union[float, int]=1, *, range=None, includes_right_edge=False, **kwargs) -> FixedWidthBinning:
"""Construct fixed-width binning schema.
Parameters
----------
bin_width: float
range: Optional[tuple]
(min, max)
align: Optional[float]
Must be multiple of bin_width
"""
result = FixedWidthBinning(bin_width=bin_width, includes_right_edge=includes_right_edge, **kwargs)
if range:
result._force_bin_existence(range[0])
result._force_bin_existence(range[1], includes_right_edge=True)
if not kwargs.get('adaptive'):
return result # Otherwise we want to adapt to data # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if data is not None and data.shape[0]:
# print("Jo, tady")
result._force_bin_existence([np.min(data), np.max(data)], includes_right_edge=includes_right_edge) # depends on [control=['if'], data=[]]
return result |
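A standalone illustration of the semantics above (not a call into the class itself): fixed-width bins forced to cover a requested range, built here with plain numpy.

import numpy as np

bin_width, lo, hi = 1.0, 0.0, 5.0
edges = np.arange(lo, hi + bin_width, bin_width)   # array([0., 1., 2., 3., 4., 5.])
data = np.array([0.2, 3.7, 4.9])
counts, _ = np.histogram(data, bins=edges)
assert counts.sum() == 3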
def get_ordering_field_columns(self):
"""
Returns an OrderedDict of ordering field column numbers and asc/desc
"""
# We must cope with more than one column having the same underlying
# sort field, so we base things on column numbers.
ordering = self._get_default_ordering()
ordering_fields = OrderedDict()
if ORDER_VAR not in self.params:
# for ordering specified on model_admin or model Meta, we don't
# know the right column numbers absolutely, because there might be
            # more than one column associated with that ordering, so we guess.
for field in ordering:
if field.startswith('-'):
field = field[1:]
order_type = 'desc'
else:
order_type = 'asc'
for index, attr in enumerate(self.list_display):
if self.get_ordering_field(attr) == field:
ordering_fields[index] = order_type
break
else:
for p in self.params[ORDER_VAR].split('.'):
none, pfx, idx = p.rpartition('-')
try:
idx = int(idx)
except ValueError:
continue # skip it
ordering_fields[idx] = 'desc' if pfx == '-' else 'asc'
return ordering_fields | def function[get_ordering_field_columns, parameter[self]]:
constant[
Returns an OrderedDict of ordering field column numbers and asc/desc
]
variable[ordering] assign[=] call[name[self]._get_default_ordering, parameter[]]
variable[ordering_fields] assign[=] call[name[OrderedDict], parameter[]]
if compare[name[ORDER_VAR] <ast.NotIn object at 0x7da2590d7190> name[self].params] begin[:]
for taget[name[field]] in starred[name[ordering]] begin[:]
if call[name[field].startswith, parameter[constant[-]]] begin[:]
variable[field] assign[=] call[name[field]][<ast.Slice object at 0x7da18bccad10>]
variable[order_type] assign[=] constant[desc]
for taget[tuple[[<ast.Name object at 0x7da18bccbb80>, <ast.Name object at 0x7da18bcc9810>]]] in starred[call[name[enumerate], parameter[name[self].list_display]]] begin[:]
if compare[call[name[self].get_ordering_field, parameter[name[attr]]] equal[==] name[field]] begin[:]
call[name[ordering_fields]][name[index]] assign[=] name[order_type]
break
return[name[ordering_fields]] | keyword[def] identifier[get_ordering_field_columns] ( identifier[self] ):
literal[string]
identifier[ordering] = identifier[self] . identifier[_get_default_ordering] ()
identifier[ordering_fields] = identifier[OrderedDict] ()
keyword[if] identifier[ORDER_VAR] keyword[not] keyword[in] identifier[self] . identifier[params] :
keyword[for] identifier[field] keyword[in] identifier[ordering] :
keyword[if] identifier[field] . identifier[startswith] ( literal[string] ):
identifier[field] = identifier[field] [ literal[int] :]
identifier[order_type] = literal[string]
keyword[else] :
identifier[order_type] = literal[string]
keyword[for] identifier[index] , identifier[attr] keyword[in] identifier[enumerate] ( identifier[self] . identifier[list_display] ):
keyword[if] identifier[self] . identifier[get_ordering_field] ( identifier[attr] )== identifier[field] :
identifier[ordering_fields] [ identifier[index] ]= identifier[order_type]
keyword[break]
keyword[else] :
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[params] [ identifier[ORDER_VAR] ]. identifier[split] ( literal[string] ):
identifier[none] , identifier[pfx] , identifier[idx] = identifier[p] . identifier[rpartition] ( literal[string] )
keyword[try] :
identifier[idx] = identifier[int] ( identifier[idx] )
keyword[except] identifier[ValueError] :
keyword[continue]
identifier[ordering_fields] [ identifier[idx] ]= literal[string] keyword[if] identifier[pfx] == literal[string] keyword[else] literal[string]
keyword[return] identifier[ordering_fields] | def get_ordering_field_columns(self):
"""
Returns an OrderedDict of ordering field column numbers and asc/desc
"""
# We must cope with more than one column having the same underlying
# sort field, so we base things on column numbers.
ordering = self._get_default_ordering()
ordering_fields = OrderedDict()
if ORDER_VAR not in self.params:
# for ordering specified on model_admin or model Meta, we don't
# know the right column numbers absolutely, because there might be
            # more than one column associated with that ordering, so we guess.
for field in ordering:
if field.startswith('-'):
field = field[1:]
order_type = 'desc' # depends on [control=['if'], data=[]]
else:
order_type = 'asc'
for (index, attr) in enumerate(self.list_display):
if self.get_ordering_field(attr) == field:
ordering_fields[index] = order_type
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]]
else:
for p in self.params[ORDER_VAR].split('.'):
(none, pfx, idx) = p.rpartition('-')
try:
idx = int(idx) # depends on [control=['try'], data=[]]
except ValueError:
continue # skip it # depends on [control=['except'], data=[]]
ordering_fields[idx] = 'desc' if pfx == '-' else 'asc' # depends on [control=['for'], data=['p']]
return ordering_fields |
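A standalone sketch of the ORDER_VAR parsing branch above. The query value "3.-1.2" is hypothetical; it reads as column 3 ascending, column 1 descending, column 2 ascending.

from collections import OrderedDict

param = "3.-1.2"                             # hypothetical ?o=... value
ordering_fields = OrderedDict()
for p in param.split('.'):
    _, pfx, idx = p.rpartition('-')
    try:
        idx = int(idx)
    except ValueError:
        continue                             # skip malformed pieces
    ordering_fields[idx] = 'desc' if pfx == '-' else 'asc'
assert ordering_fields == OrderedDict([(3, 'asc'), (1, 'desc'), (2, 'asc')])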
def receive_nak_requesting(self, pkt):
"""Receive NAK in REQUESTING state."""
logger.debug("C3.1. Received NAK?, in REQUESTING state.")
if self.process_received_nak(pkt):
logger.debug("C3.1: T. Received NAK, in REQUESTING state, "
"raise INIT.")
raise self.INIT() | def function[receive_nak_requesting, parameter[self, pkt]]:
constant[Receive NAK in REQUESTING state.]
call[name[logger].debug, parameter[constant[C3.1. Received NAK?, in REQUESTING state.]]]
if call[name[self].process_received_nak, parameter[name[pkt]]] begin[:]
call[name[logger].debug, parameter[constant[C3.1: T. Received NAK, in REQUESTING state, raise INIT.]]]
<ast.Raise object at 0x7da1b03b97e0> | keyword[def] identifier[receive_nak_requesting] ( identifier[self] , identifier[pkt] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[self] . identifier[process_received_nak] ( identifier[pkt] ):
identifier[logger] . identifier[debug] ( literal[string]
literal[string] )
keyword[raise] identifier[self] . identifier[INIT] () | def receive_nak_requesting(self, pkt):
"""Receive NAK in REQUESTING state."""
logger.debug('C3.1. Received NAK?, in REQUESTING state.')
if self.process_received_nak(pkt):
logger.debug('C3.1: T. Received NAK, in REQUESTING state, raise INIT.')
raise self.INIT() # depends on [control=['if'], data=[]] |
def gather_configs(self):
"""
Gather configuration requirements of all plugins
"""
configs = []
for what in self.order:
for key in self.plugins[what]:
mgr = self.plugins[what][key]
c = mgr.config(what='get')
if c is not None:
c.update({
'description': mgr.description
})
# print("Gathering configuration from ", c)
configs.append(c)
return configs | def function[gather_configs, parameter[self]]:
constant[
Gather configuration requirements of all plugins
]
variable[configs] assign[=] list[[]]
for taget[name[what]] in starred[name[self].order] begin[:]
for taget[name[key]] in starred[call[name[self].plugins][name[what]]] begin[:]
variable[mgr] assign[=] call[call[name[self].plugins][name[what]]][name[key]]
variable[c] assign[=] call[name[mgr].config, parameter[]]
if compare[name[c] is_not constant[None]] begin[:]
call[name[c].update, parameter[dictionary[[<ast.Constant object at 0x7da1afef8af0>], [<ast.Attribute object at 0x7da1afef8b20>]]]]
call[name[configs].append, parameter[name[c]]]
return[name[configs]] | keyword[def] identifier[gather_configs] ( identifier[self] ):
literal[string]
identifier[configs] =[]
keyword[for] identifier[what] keyword[in] identifier[self] . identifier[order] :
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[plugins] [ identifier[what] ]:
identifier[mgr] = identifier[self] . identifier[plugins] [ identifier[what] ][ identifier[key] ]
identifier[c] = identifier[mgr] . identifier[config] ( identifier[what] = literal[string] )
keyword[if] identifier[c] keyword[is] keyword[not] keyword[None] :
identifier[c] . identifier[update] ({
literal[string] : identifier[mgr] . identifier[description]
})
identifier[configs] . identifier[append] ( identifier[c] )
keyword[return] identifier[configs] | def gather_configs(self):
"""
Gather configuration requirements of all plugins
"""
configs = []
for what in self.order:
for key in self.plugins[what]:
mgr = self.plugins[what][key]
c = mgr.config(what='get')
if c is not None:
c.update({'description': mgr.description})
# print("Gathering configuration from ", c)
configs.append(c) # depends on [control=['if'], data=['c']] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['what']]
return configs |
def get_location (host):
"""Get translated country and optional city name.
    @return: country with optional city, or None if not found
"""
if geoip is None:
# no geoip available
return None
try:
record = get_geoip_record(host)
except (geoip_error, socket.error):
log.debug(LOG_PLUGIN, "Geoip error for %r", host, exception=True)
# ignore lookup errors
return None
value = u""
if record and record.get("city"):
value += unicode_safe(record["city"])
if record and record.get("country_name"):
if value:
value += u", "
value += unicode_safe(record["country_name"])
return value | def function[get_location, parameter[host]]:
constant[Get translated country and optional city name.
    @return: country with optional city, or None if not found
]
if compare[name[geoip] is constant[None]] begin[:]
return[constant[None]]
<ast.Try object at 0x7da1b0ab9e10>
variable[value] assign[=] constant[]
if <ast.BoolOp object at 0x7da1b0ab8a90> begin[:]
<ast.AugAssign object at 0x7da1b0ab9570>
if <ast.BoolOp object at 0x7da1b0ab9270> begin[:]
if name[value] begin[:]
<ast.AugAssign object at 0x7da1b0ab8df0>
<ast.AugAssign object at 0x7da1b0abb130>
return[name[value]] | keyword[def] identifier[get_location] ( identifier[host] ):
literal[string]
keyword[if] identifier[geoip] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[try] :
identifier[record] = identifier[get_geoip_record] ( identifier[host] )
keyword[except] ( identifier[geoip_error] , identifier[socket] . identifier[error] ):
identifier[log] . identifier[debug] ( identifier[LOG_PLUGIN] , literal[string] , identifier[host] , identifier[exception] = keyword[True] )
keyword[return] keyword[None]
identifier[value] = literal[string]
keyword[if] identifier[record] keyword[and] identifier[record] . identifier[get] ( literal[string] ):
identifier[value] += identifier[unicode_safe] ( identifier[record] [ literal[string] ])
keyword[if] identifier[record] keyword[and] identifier[record] . identifier[get] ( literal[string] ):
keyword[if] identifier[value] :
identifier[value] += literal[string]
identifier[value] += identifier[unicode_safe] ( identifier[record] [ literal[string] ])
keyword[return] identifier[value] | def get_location(host):
"""Get translated country and optional city name.
    @return: country with optional city, or None if not found
"""
if geoip is None:
# no geoip available
return None # depends on [control=['if'], data=[]]
try:
record = get_geoip_record(host) # depends on [control=['try'], data=[]]
except (geoip_error, socket.error):
log.debug(LOG_PLUGIN, 'Geoip error for %r', host, exception=True)
# ignore lookup errors
return None # depends on [control=['except'], data=[]]
value = u''
if record and record.get('city'):
value += unicode_safe(record['city']) # depends on [control=['if'], data=[]]
if record and record.get('country_name'):
if value:
value += u', ' # depends on [control=['if'], data=[]]
value += unicode_safe(record['country_name']) # depends on [control=['if'], data=[]]
return value |
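A standalone sketch of the "city, country" assembly in get_location, with a fake geoip record in place of a real lookup.

record = {"city": "Berlin", "country_name": "Germany"}   # hypothetical record
value = u""
if record.get("city"):
    value += record["city"]
if record.get("country_name"):
    if value:
        value += u", "
    value += record["country_name"]
assert value == u"Berlin, Germany"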
def run(self, command, arguments=(), console_mode_stdin=True, skip_cmd_shell=False):
"""This function does something.
:param command: The command to be executed
:type name: str.
:param arguments: A list of arguments to be passed to the command
:type state: str.
:returns: int -- the return code.
:raises: AttributeError, KeyError
iclegg: blocking i/o operations are slow, doesnt Python have a moden 'async' mechanism
rather than replying on 80's style callbacks?
"""
logging.info('running command: ' + command)
resource = ResourceLocator(CommandShell.ShellResource)
resource.add_selector('ShellId', self.__shell_id)
resource.add_option('WINRS_SKIP_CMD_SHELL', ['FALSE', 'TRUE'][bool(skip_cmd_shell)], True)
resource.add_option('WINRS_CONSOLEMODE_STDIN', ['FALSE', 'TRUE'][bool(console_mode_stdin)], True)
command = OrderedDict([('rsp:Command', command)])
command['rsp:Arguments'] = list(arguments)
response = self.session.command(resource, {'rsp:CommandLine': command})
command_id = response['rsp:CommandResponse']['rsp:CommandId']
logging.info('receive command: ' + command_id)
return command_id | def function[run, parameter[self, command, arguments, console_mode_stdin, skip_cmd_shell]]:
constant[Run a command in the remote shell and return its command id.
:param command: The command to be executed
:type command: str.
:param arguments: A list of arguments to be passed to the command
:type arguments: list.
:returns: str -- the id of the started command.
:raises: AttributeError, KeyError
iclegg: blocking i/o operations are slow, doesn't Python have a modern 'async' mechanism
rather than relying on 80's style callbacks?
]
call[name[logging].info, parameter[binary_operation[constant[running command: ] + name[command]]]]
variable[resource] assign[=] call[name[ResourceLocator], parameter[name[CommandShell].ShellResource]]
call[name[resource].add_selector, parameter[constant[ShellId], name[self].__shell_id]]
call[name[resource].add_option, parameter[constant[WINRS_SKIP_CMD_SHELL], call[list[[<ast.Constant object at 0x7da1b0bd8190>, <ast.Constant object at 0x7da1b0bd8b20>]]][call[name[bool], parameter[name[skip_cmd_shell]]]], constant[True]]]
call[name[resource].add_option, parameter[constant[WINRS_CONSOLEMODE_STDIN], call[list[[<ast.Constant object at 0x7da1b0bdb4f0>, <ast.Constant object at 0x7da1b0bdb190>]]][call[name[bool], parameter[name[console_mode_stdin]]]], constant[True]]]
variable[command] assign[=] call[name[OrderedDict], parameter[list[[<ast.Tuple object at 0x7da1b0bd9120>]]]]
call[name[command]][constant[rsp:Arguments]] assign[=] call[name[list], parameter[name[arguments]]]
variable[response] assign[=] call[name[self].session.command, parameter[name[resource], dictionary[[<ast.Constant object at 0x7da1b0bda710>], [<ast.Name object at 0x7da1b0bd9de0>]]]]
variable[command_id] assign[=] call[call[name[response]][constant[rsp:CommandResponse]]][constant[rsp:CommandId]]
call[name[logging].info, parameter[binary_operation[constant[receive command: ] + name[command_id]]]]
return[name[command_id]] | keyword[def] identifier[run] ( identifier[self] , identifier[command] , identifier[arguments] =(), identifier[console_mode_stdin] = keyword[True] , identifier[skip_cmd_shell] = keyword[False] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] + identifier[command] )
identifier[resource] = identifier[ResourceLocator] ( identifier[CommandShell] . identifier[ShellResource] )
identifier[resource] . identifier[add_selector] ( literal[string] , identifier[self] . identifier[__shell_id] )
identifier[resource] . identifier[add_option] ( literal[string] ,[ literal[string] , literal[string] ][ identifier[bool] ( identifier[skip_cmd_shell] )], keyword[True] )
identifier[resource] . identifier[add_option] ( literal[string] ,[ literal[string] , literal[string] ][ identifier[bool] ( identifier[console_mode_stdin] )], keyword[True] )
identifier[command] = identifier[OrderedDict] ([( literal[string] , identifier[command] )])
identifier[command] [ literal[string] ]= identifier[list] ( identifier[arguments] )
identifier[response] = identifier[self] . identifier[session] . identifier[command] ( identifier[resource] ,{ literal[string] : identifier[command] })
identifier[command_id] = identifier[response] [ literal[string] ][ literal[string] ]
identifier[logging] . identifier[info] ( literal[string] + identifier[command_id] )
keyword[return] identifier[command_id] | def run(self, command, arguments=(), console_mode_stdin=True, skip_cmd_shell=False):
"""This function does something.
:param command: The command to be executed
:type name: str.
:param arguments: A list of arguments to be passed to the command
:type state: str.
:returns: int -- the return code.
:raises: AttributeError, KeyError
iclegg: blocking i/o operations are slow, doesnt Python have a moden 'async' mechanism
rather than replying on 80's style callbacks?
"""
logging.info('running command: ' + command)
resource = ResourceLocator(CommandShell.ShellResource)
resource.add_selector('ShellId', self.__shell_id)
resource.add_option('WINRS_SKIP_CMD_SHELL', ['FALSE', 'TRUE'][bool(skip_cmd_shell)], True)
resource.add_option('WINRS_CONSOLEMODE_STDIN', ['FALSE', 'TRUE'][bool(console_mode_stdin)], True)
command = OrderedDict([('rsp:Command', command)])
command['rsp:Arguments'] = list(arguments)
response = self.session.command(resource, {'rsp:CommandLine': command})
command_id = response['rsp:CommandResponse']['rsp:CommandId']
logging.info('receive command: ' + command_id)
return command_id |
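A small note on the option idiom in run above: indexing a two-element list with bool(flag) maps falsy/truthy values to the 'FALSE'/'TRUE' strings that the WinRS options expect.

def winrs_flag(flag):
    return ['FALSE', 'TRUE'][bool(flag)]

assert winrs_flag(False) == 'FALSE'
assert winrs_flag(1) == 'TRUE'               # any truthy value selects 'TRUE'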
def project(self, term_doc_mat, x_dim=0, y_dim=1):
'''
Returns a projection of the categories
:param term_doc_mat: a TermDocMatrix
:return: CategoryProjection
'''
return self._project_category_corpus(self._get_category_metadata_corpus(term_doc_mat),
x_dim, y_dim) | def function[project, parameter[self, term_doc_mat, x_dim, y_dim]]:
constant[
Returns a projection of the categories
:param term_doc_mat: a TermDocMatrix
:return: CategoryProjection
]
return[call[name[self]._project_category_corpus, parameter[call[name[self]._get_category_metadata_corpus, parameter[name[term_doc_mat]]], name[x_dim], name[y_dim]]]] | keyword[def] identifier[project] ( identifier[self] , identifier[term_doc_mat] , identifier[x_dim] = literal[int] , identifier[y_dim] = literal[int] ):
literal[string]
keyword[return] identifier[self] . identifier[_project_category_corpus] ( identifier[self] . identifier[_get_category_metadata_corpus] ( identifier[term_doc_mat] ),
identifier[x_dim] , identifier[y_dim] ) | def project(self, term_doc_mat, x_dim=0, y_dim=1):
"""
Returns a projection of the categories
:param term_doc_mat: a TermDocMatrix
:return: CategoryProjection
"""
return self._project_category_corpus(self._get_category_metadata_corpus(term_doc_mat), x_dim, y_dim) |
def down(self, down_uid):
'''
Download the entity by UID.
'''
down_url = MPost.get_by_uid(down_uid).extinfo.get('tag__file_download', '')
print('=' * 40)
print(down_url)
str_down_url = str(down_url)[15:]
if down_url:
ment_id = MEntity.get_id_by_impath(str_down_url)
if ment_id:
MEntity2User.create_entity2user(ment_id, self.userinfo.uid)
return True
else:
return False | def function[down, parameter[self, down_uid]]:
constant[
Download the entity by UID.
]
variable[down_url] assign[=] call[call[name[MPost].get_by_uid, parameter[name[down_uid]]].extinfo.get, parameter[constant[tag__file_download], constant[]]]
call[name[print], parameter[binary_operation[constant[=] * constant[40]]]]
call[name[print], parameter[name[down_url]]]
variable[str_down_url] assign[=] call[call[name[str], parameter[name[down_url]]]][<ast.Slice object at 0x7da1b04fa230>]
if name[down_url] begin[:]
variable[ment_id] assign[=] call[name[MEntity].get_id_by_impath, parameter[name[str_down_url]]]
if name[ment_id] begin[:]
call[name[MEntity2User].create_entity2user, parameter[name[ment_id], name[self].userinfo.uid]]
return[constant[True]] | keyword[def] identifier[down] ( identifier[self] , identifier[down_uid] ):
literal[string]
identifier[down_url] = identifier[MPost] . identifier[get_by_uid] ( identifier[down_uid] ). identifier[extinfo] . identifier[get] ( literal[string] , literal[string] )
identifier[print] ( literal[string] * literal[int] )
identifier[print] ( identifier[down_url] )
identifier[str_down_url] = identifier[str] ( identifier[down_url] )[ literal[int] :]
keyword[if] identifier[down_url] :
identifier[ment_id] = identifier[MEntity] . identifier[get_id_by_impath] ( identifier[str_down_url] )
keyword[if] identifier[ment_id] :
identifier[MEntity2User] . identifier[create_entity2user] ( identifier[ment_id] , identifier[self] . identifier[userinfo] . identifier[uid] )
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def down(self, down_uid):
"""
Download the entity by UID.
"""
down_url = MPost.get_by_uid(down_uid).extinfo.get('tag__file_download', '')
print('=' * 40)
print(down_url)
str_down_url = str(down_url)[15:]
if down_url:
ment_id = MEntity.get_id_by_impath(str_down_url)
if ment_id:
MEntity2User.create_entity2user(ment_id, self.userinfo.uid) # depends on [control=['if'], data=[]]
return True # depends on [control=['if'], data=[]]
else:
return False |
def get_lenet():
""" A lenet style net, takes difference of each frame as input.
"""
source = mx.sym.Variable("data")
source = (source - 128) * (1.0/128)
frames = mx.sym.SliceChannel(source, num_outputs=30)
diffs = [frames[i+1] - frames[i] for i in range(29)]
source = mx.sym.Concat(*diffs)
net = mx.sym.Convolution(source, kernel=(5, 5), num_filter=40)
net = mx.sym.BatchNorm(net, fix_gamma=True)
net = mx.sym.Activation(net, act_type="relu")
net = mx.sym.Pooling(net, pool_type="max", kernel=(2,2), stride=(2,2))
net = mx.sym.Convolution(net, kernel=(3, 3), num_filter=40)
net = mx.sym.BatchNorm(net, fix_gamma=True)
net = mx.sym.Activation(net, act_type="relu")
net = mx.sym.Pooling(net, pool_type="max", kernel=(2,2), stride=(2,2))
# first fullc
flatten = mx.symbol.Flatten(net)
flatten = mx.symbol.Dropout(flatten)
fc1 = mx.symbol.FullyConnected(data=flatten, num_hidden=600)
# Name the final layer as softmax so it auto matches the naming of data iterator
# Otherwise we can also change the provide_data in the data iter
return mx.symbol.LogisticRegressionOutput(data=fc1, name='softmax') | def function[get_lenet, parameter[]]:
constant[ A lenet style net, takes difference of each frame as input.
]
variable[source] assign[=] call[name[mx].sym.Variable, parameter[constant[data]]]
variable[source] assign[=] binary_operation[binary_operation[name[source] - constant[128]] * binary_operation[constant[1.0] / constant[128]]]
variable[frames] assign[=] call[name[mx].sym.SliceChannel, parameter[name[source]]]
variable[diffs] assign[=] <ast.ListComp object at 0x7da1b2008d60>
variable[source] assign[=] call[name[mx].sym.Concat, parameter[<ast.Starred object at 0x7da1b2008a60>]]
variable[net] assign[=] call[name[mx].sym.Convolution, parameter[name[source]]]
variable[net] assign[=] call[name[mx].sym.BatchNorm, parameter[name[net]]]
variable[net] assign[=] call[name[mx].sym.Activation, parameter[name[net]]]
variable[net] assign[=] call[name[mx].sym.Pooling, parameter[name[net]]]
variable[net] assign[=] call[name[mx].sym.Convolution, parameter[name[net]]]
variable[net] assign[=] call[name[mx].sym.BatchNorm, parameter[name[net]]]
variable[net] assign[=] call[name[mx].sym.Activation, parameter[name[net]]]
variable[net] assign[=] call[name[mx].sym.Pooling, parameter[name[net]]]
variable[flatten] assign[=] call[name[mx].symbol.Flatten, parameter[name[net]]]
variable[flatten] assign[=] call[name[mx].symbol.Dropout, parameter[name[flatten]]]
variable[fc1] assign[=] call[name[mx].symbol.FullyConnected, parameter[]]
return[call[name[mx].symbol.LogisticRegressionOutput, parameter[]]] | keyword[def] identifier[get_lenet] ():
literal[string]
identifier[source] = identifier[mx] . identifier[sym] . identifier[Variable] ( literal[string] )
identifier[source] =( identifier[source] - literal[int] )*( literal[int] / literal[int] )
identifier[frames] = identifier[mx] . identifier[sym] . identifier[SliceChannel] ( identifier[source] , identifier[num_outputs] = literal[int] )
identifier[diffs] =[ identifier[frames] [ identifier[i] + literal[int] ]- identifier[frames] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )]
identifier[source] = identifier[mx] . identifier[sym] . identifier[Concat] (* identifier[diffs] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[Convolution] ( identifier[source] , identifier[kernel] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[BatchNorm] ( identifier[net] , identifier[fix_gamma] = keyword[True] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[Activation] ( identifier[net] , identifier[act_type] = literal[string] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[Pooling] ( identifier[net] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ))
identifier[net] = identifier[mx] . identifier[sym] . identifier[Convolution] ( identifier[net] , identifier[kernel] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[BatchNorm] ( identifier[net] , identifier[fix_gamma] = keyword[True] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[Activation] ( identifier[net] , identifier[act_type] = literal[string] )
identifier[net] = identifier[mx] . identifier[sym] . identifier[Pooling] ( identifier[net] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ))
identifier[flatten] = identifier[mx] . identifier[symbol] . identifier[Flatten] ( identifier[net] )
identifier[flatten] = identifier[mx] . identifier[symbol] . identifier[Dropout] ( identifier[flatten] )
identifier[fc1] = identifier[mx] . identifier[symbol] . identifier[FullyConnected] ( identifier[data] = identifier[flatten] , identifier[num_hidden] = literal[int] )
keyword[return] identifier[mx] . identifier[symbol] . identifier[LogisticRegressionOutput] ( identifier[data] = identifier[fc1] , identifier[name] = literal[string] ) | def get_lenet():
""" A lenet style net, takes difference of each frame as input.
"""
source = mx.sym.Variable('data')
source = (source - 128) * (1.0 / 128)
frames = mx.sym.SliceChannel(source, num_outputs=30)
diffs = [frames[i + 1] - frames[i] for i in range(29)]
source = mx.sym.Concat(*diffs)
net = mx.sym.Convolution(source, kernel=(5, 5), num_filter=40)
net = mx.sym.BatchNorm(net, fix_gamma=True)
net = mx.sym.Activation(net, act_type='relu')
net = mx.sym.Pooling(net, pool_type='max', kernel=(2, 2), stride=(2, 2))
net = mx.sym.Convolution(net, kernel=(3, 3), num_filter=40)
net = mx.sym.BatchNorm(net, fix_gamma=True)
net = mx.sym.Activation(net, act_type='relu')
net = mx.sym.Pooling(net, pool_type='max', kernel=(2, 2), stride=(2, 2))
# first fullc
flatten = mx.symbol.Flatten(net)
flatten = mx.symbol.Dropout(flatten)
fc1 = mx.symbol.FullyConnected(data=flatten, num_hidden=600)
    # Name the final layer as softmax so it automatically matches the naming of the data iterator
# Otherwise we can also change the provide_data in the data iter
return mx.symbol.LogisticRegressionOutput(data=fc1, name='softmax') |
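A standalone numpy sketch of the frame-differencing front end of get_lenet: 30 normalized input frames become 29 consecutive differences before the convolution stack. The spatial shape below is hypothetical.

import numpy as np

frames = np.random.rand(30, 64, 64).astype(np.float32) * 255
frames = (frames - 128) * (1.0 / 128)                    # same normalization
diffs = np.stack([frames[i + 1] - frames[i] for i in range(29)])
assert diffs.shape == (29, 64, 64)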
def create_new_version(
self,
name,
subject,
text='',
template_id=None,
html=None,
locale=None,
timeout=None
):
""" API call to create a new version of a template """
        if html:
payload = {
'name': name,
'subject': subject,
'html': html,
'text': text
}
else:
payload = {
'name': name,
'subject': subject,
'text': text
}
if locale:
url = self.TEMPLATES_SPECIFIC_LOCALE_VERSIONS_ENDPOINT % (
template_id,
locale
)
else:
url = self.TEMPLATES_NEW_VERSION_ENDPOINT % template_id
return self._api_request(
url,
self.HTTP_POST,
payload=payload,
timeout=timeout
) | def function[create_new_version, parameter[self, name, subject, text, template_id, html, locale, timeout]]:
constant[ API call to create a new version of a template ]
if name[html] begin[:]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1aff1e7d0>, <ast.Constant object at 0x7da1aff1d870>, <ast.Constant object at 0x7da1aff1da50>, <ast.Constant object at 0x7da1aff1e470>], [<ast.Name object at 0x7da1aff1f4f0>, <ast.Name object at 0x7da1aff1e230>, <ast.Name object at 0x7da1aff1cd30>, <ast.Name object at 0x7da1aff1ece0>]]
if name[locale] begin[:]
variable[url] assign[=] binary_operation[name[self].TEMPLATES_SPECIFIC_LOCALE_VERSIONS_ENDPOINT <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1aff1c8b0>, <ast.Name object at 0x7da1aff1c5b0>]]]
return[call[name[self]._api_request, parameter[name[url], name[self].HTTP_POST]]] | keyword[def] identifier[create_new_version] (
identifier[self] ,
identifier[name] ,
identifier[subject] ,
identifier[text] = literal[string] ,
identifier[template_id] = keyword[None] ,
identifier[html] = keyword[None] ,
identifier[locale] = keyword[None] ,
identifier[timeout] = keyword[None]
):
literal[string]
keyword[if] identifier[html] :
identifier[payload] ={
literal[string] : identifier[name] ,
literal[string] : identifier[subject] ,
literal[string] : identifier[html] ,
literal[string] : identifier[text]
}
keyword[else] :
identifier[payload] ={
literal[string] : identifier[name] ,
literal[string] : identifier[subject] ,
literal[string] : identifier[text]
}
keyword[if] identifier[locale] :
identifier[url] = identifier[self] . identifier[TEMPLATES_SPECIFIC_LOCALE_VERSIONS_ENDPOINT] %(
identifier[template_id] ,
identifier[locale]
)
keyword[else] :
identifier[url] = identifier[self] . identifier[TEMPLATES_NEW_VERSION_ENDPOINT] % identifier[template_id]
keyword[return] identifier[self] . identifier[_api_request] (
identifier[url] ,
identifier[self] . identifier[HTTP_POST] ,
identifier[payload] = identifier[payload] ,
identifier[timeout] = identifier[timeout]
) | def create_new_version(self, name, subject, text='', template_id=None, html=None, locale=None, timeout=None):
""" API call to create a new version of a template """
if html:
payload = {'name': name, 'subject': subject, 'html': html, 'text': text} # depends on [control=['if'], data=[]]
else:
payload = {'name': name, 'subject': subject, 'text': text}
if locale:
url = self.TEMPLATES_SPECIFIC_LOCALE_VERSIONS_ENDPOINT % (template_id, locale) # depends on [control=['if'], data=[]]
else:
url = self.TEMPLATES_NEW_VERSION_ENDPOINT % template_id
return self._api_request(url, self.HTTP_POST, payload=payload, timeout=timeout) |
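A standalone sketch of the conditional payload built above; the values are hypothetical. The 'html' key is only present when html was supplied.

name, subject, text, html = 'welcome', 'Hi there', 'plain body', None
payload = {'name': name, 'subject': subject, 'text': text}
if html:
    payload['html'] = html
assert 'html' not in payload                 # html=None -> text-only version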
def _read_serial(self, may_block):
""" Read the serial number from a YubiKey > 2.2. """
        frame = yubikey_frame.YubiKeyFrame(command=SLOT.DEVICE_SERIAL)
self._device._write(frame)
response = self._device._read_response(may_block=may_block)
if not yubico_util.validate_crc16(response[:6]):
raise YubiKeyUSBHIDError("Read from device failed CRC check")
# the serial number is big-endian, although everything else is little-endian
serial = struct.unpack('>lxxx', response)
return serial[0] | def function[_read_serial, parameter[self, may_block]]:
constant[ Read the serial number from a YubiKey > 2.2. ]
variable[frame] assign[=] call[name[yubikey_frame].YubiKeyFrame, parameter[]]
call[name[self]._device._write, parameter[name[frame]]]
variable[response] assign[=] call[name[self]._device._read_response, parameter[]]
if <ast.UnaryOp object at 0x7da1b08890c0> begin[:]
<ast.Raise object at 0x7da1b088a2f0>
variable[serial] assign[=] call[name[struct].unpack, parameter[constant[>lxxx], name[response]]]
return[call[name[serial]][constant[0]]] | keyword[def] identifier[_read_serial] ( identifier[self] , identifier[may_block] ):
literal[string]
identifier[frame] = identifier[yubikey_frame] . identifier[YubiKeyFrame] ( identifier[command] = identifier[SLOT] . identifier[DEVICE_SERIAL] )
identifier[self] . identifier[_device] . identifier[_write] ( identifier[frame] )
identifier[response] = identifier[self] . identifier[_device] . identifier[_read_response] ( identifier[may_block] = identifier[may_block] )
keyword[if] keyword[not] identifier[yubico_util] . identifier[validate_crc16] ( identifier[response] [: literal[int] ]):
keyword[raise] identifier[YubiKeyUSBHIDError] ( literal[string] )
identifier[serial] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[response] )
keyword[return] identifier[serial] [ literal[int] ] | def _read_serial(self, may_block):
""" Read the serial number from a YubiKey > 2.2. """
frame = yubikey_frame.YubiKeyFrame(command=SLOT.DEVICE_SERIAL)
self._device._write(frame)
response = self._device._read_response(may_block=may_block)
if not yubico_util.validate_crc16(response[:6]):
raise YubiKeyUSBHIDError('Read from device failed CRC check') # depends on [control=['if'], data=[]]
# the serial number is big-endian, although everything else is little-endian
serial = struct.unpack('>lxxx', response)
return serial[0] |
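A standalone sketch of the big-endian unpack in _read_serial: the format '>lxxx' reads one 4-byte signed integer and then skips three pad bytes, so the response is expected to be 7 bytes. The payload bytes below are hypothetical.

import struct

assert struct.calcsize('>lxxx') == 7
response = b'\x00\x12\xd6\x87\x00\x00\x00'   # hypothetical 7-byte response
serial = struct.unpack('>lxxx', response)[0]
assert serial == 1234567                     # 0x0012d687, big-endian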
def get_bestfit_line(self, x_min=None, x_max=None, resolution=None):
"""
Method to get bestfit line using the defined
self.bestfit_func method
args:
x_min: scalar, default=min(x)
minimum x value of the line
x_max: scalar, default=max(x)
maximum x value of the line
resolution: int, default=1000
how many steps between x_min and x_max
returns:
[bestfit_x, bestfit_y]
"""
x = self.args["x"]
if x_min is None:
x_min = min(x)
if x_max is None:
x_max = max(x)
if resolution is None:
resolution = self.args.get("resolution", 1000)
bestfit_x = np.linspace(x_min, x_max, resolution)
return [bestfit_x, self.bestfit_func(bestfit_x)] | def function[get_bestfit_line, parameter[self, x_min, x_max, resolution]]:
constant[
Method to get bestfit line using the defined
self.bestfit_func method
args:
x_min: scalar, default=min(x)
minimum x value of the line
x_max: scalar, default=max(x)
maximum x value of the line
resolution: int, default=1000
how many steps between x_min and x_max
returns:
[bestfit_x, bestfit_y]
]
variable[x] assign[=] call[name[self].args][constant[x]]
if compare[name[x_min] is constant[None]] begin[:]
variable[x_min] assign[=] call[name[min], parameter[name[x]]]
if compare[name[x_max] is constant[None]] begin[:]
variable[x_max] assign[=] call[name[max], parameter[name[x]]]
if compare[name[resolution] is constant[None]] begin[:]
variable[resolution] assign[=] call[name[self].args.get, parameter[constant[resolution], constant[1000]]]
variable[bestfit_x] assign[=] call[name[np].linspace, parameter[name[x_min], name[x_max], name[resolution]]]
return[list[[<ast.Name object at 0x7da1b1353580>, <ast.Call object at 0x7da1b1352230>]]] | keyword[def] identifier[get_bestfit_line] ( identifier[self] , identifier[x_min] = keyword[None] , identifier[x_max] = keyword[None] , identifier[resolution] = keyword[None] ):
literal[string]
identifier[x] = identifier[self] . identifier[args] [ literal[string] ]
keyword[if] identifier[x_min] keyword[is] keyword[None] :
identifier[x_min] = identifier[min] ( identifier[x] )
keyword[if] identifier[x_max] keyword[is] keyword[None] :
identifier[x_max] = identifier[max] ( identifier[x] )
keyword[if] identifier[resolution] keyword[is] keyword[None] :
identifier[resolution] = identifier[self] . identifier[args] . identifier[get] ( literal[string] , literal[int] )
identifier[bestfit_x] = identifier[np] . identifier[linspace] ( identifier[x_min] , identifier[x_max] , identifier[resolution] )
keyword[return] [ identifier[bestfit_x] , identifier[self] . identifier[bestfit_func] ( identifier[bestfit_x] )] | def get_bestfit_line(self, x_min=None, x_max=None, resolution=None):
"""
Method to get bestfit line using the defined
self.bestfit_func method
args:
x_min: scalar, default=min(x)
minimum x value of the line
x_max: scalar, default=max(x)
maximum x value of the line
resolution: int, default=1000
how many steps between x_min and x_max
returns:
[bestfit_x, bestfit_y]
"""
x = self.args['x']
if x_min is None:
x_min = min(x) # depends on [control=['if'], data=['x_min']]
if x_max is None:
x_max = max(x) # depends on [control=['if'], data=['x_max']]
if resolution is None:
resolution = self.args.get('resolution', 1000) # depends on [control=['if'], data=['resolution']]
bestfit_x = np.linspace(x_min, x_max, resolution)
return [bestfit_x, self.bestfit_func(bestfit_x)] |
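A standalone sketch of the line sampling in get_bestfit_line, with a hypothetical linear bestfit_func standing in for the fitted model.

import numpy as np

bestfit_func = lambda x: 2.0 * x + 1.0       # hypothetical fitted model
x = np.array([0.0, 1.0, 4.0])
bestfit_x = np.linspace(x.min(), x.max(), 5)
bestfit_y = bestfit_func(bestfit_x)
assert bestfit_x[0] == 0.0 and bestfit_x[-1] == 4.0
assert bestfit_y[0] == 1.0                   # 2*0 + 1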
def list_common_lookups(kwargs=None, call=None):
'''
List common lookups for a particular type of item
.. versionadded:: 2015.8.0
'''
if kwargs is None:
kwargs = {}
args = {}
if 'lookup' in kwargs:
args['lookup'] = kwargs['lookup']
response = _query('common', 'lookup/list', args=args)
return response | def function[list_common_lookups, parameter[kwargs, call]]:
constant[
List common lookups for a particular type of item
.. versionadded:: 2015.8.0
]
if compare[name[kwargs] is constant[None]] begin[:]
variable[kwargs] assign[=] dictionary[[], []]
variable[args] assign[=] dictionary[[], []]
if compare[constant[lookup] in name[kwargs]] begin[:]
call[name[args]][constant[lookup]] assign[=] call[name[kwargs]][constant[lookup]]
variable[response] assign[=] call[name[_query], parameter[constant[common], constant[lookup/list]]]
return[name[response]] | keyword[def] identifier[list_common_lookups] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[kwargs] keyword[is] keyword[None] :
identifier[kwargs] ={}
identifier[args] ={}
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[args] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
identifier[response] = identifier[_query] ( literal[string] , literal[string] , identifier[args] = identifier[args] )
keyword[return] identifier[response] | def list_common_lookups(kwargs=None, call=None):
"""
List common lookups for a particular type of item
.. versionadded:: 2015.8.0
"""
if kwargs is None:
kwargs = {} # depends on [control=['if'], data=['kwargs']]
args = {}
if 'lookup' in kwargs:
args['lookup'] = kwargs['lookup'] # depends on [control=['if'], data=['kwargs']]
response = _query('common', 'lookup/list', args=args)
return response |
def setRoles(self, *args, **kwargs):
"""Adds the role assigned to this user to a 'role' field.
Depends on the 'role' field that comes with a fullDetails=True
build of the MambuUser.
Returns the number of requests done to Mambu.
"""
try:
role = self.mamburoleclass(entid=self['role']['encodedKey'], *args, **kwargs)
except KeyError:
return 0
except AttributeError as ae:
from .mamburoles import MambuRole
self.mamburoleclass = MambuRole
try:
role = self.mamburoleclass(entid=self['role']['encodedKey'], *args, **kwargs)
except KeyError:
return 0
self['role']['role'] = role
return 1 | def function[setRoles, parameter[self]]:
constant[Adds the role assigned to this user to a 'role' field.
Depends on the 'role' field that comes with a fullDetails=True
build of the MambuUser.
Returns the number of requests done to Mambu.
]
<ast.Try object at 0x7da20c6a91e0>
call[call[name[self]][constant[role]]][constant[role]] assign[=] name[role]
return[constant[1]] | keyword[def] identifier[setRoles] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[role] = identifier[self] . identifier[mamburoleclass] ( identifier[entid] = identifier[self] [ literal[string] ][ literal[string] ],* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[KeyError] :
keyword[return] literal[int]
keyword[except] identifier[AttributeError] keyword[as] identifier[ae] :
keyword[from] . identifier[mamburoles] keyword[import] identifier[MambuRole]
identifier[self] . identifier[mamburoleclass] = identifier[MambuRole]
keyword[try] :
identifier[role] = identifier[self] . identifier[mamburoleclass] ( identifier[entid] = identifier[self] [ literal[string] ][ literal[string] ],* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[KeyError] :
keyword[return] literal[int]
identifier[self] [ literal[string] ][ literal[string] ]= identifier[role]
keyword[return] literal[int] | def setRoles(self, *args, **kwargs):
"""Adds the role assigned to this user to a 'role' field.
Depends on the 'role' field that comes with a fullDetails=True
build of the MambuUser.
Returns the number of requests done to Mambu.
"""
try:
role = self.mamburoleclass(*args, entid=self['role']['encodedKey'], **kwargs) # depends on [control=['try'], data=[]]
except KeyError:
return 0 # depends on [control=['except'], data=[]]
except AttributeError as ae:
from .mamburoles import MambuRole
self.mamburoleclass = MambuRole
try:
role = self.mamburoleclass(*args, entid=self['role']['encodedKey'], **kwargs) # depends on [control=['try'], data=[]]
except KeyError:
return 0 # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
self['role']['role'] = role
return 1 |
def path(self, key):
'''Returns the `path` for given `key`'''
return os.path.join(self.root_path, self.relative_path(key)) | def function[path, parameter[self, key]]:
constant[Returns the `path` for given `key`]
return[call[name[os].path.join, parameter[name[self].root_path, call[name[self].relative_path, parameter[name[key]]]]]] | keyword[def] identifier[path] ( identifier[self] , identifier[key] ):
literal[string]
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root_path] , identifier[self] . identifier[relative_path] ( identifier[key] )) | def path(self, key):
"""Returns the `path` for given `key`"""
return os.path.join(self.root_path, self.relative_path(key)) |
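A standalone sketch of the key-to-path mapping in path; relative_path is stubbed as the identity, which is an assumption about the real implementation, and a POSIX-style join is assumed.

import os

root_path = '/var/data'
relative_path = lambda key: key              # hypothetical stand-in
assert os.path.join(root_path, relative_path('a/b')) == '/var/data/a/b'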
def get_pyxb(self):
"""Generate a DataONE Exception PyXB object.
The PyXB object supports directly reading and writing the individual values that
may be included in a DataONE Exception.
"""
dataone_exception_pyxb = dataoneErrors.error()
dataone_exception_pyxb.name = self.__class__.__name__
dataone_exception_pyxb.errorCode = self.errorCode
dataone_exception_pyxb.detailCode = self.detailCode
if self.description is not None:
dataone_exception_pyxb.description = self.description
dataone_exception_pyxb.traceInformation = self.traceInformation
if self.identifier is not None:
dataone_exception_pyxb.identifier = self.identifier
if self.nodeId is not None:
dataone_exception_pyxb.nodeId = self.nodeId
return dataone_exception_pyxb | def function[get_pyxb, parameter[self]]:
constant[Generate a DataONE Exception PyXB object.
The PyXB object supports directly reading and writing the individual values that
may be included in a DataONE Exception.
]
variable[dataone_exception_pyxb] assign[=] call[name[dataoneErrors].error, parameter[]]
name[dataone_exception_pyxb].name assign[=] name[self].__class__.__name__
name[dataone_exception_pyxb].errorCode assign[=] name[self].errorCode
name[dataone_exception_pyxb].detailCode assign[=] name[self].detailCode
if compare[name[self].description is_not constant[None]] begin[:]
name[dataone_exception_pyxb].description assign[=] name[self].description
name[dataone_exception_pyxb].traceInformation assign[=] name[self].traceInformation
if compare[name[self].identifier is_not constant[None]] begin[:]
name[dataone_exception_pyxb].identifier assign[=] name[self].identifier
if compare[name[self].nodeId is_not constant[None]] begin[:]
name[dataone_exception_pyxb].nodeId assign[=] name[self].nodeId
return[name[dataone_exception_pyxb]] | keyword[def] identifier[get_pyxb] ( identifier[self] ):
literal[string]
identifier[dataone_exception_pyxb] = identifier[dataoneErrors] . identifier[error] ()
identifier[dataone_exception_pyxb] . identifier[name] = identifier[self] . identifier[__class__] . identifier[__name__]
identifier[dataone_exception_pyxb] . identifier[errorCode] = identifier[self] . identifier[errorCode]
identifier[dataone_exception_pyxb] . identifier[detailCode] = identifier[self] . identifier[detailCode]
keyword[if] identifier[self] . identifier[description] keyword[is] keyword[not] keyword[None] :
identifier[dataone_exception_pyxb] . identifier[description] = identifier[self] . identifier[description]
identifier[dataone_exception_pyxb] . identifier[traceInformation] = identifier[self] . identifier[traceInformation]
keyword[if] identifier[self] . identifier[identifier] keyword[is] keyword[not] keyword[None] :
identifier[dataone_exception_pyxb] . identifier[identifier] = identifier[self] . identifier[identifier]
keyword[if] identifier[self] . identifier[nodeId] keyword[is] keyword[not] keyword[None] :
identifier[dataone_exception_pyxb] . identifier[nodeId] = identifier[self] . identifier[nodeId]
keyword[return] identifier[dataone_exception_pyxb] | def get_pyxb(self):
"""Generate a DataONE Exception PyXB object.
The PyXB object supports directly reading and writing the individual values that
may be included in a DataONE Exception.
"""
dataone_exception_pyxb = dataoneErrors.error()
dataone_exception_pyxb.name = self.__class__.__name__
dataone_exception_pyxb.errorCode = self.errorCode
dataone_exception_pyxb.detailCode = self.detailCode
if self.description is not None:
dataone_exception_pyxb.description = self.description # depends on [control=['if'], data=[]]
dataone_exception_pyxb.traceInformation = self.traceInformation
if self.identifier is not None:
dataone_exception_pyxb.identifier = self.identifier # depends on [control=['if'], data=[]]
if self.nodeId is not None:
dataone_exception_pyxb.nodeId = self.nodeId # depends on [control=['if'], data=[]]
return dataone_exception_pyxb |
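# Usage sketch for get_pyxb(): a hedged illustration only. The exception
# subclass and arguments below are assumptions; toxml() is PyXB's standard
# serializer for the returned object.
exc = NotFound('1800', description='object does not exist')  # assumed subclass
pyxb_obj = exc.get_pyxb()
xml_bytes = pyxb_obj.toxml('utf-8')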
def weight(weights):
"""
RETURN RANDOM INDEX INTO WEIGHT ARRAY, GIVEN WEIGHTS
"""
total = sum(weights)
p = SEED.random()
acc = 0
for i, w in enumerate(weights):
acc += w / total
if p < acc:
return i
return len(weights) - 1 | def function[weight, parameter[weights]]:
constant[
RETURN RANDOM INDEX INTO WEIGHT ARRAY, GIVEN WEIGHTS
]
variable[total] assign[=] call[name[sum], parameter[name[weights]]]
variable[p] assign[=] call[name[SEED].random, parameter[]]
variable[acc] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b0b6e2f0>, <ast.Name object at 0x7da1b0b6d360>]]] in starred[call[name[enumerate], parameter[name[weights]]]] begin[:]
<ast.AugAssign object at 0x7da18f7207f0>
if compare[name[p] less[<] name[acc]] begin[:]
return[name[i]]
return[binary_operation[call[name[len], parameter[name[weights]]] - constant[1]]] | keyword[def] identifier[weight] ( identifier[weights] ):
literal[string]
identifier[total] = identifier[sum] ( identifier[weights] )
identifier[p] = identifier[SEED] . identifier[random] ()
identifier[acc] = literal[int]
keyword[for] identifier[i] , identifier[w] keyword[in] identifier[enumerate] ( identifier[weights] ):
identifier[acc] += identifier[w] / identifier[total]
keyword[if] identifier[p] < identifier[acc] :
keyword[return] identifier[i]
keyword[return] identifier[len] ( identifier[weights] )- literal[int] | def weight(weights):
"""
RETURN RANDOM INDEX INTO WEIGHT ARRAY, GIVEN WEIGHTS
"""
total = sum(weights)
p = SEED.random()
acc = 0
for (i, w) in enumerate(weights):
acc += w / total
if p < acc:
return i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return len(weights) - 1 |
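# Sanity-check sketch for weight(), assuming the module-level SEED resolves
# to a seeded random.Random instance (an assumption; SEED is not shown here).
import random
from collections import Counter
SEED = random.Random(42)
counts = Counter(weight([1, 2, 7]) for _ in range(10000))
print(counts)  # index 2 should win roughly 70% of draws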
def cmd_tuneopt(self, args):
'''Select option for Tune Pot on Channel 6 (quadcopter only)'''
usage = "usage: tuneopt <set|show|reset|list>"
if self.mpstate.vehicle_type != 'copter':
print("This command is only available for copter")
return
if len(args) < 1:
print(usage)
return
if args[0].lower() == 'reset':
self.param_set('TUNE', '0')
elif args[0].lower() == 'set':
if len(args) < 4:
print('Usage: tuneopt set OPTION LOW HIGH')
return
option = self.tune_option_validate(args[1])
if not option:
print('Invalid Tune option: ' + args[1])
return
low = args[2]
high = args[3]
self.param_set('TUNE', tune_options[option])
self.param_set('TUNE_LOW', float(low) * 1000)
self.param_set('TUNE_HIGH', float(high) * 1000)
elif args[0].lower() == 'show':
self.tune_show()
elif args[0].lower() == 'list':
print("Options available:")
for s in sorted(tune_options.keys()):
print(' ' + s)
else:
print(usage) | def function[cmd_tuneopt, parameter[self, args]]:
constant[Select option for Tune Pot on Channel 6 (quadcopter only)]
variable[usage] assign[=] constant[usage: tuneopt <set|show|reset|list>]
if compare[name[self].mpstate.vehicle_type not_equal[!=] constant[copter]] begin[:]
call[name[print], parameter[constant[This command is only available for copter]]]
return[None]
if compare[call[name[len], parameter[name[args]]] less[<] constant[1]] begin[:]
call[name[print], parameter[name[usage]]]
return[None]
if compare[call[call[name[args]][constant[0]].lower, parameter[]] equal[==] constant[reset]] begin[:]
call[name[self].param_set, parameter[constant[TUNE], constant[0]]] | keyword[def] identifier[cmd_tuneopt] ( identifier[self] , identifier[args] ):
literal[string]
identifier[usage] = literal[string]
keyword[if] identifier[self] . identifier[mpstate] . identifier[vehicle_type] != literal[string] :
identifier[print] ( literal[string] )
keyword[return]
keyword[if] identifier[len] ( identifier[args] )< literal[int] :
identifier[print] ( identifier[usage] )
keyword[return]
keyword[if] identifier[args] [ literal[int] ]. identifier[lower] ()== literal[string] :
identifier[self] . identifier[param_set] ( literal[string] , literal[string] )
keyword[elif] identifier[args] [ literal[int] ]. identifier[lower] ()== literal[string] :
keyword[if] identifier[len] ( identifier[args] )< literal[int] :
identifier[print] ( literal[string] )
keyword[return]
identifier[option] = identifier[self] . identifier[tune_option_validate] ( identifier[args] [ literal[int] ])
keyword[if] keyword[not] identifier[option] :
identifier[print] ( literal[string] + identifier[args] [ literal[int] ])
keyword[return]
identifier[low] = identifier[args] [ literal[int] ]
identifier[high] = identifier[args] [ literal[int] ]
identifier[self] . identifier[param_set] ( literal[string] , identifier[tune_options] [ identifier[option] ])
identifier[self] . identifier[param_set] ( literal[string] , identifier[float] ( identifier[low] )* literal[int] )
identifier[self] . identifier[param_set] ( literal[string] , identifier[float] ( identifier[high] )* literal[int] )
keyword[elif] identifier[args] [ literal[int] ]. identifier[lower] ()== literal[string] :
identifier[self] . identifier[tune_show] ()
keyword[elif] identifier[args] [ literal[int] ]. identifier[lower] ()== literal[string] :
identifier[print] ( literal[string] )
keyword[for] identifier[s] keyword[in] identifier[sorted] ( identifier[tune_options] . identifier[keys] ()):
identifier[print] ( literal[string] + identifier[s] )
keyword[else] :
identifier[print] ( identifier[usage] ) | def cmd_tuneopt(self, args):
"""Select option for Tune Pot on Channel 6 (quadcopter only)"""
usage = 'usage: tuneopt <set|show|reset|list>'
if self.mpstate.vehicle_type != 'copter':
print('This command is only available for copter')
return # depends on [control=['if'], data=[]]
if len(args) < 1:
print(usage)
return # depends on [control=['if'], data=[]]
if args[0].lower() == 'reset':
self.param_set('TUNE', '0') # depends on [control=['if'], data=[]]
elif args[0].lower() == 'set':
if len(args) < 4:
print('Usage: tuneopt set OPTION LOW HIGH')
return # depends on [control=['if'], data=[]]
option = self.tune_option_validate(args[1])
if not option:
print('Invalid Tune option: ' + args[1])
return # depends on [control=['if'], data=[]]
low = args[2]
high = args[3]
self.param_set('TUNE', tune_options[option])
self.param_set('TUNE_LOW', float(low) * 1000)
self.param_set('TUNE_HIGH', float(high) * 1000) # depends on [control=['if'], data=[]]
elif args[0].lower() == 'show':
self.tune_show() # depends on [control=['if'], data=[]]
elif args[0].lower() == 'list':
print('Options available:')
for s in sorted(tune_options.keys()):
print(' ' + s) # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]]
else:
print(usage) |
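# For reference, the scaling cmd_tuneopt applies on 'tuneopt set OPTION LOW HIGH';
# the option name is hypothetical, the arithmetic is taken from the code above.
low, high = '0.08', '0.2'
assert float(low) * 1000 == 80.0    # written to TUNE_LOW
assert float(high) * 1000 == 200.0  # written to TUNE_HIGH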
def reorder_resource_views(self, resource_views):
# type: (List[Union[ResourceView,Dict,str]]) -> None
"""Order resource views in resource.
Args:
resource_views (List[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries
Returns:
None
"""
if not isinstance(resource_views, list):
raise HDXError('ResourceViews should be a list!')
ids = list()
for resource_view in resource_views:
if isinstance(resource_view, str):
resource_view_id = resource_view
else:
resource_view_id = resource_view['id']
if is_valid_uuid(resource_view_id) is False:
raise HDXError('%s is not a valid resource view id!' % resource_view)
ids.append(resource_view_id)
_, result = self._read_from_hdx('resource view', self.data['id'], 'id',
ResourceView.actions()['reorder'], order=ids) | def function[reorder_resource_views, parameter[self, resource_views]]:
constant[Order resource views in resource.
Args:
resource_views (List[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries
Returns:
None
]
if <ast.UnaryOp object at 0x7da20e954760> begin[:]
<ast.Raise object at 0x7da20e956d70>
variable[ids] assign[=] call[name[list], parameter[]]
for taget[name[resource_view]] in starred[name[resource_views]] begin[:]
if call[name[isinstance], parameter[name[resource_view], name[str]]] begin[:]
variable[resource_view_id] assign[=] name[resource_view]
if compare[call[name[is_valid_uuid], parameter[name[resource_view_id]]] is constant[False]] begin[:]
<ast.Raise object at 0x7da20e956ad0>
call[name[ids].append, parameter[name[resource_view_id]]]
<ast.Tuple object at 0x7da20e957220> assign[=] call[name[self]._read_from_hdx, parameter[constant[resource view], call[name[self].data][constant[id]], constant[id], call[call[name[ResourceView].actions, parameter[]]][constant[reorder]]]] | keyword[def] identifier[reorder_resource_views] ( identifier[self] , identifier[resource_views] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[resource_views] , identifier[list] ):
keyword[raise] identifier[HDXError] ( literal[string] )
identifier[ids] = identifier[list] ()
keyword[for] identifier[resource_view] keyword[in] identifier[resource_views] :
keyword[if] identifier[isinstance] ( identifier[resource_view] , identifier[str] ):
identifier[resource_view_id] = identifier[resource_view]
keyword[else] :
identifier[resource_view_id] = identifier[resource_view] [ literal[string] ]
keyword[if] identifier[is_valid_uuid] ( identifier[resource_view_id] ) keyword[is] keyword[False] :
keyword[raise] identifier[HDXError] ( literal[string] % identifier[resource_view] )
identifier[ids] . identifier[append] ( identifier[resource_view_id] )
identifier[_] , identifier[result] = identifier[self] . identifier[_read_from_hdx] ( literal[string] , identifier[self] . identifier[data] [ literal[string] ], literal[string] ,
identifier[ResourceView] . identifier[actions] ()[ literal[string] ], identifier[order] = identifier[ids] ) | def reorder_resource_views(self, resource_views):
# type: (List[Union[ResourceView,Dict,str]]) -> None
'Order resource views in resource.\n\n Args:\n resource_views (List[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries\n\n Returns:\n None\n '
if not isinstance(resource_views, list):
raise HDXError('ResourceViews should be a list!') # depends on [control=['if'], data=[]]
ids = list()
for resource_view in resource_views:
if isinstance(resource_view, str):
resource_view_id = resource_view # depends on [control=['if'], data=[]]
else:
resource_view_id = resource_view['id']
if is_valid_uuid(resource_view_id) is False:
raise HDXError('%s is not a valid resource view id!' % resource_view) # depends on [control=['if'], data=[]]
ids.append(resource_view_id) # depends on [control=['for'], data=['resource_view']]
(_, result) = self._read_from_hdx('resource view', self.data['id'], 'id', ResourceView.actions()['reorder'], order=ids) |
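# Hedged usage sketch: ids and ResourceView-style dicts may be mixed, but each
# id must be a valid UUID. 'resource' and both UUIDs are placeholders.
views = ['16962ec0-7c73-4d33-9eab-39d7b6f46675', {'id': '9f2d1c7a-27aa-4a17-a450-dd62bf1b0a35'}]
resource.reorder_resource_views(views)  # a non-UUID entry would raise HDXError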
def get_signature_request_file(self, signature_request_id, path_or_file=None, file_type=None, filename=None):
''' Download the PDF copy of the current documents
Args:
signature_request_id (str): Id of the signature request
path_or_file (str or file): A writable File-like object or a full path to save the PDF file to.
filename (str): [DEPRECATED] Filename to save the PDF file to. This should be a full path.
file_type (str): Type of file to return. Either "pdf" for a single merged document or "zip" for a collection of individual documents. Defaults to "pdf" if not specified.
Returns:
True if file is downloaded and successfully written, False otherwise.
'''
request = self._get_request()
url = self.SIGNATURE_REQUEST_DOWNLOAD_PDF_URL + signature_request_id
if file_type:
url += '?file_type=%s' % file_type
return request.get_file(url, path_or_file or filename) | def function[get_signature_request_file, parameter[self, signature_request_id, path_or_file, file_type, filename]]:
constant[ Download the PDF copy of the current documents
Args:
signature_request_id (str): Id of the signature request
path_or_file (str or file): A writable File-like object or a full path to save the PDF file to.
filename (str): [DEPRECATED] Filename to save the PDF file to. This should be a full path.
file_type (str): Type of file to return. Either "pdf" for a single merged document or "zip" for a collection of individual documents. Defaults to "pdf" if not specified.
Returns:
True if file is downloaded and successfully written, False otherwise.
]
variable[request] assign[=] call[name[self]._get_request, parameter[]]
variable[url] assign[=] binary_operation[name[self].SIGNATURE_REQUEST_DOWNLOAD_PDF_URL + name[signature_request_id]]
if name[file_type] begin[:]
<ast.AugAssign object at 0x7da1b0c16f50>
return[call[name[request].get_file, parameter[name[url], <ast.BoolOp object at 0x7da1b0c14be0>]]] | keyword[def] identifier[get_signature_request_file] ( identifier[self] , identifier[signature_request_id] , identifier[path_or_file] = keyword[None] , identifier[file_type] = keyword[None] , identifier[filename] = keyword[None] ):
literal[string]
identifier[request] = identifier[self] . identifier[_get_request] ()
identifier[url] = identifier[self] . identifier[SIGNATURE_REQUEST_DOWNLOAD_PDF_URL] + identifier[signature_request_id]
keyword[if] identifier[file_type] :
identifier[url] += literal[string] % identifier[file_type]
keyword[return] identifier[request] . identifier[get_file] ( identifier[url] , identifier[path_or_file] keyword[or] identifier[filename] ) | def get_signature_request_file(self, signature_request_id, path_or_file=None, file_type=None, filename=None):
""" Download the PDF copy of the current documents
Args:
signature_request_id (str): Id of the signature request
path_or_file (str or file): A writable File-like object or a full path to save the PDF file to.
filename (str): [DEPRECATED] Filename to save the PDF file to. This should be a full path.
file_type (str): Type of file to return. Either "pdf" for a single merged document or "zip" for a collection of individual documents. Defaults to "pdf" if not specified.
Returns:
True if file is downloaded and successfully written, False otherwise.
"""
request = self._get_request()
url = self.SIGNATURE_REQUEST_DOWNLOAD_PDF_URL + signature_request_id
if file_type:
url += '?file_type=%s' % file_type # depends on [control=['if'], data=[]]
return request.get_file(url, path_or_file or filename) |
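# Usage sketch, assuming 'client' is an initialized HelloSign API client;
# the request id and output path are placeholders.
ok = client.get_signature_request_file('fa5c8a0b9c7d4e21a3b5d6f7e8091a2b', path_or_file='/tmp/request.zip', file_type='zip')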
def _dataset_report_helper(cls, dataset, catalog_homepage=None):
"""Toma un dict con la metadata de un dataset, y devuelve un dict coni
los valores que dataset_report() usa para reportar sobre él.
Args:
dataset (dict): Diccionario con la metadata de un dataset.
Returns:
dict: Diccionario con los campos a nivel dataset que requiere
dataset_report().
"""
publisher_name = helpers.traverse_dict(dataset, ["publisher", "name"])
languages = cls._stringify_list(dataset.get("language"))
super_themes = cls._stringify_list(dataset.get("superTheme"))
themes = cls._stringify_list(dataset.get("theme"))
def _stringify_distribution(distribution):
title = distribution.get("title")
url = distribution.get("downloadURL")
return "\"{}\": {}".format(title, url)
distributions = [d for d in dataset["distribution"]
if isinstance(d, dict)]
# build the list of distributions
distributions_list = None
if isinstance(distributions, list):
distributions_strings = [
_stringify_distribution(d) for d in distributions
]
distributions_list = "\n\n".join(distributions_strings)
# build the list of formats
distributions_formats = json.dumps(
helpers.count_distribution_formats_dataset(dataset))
fields = OrderedDict()
fields["dataset_identifier"] = dataset.get("identifier")
fields["dataset_title"] = dataset.get("title")
fields["dataset_accrualPeriodicity"] = dataset.get(
"accrualPeriodicity")
fields["dataset_description"] = dataset.get("description")
fields["dataset_publisher_name"] = publisher_name
fields["dataset_superTheme"] = super_themes
fields["dataset_theme"] = themes
fields["dataset_landingPage"] = dataset.get("landingPage")
fields["dataset_landingPage_generated"] = cls._generate_landingPage(
catalog_homepage, dataset.get("identifier")
)
fields["dataset_issued"] = dataset.get("issued")
fields["dataset_modified"] = dataset.get("modified")
fields["distributions_formats"] = distributions_formats
fields["distributions_list"] = distributions_list
fields["dataset_license"] = dataset.get("license")
fields["dataset_language"] = languages
fields["dataset_spatial"] = dataset.get("spatial")
fields["dataset_temporal"] = dataset.get("temporal")
return fields | def function[_dataset_report_helper, parameter[cls, dataset, catalog_homepage]]:
constant[Take a dict with a dataset's metadata and return a dict with
the values that dataset_report() uses to report on it.
Args:
dataset (dict): Dictionary with a dataset's metadata.
Returns:
dict: Dictionary with the dataset-level fields that
dataset_report() requires.
]
variable[publisher_name] assign[=] call[name[helpers].traverse_dict, parameter[name[dataset], list[[<ast.Constant object at 0x7da1b04d6890>, <ast.Constant object at 0x7da1b04d7850>]]]]
variable[languages] assign[=] call[name[cls]._stringify_list, parameter[call[name[dataset].get, parameter[constant[language]]]]]
variable[super_themes] assign[=] call[name[cls]._stringify_list, parameter[call[name[dataset].get, parameter[constant[superTheme]]]]]
variable[themes] assign[=] call[name[cls]._stringify_list, parameter[call[name[dataset].get, parameter[constant[theme]]]]]
def function[_stringify_distribution, parameter[distribution]]:
variable[title] assign[=] call[name[distribution].get, parameter[constant[title]]]
variable[url] assign[=] call[name[distribution].get, parameter[constant[downloadURL]]]
return[call[constant["{}": {}].format, parameter[name[title], name[url]]]]
variable[distributions] assign[=] <ast.ListComp object at 0x7da1b04d69b0>
variable[distributions_list] assign[=] constant[None]
if call[name[isinstance], parameter[name[distributions], name[list]]] begin[:]
variable[distributions_strings] assign[=] <ast.ListComp object at 0x7da1b04d6fe0>
variable[distributions_list] assign[=] call[constant[
].join, parameter[name[distributions_strings]]]
variable[distributions_formats] assign[=] call[name[json].dumps, parameter[call[name[helpers].count_distribution_formats_dataset, parameter[name[dataset]]]]]
variable[fields] assign[=] call[name[OrderedDict], parameter[]]
call[name[fields]][constant[dataset_identifier]] assign[=] call[name[dataset].get, parameter[constant[identifier]]]
call[name[fields]][constant[dataset_title]] assign[=] call[name[dataset].get, parameter[constant[title]]]
call[name[fields]][constant[dataset_accrualPeriodicity]] assign[=] call[name[dataset].get, parameter[constant[accrualPeriodicity]]]
call[name[fields]][constant[dataset_description]] assign[=] call[name[dataset].get, parameter[constant[description]]]
call[name[fields]][constant[dataset_publisher_name]] assign[=] name[publisher_name]
call[name[fields]][constant[dataset_superTheme]] assign[=] name[super_themes]
call[name[fields]][constant[dataset_theme]] assign[=] name[themes]
call[name[fields]][constant[dataset_landingPage]] assign[=] call[name[dataset].get, parameter[constant[landingPage]]]
call[name[fields]][constant[dataset_landingPage_generated]] assign[=] call[name[cls]._generate_landingPage, parameter[name[catalog_homepage], call[name[dataset].get, parameter[constant[identifier]]]]]
call[name[fields]][constant[dataset_issued]] assign[=] call[name[dataset].get, parameter[constant[issued]]]
call[name[fields]][constant[dataset_modified]] assign[=] call[name[dataset].get, parameter[constant[modified]]]
call[name[fields]][constant[distributions_formats]] assign[=] name[distributions_formats]
call[name[fields]][constant[distributions_list]] assign[=] name[distributions_list]
call[name[fields]][constant[dataset_license]] assign[=] call[name[dataset].get, parameter[constant[license]]]
call[name[fields]][constant[dataset_language]] assign[=] name[languages]
call[name[fields]][constant[dataset_spatial]] assign[=] call[name[dataset].get, parameter[constant[spatial]]]
call[name[fields]][constant[dataset_temporal]] assign[=] call[name[dataset].get, parameter[constant[temporal]]]
return[name[fields]] | keyword[def] identifier[_dataset_report_helper] ( identifier[cls] , identifier[dataset] , identifier[catalog_homepage] = keyword[None] ):
literal[string]
identifier[publisher_name] = identifier[helpers] . identifier[traverse_dict] ( identifier[dataset] ,[ literal[string] , literal[string] ])
identifier[languages] = identifier[cls] . identifier[_stringify_list] ( identifier[dataset] . identifier[get] ( literal[string] ))
identifier[super_themes] = identifier[cls] . identifier[_stringify_list] ( identifier[dataset] . identifier[get] ( literal[string] ))
identifier[themes] = identifier[cls] . identifier[_stringify_list] ( identifier[dataset] . identifier[get] ( literal[string] ))
keyword[def] identifier[_stringify_distribution] ( identifier[distribution] ):
identifier[title] = identifier[distribution] . identifier[get] ( literal[string] )
identifier[url] = identifier[distribution] . identifier[get] ( literal[string] )
keyword[return] literal[string] . identifier[format] ( identifier[title] , identifier[url] )
identifier[distributions] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[dataset] [ literal[string] ]
keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] )]
identifier[distributions_list] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[distributions] , identifier[list] ):
identifier[distributions_strings] =[
identifier[_stringify_distribution] ( identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[distributions]
]
identifier[distributions_list] = literal[string] . identifier[join] ( identifier[distributions_strings] )
identifier[distributions_formats] = identifier[json] . identifier[dumps] (
identifier[helpers] . identifier[count_distribution_formats_dataset] ( identifier[dataset] ))
identifier[fields] = identifier[OrderedDict] ()
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] (
literal[string] )
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[publisher_name]
identifier[fields] [ literal[string] ]= identifier[super_themes]
identifier[fields] [ literal[string] ]= identifier[themes]
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[cls] . identifier[_generate_landingPage] (
identifier[catalog_homepage] , identifier[dataset] . identifier[get] ( literal[string] )
)
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[distributions_formats]
identifier[fields] [ literal[string] ]= identifier[distributions_list]
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[languages]
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
identifier[fields] [ literal[string] ]= identifier[dataset] . identifier[get] ( literal[string] )
keyword[return] identifier[fields] | def _dataset_report_helper(cls, dataset, catalog_homepage=None):
"""Toma un dict con la metadata de un dataset, y devuelve un dict coni
los valores que dataset_report() usa para reportar sobre él.
Args:
dataset (dict): Diccionario con la metadata de un dataset.
Returns:
dict: Diccionario con los campos a nivel dataset que requiere
dataset_report().
"""
publisher_name = helpers.traverse_dict(dataset, ['publisher', 'name'])
languages = cls._stringify_list(dataset.get('language'))
super_themes = cls._stringify_list(dataset.get('superTheme'))
themes = cls._stringify_list(dataset.get('theme'))
def _stringify_distribution(distribution):
title = distribution.get('title')
url = distribution.get('downloadURL')
return '"{}": {}'.format(title, url)
distributions = [d for d in dataset['distribution'] if isinstance(d, dict)]
# build the list of distributions
distributions_list = None
if isinstance(distributions, list):
distributions_strings = [_stringify_distribution(d) for d in distributions]
distributions_list = '\n\n'.join(distributions_strings) # depends on [control=['if'], data=[]]
# build the list of formats
distributions_formats = json.dumps(helpers.count_distribution_formats_dataset(dataset))
fields = OrderedDict()
fields['dataset_identifier'] = dataset.get('identifier')
fields['dataset_title'] = dataset.get('title')
fields['dataset_accrualPeriodicity'] = dataset.get('accrualPeriodicity')
fields['dataset_description'] = dataset.get('description')
fields['dataset_publisher_name'] = publisher_name
fields['dataset_superTheme'] = super_themes
fields['dataset_theme'] = themes
fields['dataset_landingPage'] = dataset.get('landingPage')
fields['dataset_landingPage_generated'] = cls._generate_landingPage(catalog_homepage, dataset.get('identifier'))
fields['dataset_issued'] = dataset.get('issued')
fields['dataset_modified'] = dataset.get('modified')
fields['distributions_formats'] = distributions_formats
fields['distributions_list'] = distributions_list
fields['dataset_license'] = dataset.get('license')
fields['dataset_language'] = languages
fields['dataset_spatial'] = dataset.get('spatial')
fields['dataset_temporal'] = dataset.get('temporal')
return fields |
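# Shape-only sketch for _dataset_report_helper(): the owning class name and
# catalog URL are assumptions, and the minimal dataset below is illustrative.
dataset = {'identifier': '1', 'title': 'Example', 'publisher': {'name': 'Org'}, 'distribution': [{'title': 'CSV', 'downloadURL': 'http://example.com/d.csv'}]}
fields = DataJson._dataset_report_helper(dataset, catalog_homepage='http://example.com')  # class name assumed
fields['dataset_publisher_name']  # -> 'Org'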
def ResourceEnum(ctx):
"""Resource Type Enumeration."""
return Enum(
ctx,
food=0,
wood=1,
stone=2,
gold=3,
decay=12,
fish=17,
default=Pass # lots of resource types exist
) | def function[ResourceEnum, parameter[ctx]]:
constant[Resource Type Enumeration.]
return[call[name[Enum], parameter[name[ctx]]]] | keyword[def] identifier[ResourceEnum] ( identifier[ctx] ):
literal[string]
keyword[return] identifier[Enum] (
identifier[ctx] ,
identifier[food] = literal[int] ,
identifier[wood] = literal[int] ,
identifier[stone] = literal[int] ,
identifier[gold] = literal[int] ,
identifier[decay] = literal[int] ,
identifier[fish] = literal[int] ,
identifier[default] = identifier[Pass]
) | def ResourceEnum(ctx):
"""Resource Type Enumeration.""" # lots of resource types exist
return Enum(ctx, food=0, wood=1, stone=2, gold=3, decay=12, fish=17, default=Pass) |
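# Parsing sketch with the construct library; using Byte as the subcon is an
# assumption about the caller. Unmapped codes fall through via default=Pass.
from construct import Byte
ResourceEnum(Byte).parse(b'\x03')  # -> 'gold'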
def deriv(self, x: str, ctype: ContentType) -> SchemaPattern:
"""Return derivative of the receiver."""
return (self.pattern.deriv(x, ctype) if self.check_when() else
NotAllowed()) | def function[deriv, parameter[self, x, ctype]]:
constant[Return derivative of the receiver.]
return[<ast.IfExp object at 0x7da1b02e7b80>] | keyword[def] identifier[deriv] ( identifier[self] , identifier[x] : identifier[str] , identifier[ctype] : identifier[ContentType] )-> identifier[SchemaPattern] :
literal[string]
keyword[return] ( identifier[self] . identifier[pattern] . identifier[deriv] ( identifier[x] , identifier[ctype] ) keyword[if] identifier[self] . identifier[check_when] () keyword[else]
identifier[NotAllowed] ()) | def deriv(self, x: str, ctype: ContentType) -> SchemaPattern:
"""Return derivative of the receiver."""
return self.pattern.deriv(x, ctype) if self.check_when() else NotAllowed() |
def get_short_desc(long_desc):
"""Get first sentence of first paragraph of long description."""
found = False
olines = []
for line in [item.rstrip() for item in long_desc.split("\n")]:
if found and (((not line) and (not olines)) or (line and olines)):
olines.append(line)
elif found and olines and (not line):
return (" ".join(olines).split(".")[0]).strip()
found = line == ".. [[[end]]]" if not found else found
return "" | def function[get_short_desc, parameter[long_desc]]:
constant[Get first sentence of first paragraph of long description.]
variable[found] assign[=] constant[False]
variable[olines] assign[=] list[[]]
for taget[name[line]] in starred[<ast.ListComp object at 0x7da1b033d3f0>] begin[:]
if <ast.BoolOp object at 0x7da1b033ec50> begin[:]
call[name[olines].append, parameter[name[line]]]
variable[found] assign[=] <ast.IfExp object at 0x7da1b033d780>
return[constant[]] | keyword[def] identifier[get_short_desc] ( identifier[long_desc] ):
literal[string]
identifier[found] = keyword[False]
identifier[olines] =[]
keyword[for] identifier[line] keyword[in] [ identifier[item] . identifier[rstrip] () keyword[for] identifier[item] keyword[in] identifier[long_desc] . identifier[split] ( literal[string] )]:
keyword[if] identifier[found] keyword[and] ((( keyword[not] identifier[line] ) keyword[and] ( keyword[not] identifier[olines] )) keyword[or] ( identifier[line] keyword[and] identifier[olines] )):
identifier[olines] . identifier[append] ( identifier[line] )
keyword[elif] identifier[found] keyword[and] identifier[olines] keyword[and] ( keyword[not] identifier[line] ):
keyword[return] ( literal[string] . identifier[join] ( identifier[olines] ). identifier[split] ( literal[string] )[ literal[int] ]). identifier[strip] ()
identifier[found] = identifier[line] == literal[string] keyword[if] keyword[not] identifier[found] keyword[else] identifier[found]
keyword[return] literal[string] | def get_short_desc(long_desc):
"""Get first sentence of first paragraph of long description."""
found = False
olines = []
for line in [item.rstrip() for item in long_desc.split('\n')]:
if found and (not line and (not olines) or (line and olines)):
olines.append(line) # depends on [control=['if'], data=[]]
elif found and olines and (not line):
return ' '.join(olines).split('.')[0].strip() # depends on [control=['if'], data=[]]
found = line == '.. [[[end]]]' if not found else found # depends on [control=['for'], data=['line']]
return '' |
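# Worked example: the first sentence of the first paragraph after the
# '.. [[[end]]]' cog marker is returned.
long_desc = 'Header\n.. [[[end]]]\n\nThis package does X. It also does Y.\n\nMore text.'
get_short_desc(long_desc)  # -> 'This package does X'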
def untldict_normalizer(untl_dict, normalizations):
"""Normalize UNTL elements by their qualifier.
Takes a UNTL descriptive metadata dictionary and a dictionary of
the elements and the qualifiers for normalization:
{'element1': ['qualifier1', 'qualifier2'],
'element2': ['qualifier3']}
and normalizes the elements with that qualifier.
"""
# Loop through the element types in the UNTL metadata.
for element_type, element_list in untl_dict.items():
# A normalization is required for that element type.
if element_type in normalizations:
# Get the required normalizations for specific qualifiers list.
norm_qualifier_list = normalizations.get(element_type)
# Loop through the element lists within that element type.
for element in element_list:
# Determine if the qualifier requires normalization.
qualifier = element.get('qualifier', None)
if qualifier in norm_qualifier_list:
content = element.get('content', None)
# Determine if there is normalizing for the element.
if element_type in ELEMENT_NORMALIZERS:
elem_norms = ELEMENT_NORMALIZERS.get(element_type,
None)
# If the qualified element requires a
# normalization and has content, replace the
# content with the normalized.
if qualifier in elem_norms:
if content and content != '':
element['content'] = \
elem_norms[qualifier](content)
return untl_dict | def function[untldict_normalizer, parameter[untl_dict, normalizations]]:
constant[Normalize UNTL elements by their qualifier.
Takes a UNTL descriptive metadata dictionary and a dictionary of
the elements and the qualifiers for normalization:
{'element1': ['qualifier1', 'qualifier2'],
'element2': ['qualifier3']}
and normalizes the elements with that qualifier.
]
for taget[tuple[[<ast.Name object at 0x7da1b24af580>, <ast.Name object at 0x7da1b24ac100>]]] in starred[call[name[untl_dict].items, parameter[]]] begin[:]
if compare[name[element_type] in name[normalizations]] begin[:]
variable[norm_qualifier_list] assign[=] call[name[normalizations].get, parameter[name[element_type]]]
for taget[name[element]] in starred[name[element_list]] begin[:]
variable[qualifier] assign[=] call[name[element].get, parameter[constant[qualifier], constant[None]]]
if compare[name[qualifier] in name[norm_qualifier_list]] begin[:]
variable[content] assign[=] call[name[element].get, parameter[constant[content], constant[None]]]
if compare[name[element_type] in name[ELEMENT_NORMALIZERS]] begin[:]
variable[elem_norms] assign[=] call[name[ELEMENT_NORMALIZERS].get, parameter[name[element_type], constant[None]]]
if compare[name[qualifier] in name[elem_norms]] begin[:]
if <ast.BoolOp object at 0x7da1b24ad630> begin[:]
call[name[element]][constant[content]] assign[=] call[call[name[elem_norms]][name[qualifier]], parameter[name[content]]]
return[name[untl_dict]] | keyword[def] identifier[untldict_normalizer] ( identifier[untl_dict] , identifier[normalizations] ):
literal[string]
keyword[for] identifier[element_type] , identifier[element_list] keyword[in] identifier[untl_dict] . identifier[items] ():
keyword[if] identifier[element_type] keyword[in] identifier[normalizations] :
identifier[norm_qualifier_list] = identifier[normalizations] . identifier[get] ( identifier[element_type] )
keyword[for] identifier[element] keyword[in] identifier[element_list] :
identifier[qualifier] = identifier[element] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[qualifier] keyword[in] identifier[norm_qualifier_list] :
identifier[content] = identifier[element] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[element_type] keyword[in] identifier[ELEMENT_NORMALIZERS] :
identifier[elem_norms] = identifier[ELEMENT_NORMALIZERS] . identifier[get] ( identifier[element_type] ,
keyword[None] )
keyword[if] identifier[qualifier] keyword[in] identifier[elem_norms] :
keyword[if] identifier[content] keyword[and] identifier[content] != literal[string] :
identifier[element] [ literal[string] ]= identifier[elem_norms] [ identifier[qualifier] ]( identifier[content] )
keyword[return] identifier[untl_dict] | def untldict_normalizer(untl_dict, normalizations):
"""Normalize UNTL elements by their qualifier.
Takes a UNTL descriptive metadata dictionary and a dictionary of
the elements and the qualifiers for normalization:
{'element1': ['qualifier1', 'qualifier2'],
'element2': ['qualifier3']}
and normalizes the elements with that qualifier.
"""
# Loop through the element types in the UNTL metadata.
for (element_type, element_list) in untl_dict.items():
# A normalization is required for that element type.
if element_type in normalizations:
# Get the required normalizations for specific qualifiers list.
norm_qualifier_list = normalizations.get(element_type)
# Loop through the element lists within that element type.
for element in element_list:
# Determine if the qualifier requires normalization.
qualifier = element.get('qualifier', None)
if qualifier in norm_qualifier_list:
content = element.get('content', None)
# Determine if there is normalizing for the element.
if element_type in ELEMENT_NORMALIZERS:
elem_norms = ELEMENT_NORMALIZERS.get(element_type, None)
# If the qualified element requires a
# normalization and has content, replace the
# content with the normalized.
if qualifier in elem_norms:
if content and content != '':
element['content'] = elem_norms[qualifier](content) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['qualifier', 'elem_norms']] # depends on [control=['if'], data=['element_type', 'ELEMENT_NORMALIZERS']] # depends on [control=['if'], data=['qualifier']] # depends on [control=['for'], data=['element']] # depends on [control=['if'], data=['element_type', 'normalizations']] # depends on [control=['for'], data=[]]
return untl_dict |
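# Shape-only sketch for untldict_normalizer(); whether 'title'/'officialtitle'
# has a registered function in ELEMENT_NORMALIZERS is an assumption.
untl_dict = {'title': [{'qualifier': 'officialtitle', 'content': ' A Title '}]}
untldict_normalizer(untl_dict, {'title': ['officialtitle']})
# If the registered normalizer were str.strip, content would become 'A Title'.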
def print_number_str(self, value, justify_right=True):
"""Print a 4 character long string of numeric values to the display. This
function is similar to print_str but will interpret periods not as
characters but as decimal points associated with the previous character.
"""
# Calculate length of value without decimals.
length = len(value.translate(None, '.'))
# Error if value without decimals is longer than 4 characters.
if length > 4:
self.print_str('----')
return
# Calculate starting position of digits based on justification.
pos = (4-length) if justify_right else 0
# Go through each character and print it on the display.
for i, ch in enumerate(value):
if ch == '.':
# Print decimal points on the previous digit.
self.set_decimal(pos-1, True)
else:
self.set_digit(pos, ch)
pos += 1 | def function[print_number_str, parameter[self, value, justify_right]]:
constant[Print a 4 character long string of numeric values to the display. This
function is similar to print_str but will interpret periods not as
characters but as decimal points associated with the previous character.
]
variable[length] assign[=] call[name[len], parameter[call[name[value].translate, parameter[constant[None], constant[.]]]]]
if compare[name[length] greater[>] constant[4]] begin[:]
call[name[self].print_str, parameter[constant[----]]]
return[None]
variable[pos] assign[=] <ast.IfExp object at 0x7da1b102bf70>
for taget[tuple[[<ast.Name object at 0x7da1b102bb50>, <ast.Name object at 0x7da1b10294b0>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
if compare[name[ch] equal[==] constant[.]] begin[:]
call[name[self].set_decimal, parameter[binary_operation[name[pos] - constant[1]], constant[True]]] | keyword[def] identifier[print_number_str] ( identifier[self] , identifier[value] , identifier[justify_right] = keyword[True] ):
literal[string]
identifier[length] = identifier[len] ( identifier[value] . identifier[translate] ( keyword[None] , literal[string] ))
keyword[if] identifier[length] > literal[int] :
identifier[self] . identifier[print_str] ( literal[string] )
keyword[return]
identifier[pos] =( literal[int] - identifier[length] ) keyword[if] identifier[justify_right] keyword[else] literal[int]
keyword[for] identifier[i] , identifier[ch] keyword[in] identifier[enumerate] ( identifier[value] ):
keyword[if] identifier[ch] == literal[string] :
identifier[self] . identifier[set_decimal] ( identifier[pos] - literal[int] , keyword[True] )
keyword[else] :
identifier[self] . identifier[set_digit] ( identifier[pos] , identifier[ch] )
identifier[pos] += literal[int] | def print_number_str(self, value, justify_right=True):
"""Print a 4 character long string of numeric values to the display. This
function is similar to print_str but will interpret periods not as
characters but as decimal points associated with the previous character.
"""
# Calculate length of value without decimals.
length = len(value.translate(None, '.'))
# Error if value without decimals is longer than 4 characters.
if length > 4:
self.print_str('----')
return # depends on [control=['if'], data=[]]
# Calculate starting position of digits based on justification.
pos = 4 - length if justify_right else 0
# Go through each character and print it on the display.
for (i, ch) in enumerate(value):
if ch == '.':
# Print decimal points on the previous digit.
self.set_decimal(pos - 1, True) # depends on [control=['if'], data=[]]
else:
self.set_digit(pos, ch)
pos += 1 # depends on [control=['for'], data=[]] |
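# Usage sketch, assuming 'display' is an initialized seven-segment object
# from this library (buffer writes only; flushing the display is a separate call).
display.print_number_str('12.34')  # digits 1 2 3 4, decimal point lit after the '2'
display.print_number_str('12345')  # more than 4 digits: shows '----'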
def start_all(self):
"""
Start all nodes
"""
pool = Pool(concurrency=3)
for node in self.nodes.values():
pool.append(node.start)
yield from pool.join() | def function[start_all, parameter[self]]:
constant[
Start all nodes
]
variable[pool] assign[=] call[name[Pool], parameter[]]
for taget[name[node]] in starred[call[name[self].nodes.values, parameter[]]] begin[:]
call[name[pool].append, parameter[name[node].start]]
<ast.YieldFrom object at 0x7da204961d80> | keyword[def] identifier[start_all] ( identifier[self] ):
literal[string]
identifier[pool] = identifier[Pool] ( identifier[concurrency] = literal[int] )
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[nodes] . identifier[values] ():
identifier[pool] . identifier[append] ( identifier[node] . identifier[start] )
keyword[yield] keyword[from] identifier[pool] . identifier[join] () | def start_all(self):
"""
Start all nodes
"""
pool = Pool(concurrency=3)
for node in self.nodes.values():
pool.append(node.start) # depends on [control=['for'], data=['node']]
yield from pool.join() |
def as_list_with_options(self):
"""
Similar to list(self) except elements which have an option associated
with them are returned as a ``TListItemWithOption``
"""
it = ROOT.TIter(self)
elem = it.Next()
result = []
while elem:
if it.GetOption():
result.append(TListItemWithOption(elem, it.GetOption()))
else:
result.append(elem)
elem = it.Next()
return result | def function[as_list_with_options, parameter[self]]:
constant[
Similar to list(self) except elements which have an option associated
with them are returned as a ``TListItemWithOption``
]
variable[it] assign[=] call[name[ROOT].TIter, parameter[name[self]]]
variable[elem] assign[=] call[name[it].Next, parameter[]]
variable[result] assign[=] list[[]]
while name[elem] begin[:]
if call[name[it].GetOption, parameter[]] begin[:]
call[name[result].append, parameter[call[name[TListItemWithOption], parameter[name[elem], call[name[it].GetOption, parameter[]]]]]]
variable[elem] assign[=] call[name[it].Next, parameter[]]
return[name[result]] | keyword[def] identifier[as_list_with_options] ( identifier[self] ):
literal[string]
identifier[it] = identifier[ROOT] . identifier[TIter] ( identifier[self] )
identifier[elem] = identifier[it] . identifier[Next] ()
identifier[result] =[]
keyword[while] identifier[elem] :
keyword[if] identifier[it] . identifier[GetOption] ():
identifier[result] . identifier[append] ( identifier[TListItemWithOption] ( identifier[elem] , identifier[it] . identifier[GetOption] ()))
keyword[else] :
identifier[result] . identifier[append] ( identifier[elem] )
identifier[elem] = identifier[it] . identifier[Next] ()
keyword[return] identifier[result] | def as_list_with_options(self):
"""
Similar to list(self) except elements which have an option associated
with them are returned as a ``TListItemWithOption``
"""
it = ROOT.TIter(self)
elem = it.Next()
result = []
while elem:
if it.GetOption():
result.append(TListItemWithOption(elem, it.GetOption())) # depends on [control=['if'], data=[]]
else:
result.append(elem)
elem = it.Next() # depends on [control=['while'], data=[]]
return result |
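# ROOT-side sketch: this method is assumed to live on a patched TList class,
# so 'lst', 'hist' and 'graph' are placeholders for such objects.
lst.Add(hist, 'hist')  # stored with a draw option
lst.Add(graph)         # stored without one
lst.as_list_with_options()  # -> [TListItemWithOption(hist, 'hist'), graph]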
def downgrade(account):
"""Transforms data from v2 format to a v1 format"""
d_account = dict(schema_version=1, metadata={'email': account['email']},
tags=list(set([account['environment']] + account.get('tags', []))))
v1_services = {}
for service in account.get('services', []):
if service['name'] == 's3':
if service['metadata'].get('name'):
d_account['metadata']['s3_name'] = service['metadata']['name']
elif service['name'] == 'cloudtrail':
d_account['metadata']['cloudtrail_index'] = service['metadata']['esIndex']
d_account['metadata']['cloudtrail_kibana_url'] = service['metadata']['kibanaUrl']
elif service['name'] == 'bastion':
d_account['bastion'] = service['metadata']['hostname']
elif service['name'] == 'titus':
v1_services['titus'] = {
'stacks': service['metadata']['stacks'],
'enabled': service['status'][0]['enabled']
}
elif service['name'] == 'spinnaker':
v1_services['spinnaker'] = {
'name': service['metadata'].get('name', account["name"]),
'enabled': service['status'][0]['enabled']
}
elif service['name'] == 'awwwdit':
v1_services['awwwdit'] = {
'enabled': service['status'][0]['enabled']
}
elif service['name'] == 'security_monkey':
v1_services['security_monkey'] = {
'enabled': service['status'][0]['enabled']
}
elif service['name'] == 'poseidon':
v1_services['poseidon'] = {
'enabled': service['status'][0]['enabled']
}
elif service['name'] == 'rolliepollie':
v1_services['rolliepollie'] = {
'enabled': service['status'][0]['enabled']
}
elif service['name'] == 'lazyfalcon':
owner = None
if service.get('metadata'):
if service['metadata'].get('owner'):
owner = service['metadata']['owner']
v1_services['lazyfalcon'] = {
'enabled': service['status'][0]['enabled'],
'owner': owner
}
if account['provider'] == 'aws':
d_account['metadata']['account_number'] = account['id']
elif account['provider'] == 'gcp':
d_account['metadata']['project_id'] = account['id']
d_account['id'] = account['provider'] + '-' + account['id']
d_account['cmc_required'] = account['sensitive']
d_account['name'] = account['name']
d_account['alias'] = account['aliases']
d_account['description'] = account['description']
d_account['owners'] = account['contacts']
d_account['type'] = account['provider']
d_account['ours'] = True if account['owner'] == 'netflix' else False
d_account['netflix'] = True if account['owner'] == 'netflix' else False
d_account['services'] = v1_services
d_account['account_status'] = account['account_status']
return d_account | def function[downgrade, parameter[account]]:
constant[Transforms data from v2 format to a v1 format]
variable[d_account] assign[=] call[name[dict], parameter[]]
variable[v1_services] assign[=] dictionary[[], []]
for taget[name[service]] in starred[call[name[account].get, parameter[constant[services], list[[]]]]] begin[:]
if compare[call[name[service]][constant[name]] equal[==] constant[s3]] begin[:]
if call[call[name[service]][constant[metadata]].get, parameter[constant[name]]] begin[:]
call[call[name[d_account]][constant[metadata]]][constant[s3_name]] assign[=] call[call[name[service]][constant[metadata]]][constant[name]]
if compare[call[name[account]][constant[provider]] equal[==] constant[aws]] begin[:]
call[call[name[d_account]][constant[metadata]]][constant[account_number]] assign[=] call[name[account]][constant[id]]
call[name[d_account]][constant[id]] assign[=] binary_operation[binary_operation[call[name[account]][constant[provider]] + constant[-]] + call[name[account]][constant[id]]]
call[name[d_account]][constant[cmc_required]] assign[=] call[name[account]][constant[sensitive]]
call[name[d_account]][constant[name]] assign[=] call[name[account]][constant[name]]
call[name[d_account]][constant[alias]] assign[=] call[name[account]][constant[aliases]]
call[name[d_account]][constant[description]] assign[=] call[name[account]][constant[description]]
call[name[d_account]][constant[owners]] assign[=] call[name[account]][constant[contacts]]
call[name[d_account]][constant[type]] assign[=] call[name[account]][constant[provider]]
call[name[d_account]][constant[ours]] assign[=] <ast.IfExp object at 0x7da1b0841600>
call[name[d_account]][constant[netflix]] assign[=] <ast.IfExp object at 0x7da1b0840760>
call[name[d_account]][constant[services]] assign[=] name[v1_services]
call[name[d_account]][constant[account_status]] assign[=] call[name[account]][constant[account_status]]
return[name[d_account]] | keyword[def] identifier[downgrade] ( identifier[account] ):
literal[string]
identifier[d_account] = identifier[dict] ( identifier[schema_version] = literal[int] , identifier[metadata] ={ literal[string] : identifier[account] [ literal[string] ]},
identifier[tags] = identifier[list] ( identifier[set] ([ identifier[account] [ literal[string] ]]+ identifier[account] . identifier[get] ( literal[string] ,[]))))
identifier[v1_services] ={}
keyword[for] identifier[service] keyword[in] identifier[account] . identifier[get] ( literal[string] ,[]):
keyword[if] identifier[service] [ literal[string] ]== literal[string] :
keyword[if] identifier[service] [ literal[string] ]. identifier[get] ( literal[string] ):
identifier[d_account] [ literal[string] ][ literal[string] ]= identifier[service] [ literal[string] ][ literal[string] ]
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[d_account] [ literal[string] ][ literal[string] ]= identifier[service] [ literal[string] ][ literal[string] ]
identifier[d_account] [ literal[string] ][ literal[string] ]= identifier[service] [ literal[string] ][ literal[string] ]
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[d_account] [ literal[string] ]= identifier[service] [ literal[string] ][ literal[string] ]
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ][ literal[string] ],
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ]
}
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ]. identifier[get] ( literal[string] , identifier[account] [ literal[string] ]),
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ]
}
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ]
}
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ]
}
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ]
}
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ]
}
keyword[elif] identifier[service] [ literal[string] ]== literal[string] :
identifier[owner] = keyword[None]
keyword[if] identifier[service] . identifier[get] ( literal[string] ):
keyword[if] identifier[service] [ literal[string] ]. identifier[get] ( literal[string] ):
identifier[owner] = identifier[service] [ literal[string] ][ literal[string] ]
identifier[v1_services] [ literal[string] ]={
literal[string] : identifier[service] [ literal[string] ][ literal[int] ][ literal[string] ],
literal[string] : identifier[owner]
}
keyword[if] identifier[account] [ literal[string] ]== literal[string] :
identifier[d_account] [ literal[string] ][ literal[string] ]= identifier[account] [ literal[string] ]
keyword[elif] identifier[account] [ literal[string] ]== literal[string] :
identifier[d_account] [ literal[string] ][ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]+ literal[string] + identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
identifier[d_account] [ literal[string] ]= keyword[True] keyword[if] identifier[account] [ literal[string] ]== literal[string] keyword[else] keyword[False]
identifier[d_account] [ literal[string] ]= keyword[True] keyword[if] identifier[account] [ literal[string] ]== literal[string] keyword[else] keyword[False]
identifier[d_account] [ literal[string] ]= identifier[v1_services]
identifier[d_account] [ literal[string] ]= identifier[account] [ literal[string] ]
keyword[return] identifier[d_account] | def downgrade(account):
"""Transforms data from v2 format to a v1 format"""
d_account = dict(schema_version=1, metadata={'email': account['email']}, tags=list(set([account['environment']] + account.get('tags', []))))
v1_services = {}
for service in account.get('services', []):
if service['name'] == 's3':
if service['metadata'].get('name'):
d_account['metadata']['s3_name'] = service['metadata']['name'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif service['name'] == 'cloudtrail':
d_account['metadata']['cloudtrail_index'] = service['metadata']['esIndex']
d_account['metadata']['cloudtrail_kibana_url'] = service['metadata']['kibanaUrl'] # depends on [control=['if'], data=[]]
elif service['name'] == 'bastion':
d_account['bastion'] = service['metadata']['hostname'] # depends on [control=['if'], data=[]]
elif service['name'] == 'titus':
v1_services['titus'] = {'stacks': service['metadata']['stacks'], 'enabled': service['status'][0]['enabled']} # depends on [control=['if'], data=[]]
elif service['name'] == 'spinnaker':
v1_services['spinnaker'] = {'name': service['metadata'].get('name', account['name']), 'enabled': service['status'][0]['enabled']} # depends on [control=['if'], data=[]]
elif service['name'] == 'awwwdit':
v1_services['awwwdit'] = {'enabled': service['status'][0]['enabled']} # depends on [control=['if'], data=[]]
elif service['name'] == 'security_monkey':
v1_services['security_monkey'] = {'enabled': service['status'][0]['enabled']} # depends on [control=['if'], data=[]]
elif service['name'] == 'poseidon':
v1_services['poseidon'] = {'enabled': service['status'][0]['enabled']} # depends on [control=['if'], data=[]]
elif service['name'] == 'rolliepollie':
v1_services['rolliepollie'] = {'enabled': service['status'][0]['enabled']} # depends on [control=['if'], data=[]]
elif service['name'] == 'lazyfalcon':
owner = None
if service.get('metadata'):
if service['metadata'].get('owner'):
owner = service['metadata']['owner'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
v1_services['lazyfalcon'] = {'enabled': service['status'][0]['enabled'], 'owner': owner} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service']]
if account['provider'] == 'aws':
d_account['metadata']['account_number'] = account['id'] # depends on [control=['if'], data=[]]
elif account['provider'] == 'gcp':
d_account['metadata']['project_id'] = account['id'] # depends on [control=['if'], data=[]]
d_account['id'] = account['provider'] + '-' + account['id']
d_account['cmc_required'] = account['sensitive']
d_account['name'] = account['name']
d_account['alias'] = account['aliases']
d_account['description'] = account['description']
d_account['owners'] = account['contacts']
d_account['type'] = account['provider']
d_account['ours'] = True if account['owner'] == 'netflix' else False
d_account['netflix'] = True if account['owner'] == 'netflix' else False
d_account['services'] = v1_services
d_account['account_status'] = account['account_status']
return d_account |
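# Minimal round-trip sketch with only the keys downgrade() reads; every value
# below is invented for illustration.
account = {'email': 'team@example.com', 'environment': 'test', 'tags': [], 'services': [], 'provider': 'aws', 'id': '123456789012', 'sensitive': False, 'name': 'example', 'aliases': [], 'description': 'demo', 'contacts': [], 'owner': 'netflix', 'account_status': 'ready'}
v1 = downgrade(account)
v1['id'], v1['netflix']  # -> ('aws-123456789012', True)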
def render_search(self, ctx, data):
"""
Render some UI for performing searches, if we know about a search
aggregator.
"""
if self.username is None:
return ''
translator = self._getViewerPrivateApplication()
searchAggregator = translator.getPageComponents().searchAggregator
if searchAggregator is None or not searchAggregator.providers():
return ''
return ctx.tag.fillSlots(
'form-action', translator.linkTo(searchAggregator.storeID)) | def function[render_search, parameter[self, ctx, data]]:
constant[
Render some UI for performing searches, if we know about a search
aggregator.
]
if compare[name[self].username is constant[None]] begin[:]
return[constant[]]
variable[translator] assign[=] call[name[self]._getViewerPrivateApplication, parameter[]]
variable[searchAggregator] assign[=] call[name[translator].getPageComponents, parameter[]].searchAggregator
if <ast.BoolOp object at 0x7da1b0bda410> begin[:]
return[constant[]]
return[call[name[ctx].tag.fillSlots, parameter[constant[form-action], call[name[translator].linkTo, parameter[name[searchAggregator].storeID]]]]] | keyword[def] identifier[render_search] ( identifier[self] , identifier[ctx] , identifier[data] ):
literal[string]
keyword[if] identifier[self] . identifier[username] keyword[is] keyword[None] :
keyword[return] literal[string]
identifier[translator] = identifier[self] . identifier[_getViewerPrivateApplication] ()
identifier[searchAggregator] = identifier[translator] . identifier[getPageComponents] (). identifier[searchAggregator]
keyword[if] identifier[searchAggregator] keyword[is] keyword[None] keyword[or] keyword[not] identifier[searchAggregator] . identifier[providers] ():
keyword[return] literal[string]
keyword[return] identifier[ctx] . identifier[tag] . identifier[fillSlots] (
literal[string] , identifier[translator] . identifier[linkTo] ( identifier[searchAggregator] . identifier[storeID] )) | def render_search(self, ctx, data):
"""
Render some UI for performing searches, if we know about a search
aggregator.
"""
if self.username is None:
return '' # depends on [control=['if'], data=[]]
translator = self._getViewerPrivateApplication()
searchAggregator = translator.getPageComponents().searchAggregator
if searchAggregator is None or not searchAggregator.providers():
return '' # depends on [control=['if'], data=[]]
return ctx.tag.fillSlots('form-action', translator.linkTo(searchAggregator.storeID)) |
def get_low_liquidity_transactions(transactions, market_data,
last_n_days=None):
"""
For each traded name, find the daily transaction total that consumed
the greatest proportion of available daily bar volume.
Parameters
----------
transactions : pd.DataFrame
Prices and amounts of executed trades. One row per trade.
- See full explanation in create_full_tear_sheet.
market_data : pd.Panel
Panel with items axis of 'price' and 'volume' DataFrames.
The major and minor axes should match those of the
        passed positions DataFrame (same dates and symbols).
last_n_days : integer
Compute for only the last n days of the passed backtest data.
"""
txn_daily_w_bar = daily_txns_with_bar_data(transactions, market_data)
txn_daily_w_bar.index.name = 'date'
txn_daily_w_bar = txn_daily_w_bar.reset_index()
if last_n_days is not None:
md = txn_daily_w_bar.date.max() - pd.Timedelta(days=last_n_days)
txn_daily_w_bar = txn_daily_w_bar[txn_daily_w_bar.date > md]
bar_consumption = txn_daily_w_bar.assign(
max_pct_bar_consumed=(
txn_daily_w_bar.amount/txn_daily_w_bar.volume)*100
).sort_values('max_pct_bar_consumed', ascending=False)
max_bar_consumption = bar_consumption.groupby('symbol').first()
return max_bar_consumption[['date', 'max_pct_bar_consumed']] | def function[get_low_liquidity_transactions, parameter[transactions, market_data, last_n_days]]:
constant[
For each traded name, find the daily transaction total that consumed
the greatest proportion of available daily bar volume.
Parameters
----------
transactions : pd.DataFrame
Prices and amounts of executed trades. One row per trade.
- See full explanation in create_full_tear_sheet.
market_data : pd.Panel
Panel with items axis of 'price' and 'volume' DataFrames.
The major and minor axes should match those of the
        passed positions DataFrame (same dates and symbols).
last_n_days : integer
Compute for only the last n days of the passed backtest data.
]
variable[txn_daily_w_bar] assign[=] call[name[daily_txns_with_bar_data], parameter[name[transactions], name[market_data]]]
name[txn_daily_w_bar].index.name assign[=] constant[date]
variable[txn_daily_w_bar] assign[=] call[name[txn_daily_w_bar].reset_index, parameter[]]
if compare[name[last_n_days] is_not constant[None]] begin[:]
variable[md] assign[=] binary_operation[call[name[txn_daily_w_bar].date.max, parameter[]] - call[name[pd].Timedelta, parameter[]]]
variable[txn_daily_w_bar] assign[=] call[name[txn_daily_w_bar]][compare[name[txn_daily_w_bar].date greater[>] name[md]]]
variable[bar_consumption] assign[=] call[call[name[txn_daily_w_bar].assign, parameter[]].sort_values, parameter[constant[max_pct_bar_consumed]]]
variable[max_bar_consumption] assign[=] call[call[name[bar_consumption].groupby, parameter[constant[symbol]]].first, parameter[]]
return[call[name[max_bar_consumption]][list[[<ast.Constant object at 0x7da1b0022a10>, <ast.Constant object at 0x7da1b0022a40>]]]] | keyword[def] identifier[get_low_liquidity_transactions] ( identifier[transactions] , identifier[market_data] ,
identifier[last_n_days] = keyword[None] ):
literal[string]
identifier[txn_daily_w_bar] = identifier[daily_txns_with_bar_data] ( identifier[transactions] , identifier[market_data] )
identifier[txn_daily_w_bar] . identifier[index] . identifier[name] = literal[string]
identifier[txn_daily_w_bar] = identifier[txn_daily_w_bar] . identifier[reset_index] ()
keyword[if] identifier[last_n_days] keyword[is] keyword[not] keyword[None] :
identifier[md] = identifier[txn_daily_w_bar] . identifier[date] . identifier[max] ()- identifier[pd] . identifier[Timedelta] ( identifier[days] = identifier[last_n_days] )
identifier[txn_daily_w_bar] = identifier[txn_daily_w_bar] [ identifier[txn_daily_w_bar] . identifier[date] > identifier[md] ]
identifier[bar_consumption] = identifier[txn_daily_w_bar] . identifier[assign] (
identifier[max_pct_bar_consumed] =(
identifier[txn_daily_w_bar] . identifier[amount] / identifier[txn_daily_w_bar] . identifier[volume] )* literal[int]
). identifier[sort_values] ( literal[string] , identifier[ascending] = keyword[False] )
identifier[max_bar_consumption] = identifier[bar_consumption] . identifier[groupby] ( literal[string] ). identifier[first] ()
keyword[return] identifier[max_bar_consumption] [[ literal[string] , literal[string] ]] | def get_low_liquidity_transactions(transactions, market_data, last_n_days=None):
"""
For each traded name, find the daily transaction total that consumed
the greatest proportion of available daily bar volume.
Parameters
----------
transactions : pd.DataFrame
Prices and amounts of executed trades. One row per trade.
- See full explanation in create_full_tear_sheet.
market_data : pd.Panel
Panel with items axis of 'price' and 'volume' DataFrames.
The major and minor axes should match those of the
        passed positions DataFrame (same dates and symbols).
last_n_days : integer
Compute for only the last n days of the passed backtest data.
"""
txn_daily_w_bar = daily_txns_with_bar_data(transactions, market_data)
txn_daily_w_bar.index.name = 'date'
txn_daily_w_bar = txn_daily_w_bar.reset_index()
if last_n_days is not None:
md = txn_daily_w_bar.date.max() - pd.Timedelta(days=last_n_days)
txn_daily_w_bar = txn_daily_w_bar[txn_daily_w_bar.date > md] # depends on [control=['if'], data=['last_n_days']]
bar_consumption = txn_daily_w_bar.assign(max_pct_bar_consumed=txn_daily_w_bar.amount / txn_daily_w_bar.volume * 100).sort_values('max_pct_bar_consumed', ascending=False)
max_bar_consumption = bar_consumption.groupby('symbol').first()
return max_bar_consumption[['date', 'max_pct_bar_consumed']] |
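A usage sketch for get_low_liquidity_transactions; the variable names below are placeholders for pyfolio-style inputs:
# transactions: one executed trade per row; market_data: Panel with 'price'/'volume' items
worst_fills = get_low_liquidity_transactions(transactions, market_data, last_n_days=30)
# -> one row per symbol: the date and the max % of that day's bar volume consumed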
def determine_module_class(path, class_path):
"""Determine type of module and return deployment module class."""
if not class_path:
# First check directory name for type-indicating suffix
basename = os.path.basename(path)
if basename.endswith('.sls'):
class_path = 'runway.module.serverless.Serverless'
elif basename.endswith('.tf'):
class_path = 'runway.module.terraform.Terraform'
elif basename.endswith('.cdk'):
class_path = 'runway.module.cdk.CloudDevelopmentKit'
elif basename.endswith('.cfn'):
class_path = 'runway.module.cloudformation.CloudFormation'
if not class_path:
# Fallback to autodetection
if os.path.isfile(os.path.join(path, 'serverless.yml')):
class_path = 'runway.module.serverless.Serverless'
elif glob.glob(os.path.join(path, '*.tf')):
class_path = 'runway.module.terraform.Terraform'
elif os.path.isfile(os.path.join(path, 'cdk.json')) \
and os.path.isfile(os.path.join(path, 'package.json')):
class_path = 'runway.module.cdk.CloudDevelopmentKit'
elif glob.glob(os.path.join(path, '*.env')) or (
glob.glob(os.path.join(path, '*.yaml'))) or (
glob.glob(os.path.join(path, '*.yml'))):
class_path = 'runway.module.cloudformation.CloudFormation'
if not class_path:
LOGGER.error('No module class found for %s', os.path.basename(path))
sys.exit(1)
return load_object_from_string(class_path) | def function[determine_module_class, parameter[path, class_path]]:
constant[Determine type of module and return deployment module class.]
if <ast.UnaryOp object at 0x7da1b07630d0> begin[:]
variable[basename] assign[=] call[name[os].path.basename, parameter[name[path]]]
if call[name[basename].endswith, parameter[constant[.sls]]] begin[:]
variable[class_path] assign[=] constant[runway.module.serverless.Serverless]
if <ast.UnaryOp object at 0x7da1b07602e0> begin[:]
if call[name[os].path.isfile, parameter[call[name[os].path.join, parameter[name[path], constant[serverless.yml]]]]] begin[:]
variable[class_path] assign[=] constant[runway.module.serverless.Serverless]
if <ast.UnaryOp object at 0x7da1b0760e50> begin[:]
call[name[LOGGER].error, parameter[constant[No module class found for %s], call[name[os].path.basename, parameter[name[path]]]]]
call[name[sys].exit, parameter[constant[1]]]
return[call[name[load_object_from_string], parameter[name[class_path]]]] | keyword[def] identifier[determine_module_class] ( identifier[path] , identifier[class_path] ):
literal[string]
keyword[if] keyword[not] identifier[class_path] :
identifier[basename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] )
keyword[if] identifier[basename] . identifier[endswith] ( literal[string] ):
identifier[class_path] = literal[string]
keyword[elif] identifier[basename] . identifier[endswith] ( literal[string] ):
identifier[class_path] = literal[string]
keyword[elif] identifier[basename] . identifier[endswith] ( literal[string] ):
identifier[class_path] = literal[string]
keyword[elif] identifier[basename] . identifier[endswith] ( literal[string] ):
identifier[class_path] = literal[string]
keyword[if] keyword[not] identifier[class_path] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )):
identifier[class_path] = literal[string]
keyword[elif] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )):
identifier[class_path] = literal[string]
keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )):
identifier[class_path] = literal[string]
keyword[elif] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )) keyword[or] (
identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] ))) keyword[or] (
identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] ))):
identifier[class_path] = literal[string]
keyword[if] keyword[not] identifier[class_path] :
identifier[LOGGER] . identifier[error] ( literal[string] , identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[load_object_from_string] ( identifier[class_path] ) | def determine_module_class(path, class_path):
"""Determine type of module and return deployment module class."""
if not class_path:
# First check directory name for type-indicating suffix
basename = os.path.basename(path)
if basename.endswith('.sls'):
class_path = 'runway.module.serverless.Serverless' # depends on [control=['if'], data=[]]
elif basename.endswith('.tf'):
class_path = 'runway.module.terraform.Terraform' # depends on [control=['if'], data=[]]
elif basename.endswith('.cdk'):
class_path = 'runway.module.cdk.CloudDevelopmentKit' # depends on [control=['if'], data=[]]
elif basename.endswith('.cfn'):
class_path = 'runway.module.cloudformation.CloudFormation' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not class_path:
# Fallback to autodetection
if os.path.isfile(os.path.join(path, 'serverless.yml')):
class_path = 'runway.module.serverless.Serverless' # depends on [control=['if'], data=[]]
elif glob.glob(os.path.join(path, '*.tf')):
class_path = 'runway.module.terraform.Terraform' # depends on [control=['if'], data=[]]
elif os.path.isfile(os.path.join(path, 'cdk.json')) and os.path.isfile(os.path.join(path, 'package.json')):
class_path = 'runway.module.cdk.CloudDevelopmentKit' # depends on [control=['if'], data=[]]
elif glob.glob(os.path.join(path, '*.env')) or glob.glob(os.path.join(path, '*.yaml')) or glob.glob(os.path.join(path, '*.yml')):
class_path = 'runway.module.cloudformation.CloudFormation' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not class_path:
LOGGER.error('No module class found for %s', os.path.basename(path))
sys.exit(1) # depends on [control=['if'], data=[]]
return load_object_from_string(class_path) |
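A minimal sketch of the suffix-based detection path; the directory name is an assumption:
# a module directory named 'webapp.tf' short-circuits autodetection
module_class = determine_module_class('/deploy/webapp.tf', class_path=None)
# resolves 'runway.module.terraform.Terraform' via load_object_from_string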
def _poll_connection(self, fd):
"""Check with psycopg2 to see what action to take. If the state is
POLL_OK, we should have a pending callback for that fd.
:param int fd: The socket fd for the postgresql connection
"""
try:
state = self._connections[fd].poll()
except (OSError, socket.error) as error:
self._ioloop.remove_handler(fd)
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_exception(
psycopg2.OperationalError('Connection error (%s)' % error)
)
except (psycopg2.Error, psycopg2.Warning) as error:
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_exception(error)
else:
if state == extensions.POLL_OK:
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_result(True)
elif state == extensions.POLL_WRITE:
self._ioloop.update_handler(fd, ioloop.IOLoop.WRITE)
elif state == extensions.POLL_READ:
self._ioloop.update_handler(fd, ioloop.IOLoop.READ)
elif state == extensions.POLL_ERROR:
self._ioloop.remove_handler(fd)
if fd in self._futures and not self._futures[fd].done():
self._futures[fd].set_exception(
psycopg2.Error('Poll Error')) | def function[_poll_connection, parameter[self, fd]]:
constant[Check with psycopg2 to see what action to take. If the state is
POLL_OK, we should have a pending callback for that fd.
:param int fd: The socket fd for the postgresql connection
]
<ast.Try object at 0x7da20e74bf40> | keyword[def] identifier[_poll_connection] ( identifier[self] , identifier[fd] ):
literal[string]
keyword[try] :
identifier[state] = identifier[self] . identifier[_connections] [ identifier[fd] ]. identifier[poll] ()
keyword[except] ( identifier[OSError] , identifier[socket] . identifier[error] ) keyword[as] identifier[error] :
identifier[self] . identifier[_ioloop] . identifier[remove_handler] ( identifier[fd] )
keyword[if] identifier[fd] keyword[in] identifier[self] . identifier[_futures] keyword[and] keyword[not] identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[done] ():
identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[set_exception] (
identifier[psycopg2] . identifier[OperationalError] ( literal[string] % identifier[error] )
)
keyword[except] ( identifier[psycopg2] . identifier[Error] , identifier[psycopg2] . identifier[Warning] ) keyword[as] identifier[error] :
keyword[if] identifier[fd] keyword[in] identifier[self] . identifier[_futures] keyword[and] keyword[not] identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[done] ():
identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[set_exception] ( identifier[error] )
keyword[else] :
keyword[if] identifier[state] == identifier[extensions] . identifier[POLL_OK] :
keyword[if] identifier[fd] keyword[in] identifier[self] . identifier[_futures] keyword[and] keyword[not] identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[done] ():
identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[set_result] ( keyword[True] )
keyword[elif] identifier[state] == identifier[extensions] . identifier[POLL_WRITE] :
identifier[self] . identifier[_ioloop] . identifier[update_handler] ( identifier[fd] , identifier[ioloop] . identifier[IOLoop] . identifier[WRITE] )
keyword[elif] identifier[state] == identifier[extensions] . identifier[POLL_READ] :
identifier[self] . identifier[_ioloop] . identifier[update_handler] ( identifier[fd] , identifier[ioloop] . identifier[IOLoop] . identifier[READ] )
keyword[elif] identifier[state] == identifier[extensions] . identifier[POLL_ERROR] :
identifier[self] . identifier[_ioloop] . identifier[remove_handler] ( identifier[fd] )
keyword[if] identifier[fd] keyword[in] identifier[self] . identifier[_futures] keyword[and] keyword[not] identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[done] ():
identifier[self] . identifier[_futures] [ identifier[fd] ]. identifier[set_exception] (
identifier[psycopg2] . identifier[Error] ( literal[string] )) | def _poll_connection(self, fd):
"""Check with psycopg2 to see what action to take. If the state is
POLL_OK, we should have a pending callback for that fd.
:param int fd: The socket fd for the postgresql connection
"""
try:
state = self._connections[fd].poll() # depends on [control=['try'], data=[]]
except (OSError, socket.error) as error:
self._ioloop.remove_handler(fd)
if fd in self._futures and (not self._futures[fd].done()):
self._futures[fd].set_exception(psycopg2.OperationalError('Connection error (%s)' % error)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['error']]
except (psycopg2.Error, psycopg2.Warning) as error:
if fd in self._futures and (not self._futures[fd].done()):
self._futures[fd].set_exception(error) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['error']]
else:
if state == extensions.POLL_OK:
if fd in self._futures and (not self._futures[fd].done()):
self._futures[fd].set_result(True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif state == extensions.POLL_WRITE:
self._ioloop.update_handler(fd, ioloop.IOLoop.WRITE) # depends on [control=['if'], data=[]]
elif state == extensions.POLL_READ:
self._ioloop.update_handler(fd, ioloop.IOLoop.READ) # depends on [control=['if'], data=[]]
elif state == extensions.POLL_ERROR:
self._ioloop.remove_handler(fd)
if fd in self._futures and (not self._futures[fd].done()):
self._futures[fd].set_exception(psycopg2.Error('Poll Error')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def filter_user(user, using='records', interaction=None,
part_of_week='allweek', part_of_day='allday'):
"""
    Filter records of a User object by interaction, part of week and day.
Parameters
----------
user : User
a bandicoot User object
    using : str, default 'records'
'records' or 'recharges'
part_of_week : {'allweek', 'weekday', 'weekend'}, default 'allweek'
* 'weekend': keep only the weekend records
* 'weekday': keep only the weekdays records
* 'allweek': use all the records
part_of_day : {'allday', 'day', 'night'}, default 'allday'
* 'day': keep only the records during the day
* 'night': keep only the records during the night
* 'allday': use all the records
interaction : object
The interaction to filter records:
* "callandtext", for only callandtext;
* a string, to filter for one type;
* None, to use all records.
"""
if using == 'recharges':
records = user.recharges
else:
records = user.records
if interaction == 'callandtext':
records = filter(
lambda r: r.interaction in ['call', 'text'], records)
elif interaction is not None:
records = filter(lambda r: r.interaction == interaction, records)
if part_of_week == 'weekday':
records = filter(
lambda r: r.datetime.isoweekday() not in user.weekend, records)
elif part_of_week == 'weekend':
records = filter(
lambda r: r.datetime.isoweekday() in user.weekend, records)
elif part_of_week != 'allweek':
raise KeyError(
"{} is not a valid value for part_of_week. it should be 'weekday', "
"'weekend' or 'allweek'.".format(part_of_week))
if user.night_start < user.night_end:
night_filter = lambda r: user.night_end > r.datetime.time(
) > user.night_start
else:
night_filter = lambda r: not(
user.night_end < r.datetime.time() < user.night_start)
if part_of_day == 'day':
records = filter(lambda r: not(night_filter(r)), records)
elif part_of_day == 'night':
records = filter(night_filter, records)
elif part_of_day != 'allday':
raise KeyError(
"{} is not a valid value for part_of_day. It should be 'day', 'night' or 'allday'.".format(part_of_day))
return list(records) | def function[filter_user, parameter[user, using, interaction, part_of_week, part_of_day]]:
constant[
    Filter records of a User object by interaction, part of week and day.
Parameters
----------
user : User
a bandicoot User object
    using : str, default 'records'
'records' or 'recharges'
part_of_week : {'allweek', 'weekday', 'weekend'}, default 'allweek'
* 'weekend': keep only the weekend records
* 'weekday': keep only the weekdays records
* 'allweek': use all the records
part_of_day : {'allday', 'day', 'night'}, default 'allday'
* 'day': keep only the records during the day
* 'night': keep only the records during the night
* 'allday': use all the records
interaction : object
The interaction to filter records:
* "callandtext", for only callandtext;
* a string, to filter for one type;
* None, to use all records.
]
if compare[name[using] equal[==] constant[recharges]] begin[:]
variable[records] assign[=] name[user].recharges
if compare[name[part_of_week] equal[==] constant[weekday]] begin[:]
variable[records] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b0d63550>, name[records]]]
if compare[name[user].night_start less[<] name[user].night_end] begin[:]
variable[night_filter] assign[=] <ast.Lambda object at 0x7da1b0d603a0>
if compare[name[part_of_day] equal[==] constant[day]] begin[:]
variable[records] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da207f01390>, name[records]]]
return[call[name[list], parameter[name[records]]]] | keyword[def] identifier[filter_user] ( identifier[user] , identifier[using] = literal[string] , identifier[interaction] = keyword[None] ,
identifier[part_of_week] = literal[string] , identifier[part_of_day] = literal[string] ):
literal[string]
keyword[if] identifier[using] == literal[string] :
identifier[records] = identifier[user] . identifier[recharges]
keyword[else] :
identifier[records] = identifier[user] . identifier[records]
keyword[if] identifier[interaction] == literal[string] :
identifier[records] = identifier[filter] (
keyword[lambda] identifier[r] : identifier[r] . identifier[interaction] keyword[in] [ literal[string] , literal[string] ], identifier[records] )
keyword[elif] identifier[interaction] keyword[is] keyword[not] keyword[None] :
identifier[records] = identifier[filter] ( keyword[lambda] identifier[r] : identifier[r] . identifier[interaction] == identifier[interaction] , identifier[records] )
keyword[if] identifier[part_of_week] == literal[string] :
identifier[records] = identifier[filter] (
keyword[lambda] identifier[r] : identifier[r] . identifier[datetime] . identifier[isoweekday] () keyword[not] keyword[in] identifier[user] . identifier[weekend] , identifier[records] )
keyword[elif] identifier[part_of_week] == literal[string] :
identifier[records] = identifier[filter] (
keyword[lambda] identifier[r] : identifier[r] . identifier[datetime] . identifier[isoweekday] () keyword[in] identifier[user] . identifier[weekend] , identifier[records] )
keyword[elif] identifier[part_of_week] != literal[string] :
keyword[raise] identifier[KeyError] (
literal[string]
literal[string] . identifier[format] ( identifier[part_of_week] ))
keyword[if] identifier[user] . identifier[night_start] < identifier[user] . identifier[night_end] :
identifier[night_filter] = keyword[lambda] identifier[r] : identifier[user] . identifier[night_end] > identifier[r] . identifier[datetime] . identifier[time] (
)> identifier[user] . identifier[night_start]
keyword[else] :
identifier[night_filter] = keyword[lambda] identifier[r] : keyword[not] (
identifier[user] . identifier[night_end] < identifier[r] . identifier[datetime] . identifier[time] ()< identifier[user] . identifier[night_start] )
keyword[if] identifier[part_of_day] == literal[string] :
identifier[records] = identifier[filter] ( keyword[lambda] identifier[r] : keyword[not] ( identifier[night_filter] ( identifier[r] )), identifier[records] )
keyword[elif] identifier[part_of_day] == literal[string] :
identifier[records] = identifier[filter] ( identifier[night_filter] , identifier[records] )
keyword[elif] identifier[part_of_day] != literal[string] :
keyword[raise] identifier[KeyError] (
literal[string] . identifier[format] ( identifier[part_of_day] ))
keyword[return] identifier[list] ( identifier[records] ) | def filter_user(user, using='records', interaction=None, part_of_week='allweek', part_of_day='allday'):
"""
    Filter records of a User object by interaction, part of week and day.
Parameters
----------
user : User
a bandicoot User object
    using : str, default 'records'
'records' or 'recharges'
part_of_week : {'allweek', 'weekday', 'weekend'}, default 'allweek'
* 'weekend': keep only the weekend records
* 'weekday': keep only the weekdays records
* 'allweek': use all the records
part_of_day : {'allday', 'day', 'night'}, default 'allday'
* 'day': keep only the records during the day
* 'night': keep only the records during the night
* 'allday': use all the records
interaction : object
The interaction to filter records:
* "callandtext", for only callandtext;
* a string, to filter for one type;
* None, to use all records.
"""
if using == 'recharges':
records = user.recharges # depends on [control=['if'], data=[]]
else:
records = user.records
if interaction == 'callandtext':
records = filter(lambda r: r.interaction in ['call', 'text'], records) # depends on [control=['if'], data=[]]
elif interaction is not None:
records = filter(lambda r: r.interaction == interaction, records) # depends on [control=['if'], data=['interaction']]
if part_of_week == 'weekday':
records = filter(lambda r: r.datetime.isoweekday() not in user.weekend, records) # depends on [control=['if'], data=[]]
elif part_of_week == 'weekend':
records = filter(lambda r: r.datetime.isoweekday() in user.weekend, records) # depends on [control=['if'], data=[]]
elif part_of_week != 'allweek':
        raise KeyError("{} is not a valid value for part_of_week. It should be 'weekday', 'weekend' or 'allweek'.".format(part_of_week)) # depends on [control=['if'], data=['part_of_week']]
if user.night_start < user.night_end:
night_filter = lambda r: user.night_end > r.datetime.time() > user.night_start # depends on [control=['if'], data=[]]
else:
night_filter = lambda r: not user.night_end < r.datetime.time() < user.night_start
if part_of_day == 'day':
records = filter(lambda r: not night_filter(r), records) # depends on [control=['if'], data=[]]
elif part_of_day == 'night':
records = filter(night_filter, records) # depends on [control=['if'], data=[]]
elif part_of_day != 'allday':
raise KeyError("{} is not a valid value for part_of_day. It should be 'day', 'night' or 'allday'.".format(part_of_day)) # depends on [control=['if'], data=['part_of_day']]
return list(records) |
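A usage sketch, assuming `user` is an already-loaded bandicoot User:
# weekday call records only
weekday_calls = filter_user(user, interaction='call', part_of_week='weekday')
# night-time recharges (interaction filtering is skipped when interaction is None)
night_recharges = filter_user(user, using='recharges', part_of_day='night')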
def parse(self):
""" parse the data
"""
# convert the xlsx file to csv first
delimiter = "|"
csv_file = self.xlsx_to_csv(self.getInputFile(), delimiter=delimiter)
reader = csv.DictReader(csv_file, delimiter=delimiter)
for n, row in enumerate(reader):
resid = row.get("SampleID", None)
serial = row.get("SerialNumber", None)
            # Convert empty values to "Invalid"
value = row.get("Value", None) or "Invalid"
# no resid and no serial
if not any([resid, serial]):
self.err("Result identification not found.", numline=n)
continue
rawdict = row
rawdict["Value"] = value.rstrip(" cps/ml")
rawdict['DefaultResult'] = 'Value'
# HEALTH-567 correction factor for calculation
# XXX HEALTH-567 Is this just for nmrl?
if 'Plasma' in rawdict.get('Matrix', 'Other'):
rawdict['CF'] = 1 # report value as-is
else:
rawdict['CF'] = 1.82 # report value * 1.82
key = resid or serial
testname = row.get("Product", "EasyQDirector")
self._addRawResult(key, {testname: rawdict}, False) | def function[parse, parameter[self]]:
constant[ parse the data
]
variable[delimiter] assign[=] constant[|]
variable[csv_file] assign[=] call[name[self].xlsx_to_csv, parameter[call[name[self].getInputFile, parameter[]]]]
variable[reader] assign[=] call[name[csv].DictReader, parameter[name[csv_file]]]
for taget[tuple[[<ast.Name object at 0x7da1b2344100>, <ast.Name object at 0x7da1b2344c70>]]] in starred[call[name[enumerate], parameter[name[reader]]]] begin[:]
variable[resid] assign[=] call[name[row].get, parameter[constant[SampleID], constant[None]]]
variable[serial] assign[=] call[name[row].get, parameter[constant[SerialNumber], constant[None]]]
variable[value] assign[=] <ast.BoolOp object at 0x7da1b2346b60>
if <ast.UnaryOp object at 0x7da1b23462f0> begin[:]
call[name[self].err, parameter[constant[Result identification not found.]]]
continue
variable[rawdict] assign[=] name[row]
call[name[rawdict]][constant[Value]] assign[=] call[name[value].rstrip, parameter[constant[ cps/ml]]]
call[name[rawdict]][constant[DefaultResult]] assign[=] constant[Value]
if compare[constant[Plasma] in call[name[rawdict].get, parameter[constant[Matrix], constant[Other]]]] begin[:]
call[name[rawdict]][constant[CF]] assign[=] constant[1]
variable[key] assign[=] <ast.BoolOp object at 0x7da1b1d4aa40>
variable[testname] assign[=] call[name[row].get, parameter[constant[Product], constant[EasyQDirector]]]
call[name[self]._addRawResult, parameter[name[key], dictionary[[<ast.Name object at 0x7da1b1d4ad10>], [<ast.Name object at 0x7da1b1d4ac80>]], constant[False]]] | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
identifier[delimiter] = literal[string]
identifier[csv_file] = identifier[self] . identifier[xlsx_to_csv] ( identifier[self] . identifier[getInputFile] (), identifier[delimiter] = identifier[delimiter] )
identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[csv_file] , identifier[delimiter] = identifier[delimiter] )
keyword[for] identifier[n] , identifier[row] keyword[in] identifier[enumerate] ( identifier[reader] ):
identifier[resid] = identifier[row] . identifier[get] ( literal[string] , keyword[None] )
identifier[serial] = identifier[row] . identifier[get] ( literal[string] , keyword[None] )
identifier[value] = identifier[row] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] literal[string]
keyword[if] keyword[not] identifier[any] ([ identifier[resid] , identifier[serial] ]):
identifier[self] . identifier[err] ( literal[string] , identifier[numline] = identifier[n] )
keyword[continue]
identifier[rawdict] = identifier[row]
identifier[rawdict] [ literal[string] ]= identifier[value] . identifier[rstrip] ( literal[string] )
identifier[rawdict] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[rawdict] . identifier[get] ( literal[string] , literal[string] ):
identifier[rawdict] [ literal[string] ]= literal[int]
keyword[else] :
identifier[rawdict] [ literal[string] ]= literal[int]
identifier[key] = identifier[resid] keyword[or] identifier[serial]
identifier[testname] = identifier[row] . identifier[get] ( literal[string] , literal[string] )
identifier[self] . identifier[_addRawResult] ( identifier[key] ,{ identifier[testname] : identifier[rawdict] }, keyword[False] ) | def parse(self):
""" parse the data
"""
# convert the xlsx file to csv first
delimiter = '|'
csv_file = self.xlsx_to_csv(self.getInputFile(), delimiter=delimiter)
reader = csv.DictReader(csv_file, delimiter=delimiter)
for (n, row) in enumerate(reader):
resid = row.get('SampleID', None)
serial = row.get('SerialNumber', None)
            # Convert empty values to "Invalid"
value = row.get('Value', None) or 'Invalid'
# no resid and no serial
if not any([resid, serial]):
self.err('Result identification not found.', numline=n)
continue # depends on [control=['if'], data=[]]
rawdict = row
rawdict['Value'] = value.rstrip(' cps/ml')
rawdict['DefaultResult'] = 'Value'
# HEALTH-567 correction factor for calculation
# XXX HEALTH-567 Is this just for nmrl?
if 'Plasma' in rawdict.get('Matrix', 'Other'):
rawdict['CF'] = 1 # report value as-is # depends on [control=['if'], data=[]]
else:
rawdict['CF'] = 1.82 # report value * 1.82
key = resid or serial
testname = row.get('Product', 'EasyQDirector')
self._addRawResult(key, {testname: rawdict}, False) # depends on [control=['for'], data=[]] |
def _run(cmd):
'''
Just a convenience function for ``__salt__['cmd.run_all'](cmd)``
'''
return __salt__['cmd.run_all'](cmd, env={'HOME': os.path.expanduser('~{0}'.format(__opts__['user']))}) | def function[_run, parameter[cmd]]:
constant[
Just a convenience function for ``__salt__['cmd.run_all'](cmd)``
]
return[call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]] | keyword[def] identifier[_run] ( identifier[cmd] ):
literal[string]
keyword[return] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[env] ={ literal[string] : identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] . identifier[format] ( identifier[__opts__] [ literal[string] ]))}) | def _run(cmd):
"""
Just a convenience function for ``__salt__['cmd.run_all'](cmd)``
"""
return __salt__['cmd.run_all'](cmd, env={'HOME': os.path.expanduser('~{0}'.format(__opts__['user']))}) |
def set(self):
"""Set the hook."""
if sys.displayhook is not self.hook:
self.old_hook = sys.displayhook
sys.displayhook = self.hook | def function[set, parameter[self]]:
constant[Set the hook.]
if compare[name[sys].displayhook is_not name[self].hook] begin[:]
name[self].old_hook assign[=] name[sys].displayhook
name[sys].displayhook assign[=] name[self].hook | keyword[def] identifier[set] ( identifier[self] ):
literal[string]
keyword[if] identifier[sys] . identifier[displayhook] keyword[is] keyword[not] identifier[self] . identifier[hook] :
identifier[self] . identifier[old_hook] = identifier[sys] . identifier[displayhook]
identifier[sys] . identifier[displayhook] = identifier[self] . identifier[hook] | def set(self):
"""Set the hook."""
if sys.displayhook is not self.hook:
self.old_hook = sys.displayhook
sys.displayhook = self.hook # depends on [control=['if'], data=[]] |
def syllabify(word):
'''Syllabify the given word, whether simplex or complex.'''
word = split(word) # detect any non-delimited compounds
compound = True if re.search(r'-| |\.', word) else False
syllabify = _syllabify_compound if compound else _syllabify
syll, rules = syllabify(word)
yield syll, rules
n = 3
if 'T4' in rules:
yield syllabify(word, T4=False)
n -= 1
if 'e' in rules:
yield syllabify(word, T1E=False)
n -= 1
if 'e' in rules and 'T4' in rules:
yield syllabify(word, T4=False, T1E=False)
n -= 1
# yield empty syllabifications and rules
for n in range(7):
yield '', '' | def function[syllabify, parameter[word]]:
constant[Syllabify the given word, whether simplex or complex.]
variable[word] assign[=] call[name[split], parameter[name[word]]]
variable[compound] assign[=] <ast.IfExp object at 0x7da1b11d3250>
variable[syllabify] assign[=] <ast.IfExp object at 0x7da1b11d3b20>
<ast.Tuple object at 0x7da1b11d2b00> assign[=] call[name[syllabify], parameter[name[word]]]
<ast.Yield object at 0x7da1b0f1c5e0>
variable[n] assign[=] constant[3]
if compare[constant[T4] in name[rules]] begin[:]
<ast.Yield object at 0x7da1b11a3cd0>
<ast.AugAssign object at 0x7da1b11a2200>
if compare[constant[e] in name[rules]] begin[:]
<ast.Yield object at 0x7da1b11a0cd0>
<ast.AugAssign object at 0x7da1b11a2f20>
if <ast.BoolOp object at 0x7da1b11a22c0> begin[:]
<ast.Yield object at 0x7da1b11a0d90>
<ast.AugAssign object at 0x7da1b1178580>
for taget[name[n]] in starred[call[name[range], parameter[constant[7]]]] begin[:]
<ast.Yield object at 0x7da1b117b760> | keyword[def] identifier[syllabify] ( identifier[word] ):
literal[string]
identifier[word] = identifier[split] ( identifier[word] )
identifier[compound] = keyword[True] keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[word] ) keyword[else] keyword[False]
identifier[syllabify] = identifier[_syllabify_compound] keyword[if] identifier[compound] keyword[else] identifier[_syllabify]
identifier[syll] , identifier[rules] = identifier[syllabify] ( identifier[word] )
keyword[yield] identifier[syll] , identifier[rules]
identifier[n] = literal[int]
keyword[if] literal[string] keyword[in] identifier[rules] :
keyword[yield] identifier[syllabify] ( identifier[word] , identifier[T4] = keyword[False] )
identifier[n] -= literal[int]
keyword[if] literal[string] keyword[in] identifier[rules] :
keyword[yield] identifier[syllabify] ( identifier[word] , identifier[T1E] = keyword[False] )
identifier[n] -= literal[int]
keyword[if] literal[string] keyword[in] identifier[rules] keyword[and] literal[string] keyword[in] identifier[rules] :
keyword[yield] identifier[syllabify] ( identifier[word] , identifier[T4] = keyword[False] , identifier[T1E] = keyword[False] )
identifier[n] -= literal[int]
keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] ):
keyword[yield] literal[string] , literal[string] | def syllabify(word):
"""Syllabify the given word, whether simplex or complex."""
word = split(word) # detect any non-delimited compounds
compound = True if re.search('-| |\\.', word) else False
syllabify = _syllabify_compound if compound else _syllabify
(syll, rules) = syllabify(word)
yield (syll, rules)
n = 3
if 'T4' in rules:
yield syllabify(word, T4=False)
n -= 1 # depends on [control=['if'], data=[]]
if 'e' in rules:
yield syllabify(word, T1E=False)
n -= 1 # depends on [control=['if'], data=[]]
if 'e' in rules and 'T4' in rules:
yield syllabify(word, T4=False, T1E=False)
n -= 1 # depends on [control=['if'], data=[]]
# yield empty syllabifications and rules
for n in range(7):
yield ('', '') # depends on [control=['for'], data=[]] |
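A usage sketch; the input word is an assumption and the internal _syllabify helpers do the real work:
# keep only the non-empty (syllabification, rules) pairs the generator yields
candidates = [(syll, rules) for syll, rules in syllabify('kalassa') if syll]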
def u_probs(self):
"""Probability P(x_i==1|Non-match) as described in the FS framework."""
log_u = self.kernel.feature_log_prob_[self._nonmatch_class_pos()]
return self._prob_inverse_transform(numpy.exp(log_u)) | def function[u_probs, parameter[self]]:
constant[Probability P(x_i==1|Non-match) as described in the FS framework.]
variable[log_u] assign[=] call[name[self].kernel.feature_log_prob_][call[name[self]._nonmatch_class_pos, parameter[]]]
return[call[name[self]._prob_inverse_transform, parameter[call[name[numpy].exp, parameter[name[log_u]]]]]] | keyword[def] identifier[u_probs] ( identifier[self] ):
literal[string]
identifier[log_u] = identifier[self] . identifier[kernel] . identifier[feature_log_prob_] [ identifier[self] . identifier[_nonmatch_class_pos] ()]
keyword[return] identifier[self] . identifier[_prob_inverse_transform] ( identifier[numpy] . identifier[exp] ( identifier[log_u] )) | def u_probs(self):
"""Probability P(x_i==1|Non-match) as described in the FS framework."""
log_u = self.kernel.feature_log_prob_[self._nonmatch_class_pos()]
return self._prob_inverse_transform(numpy.exp(log_u)) |
def ms_to_times(ms):
"""
Convert milliseconds to normalized tuple (h, m, s, ms).
Arguments:
ms: Number of milliseconds (may be int, float or other numeric class).
Should be non-negative.
Returns:
Named tuple (h, m, s, ms) of ints.
Invariants: ``ms in range(1000) and s in range(60) and m in range(60)``
"""
ms = int(round(ms))
h, ms = divmod(ms, 3600000)
m, ms = divmod(ms, 60000)
s, ms = divmod(ms, 1000)
return Times(h, m, s, ms) | def function[ms_to_times, parameter[ms]]:
constant[
Convert milliseconds to normalized tuple (h, m, s, ms).
Arguments:
ms: Number of milliseconds (may be int, float or other numeric class).
Should be non-negative.
Returns:
Named tuple (h, m, s, ms) of ints.
Invariants: ``ms in range(1000) and s in range(60) and m in range(60)``
]
variable[ms] assign[=] call[name[int], parameter[call[name[round], parameter[name[ms]]]]]
<ast.Tuple object at 0x7da204567190> assign[=] call[name[divmod], parameter[name[ms], constant[3600000]]]
<ast.Tuple object at 0x7da204565990> assign[=] call[name[divmod], parameter[name[ms], constant[60000]]]
<ast.Tuple object at 0x7da204564f10> assign[=] call[name[divmod], parameter[name[ms], constant[1000]]]
return[call[name[Times], parameter[name[h], name[m], name[s], name[ms]]]] | keyword[def] identifier[ms_to_times] ( identifier[ms] ):
literal[string]
identifier[ms] = identifier[int] ( identifier[round] ( identifier[ms] ))
identifier[h] , identifier[ms] = identifier[divmod] ( identifier[ms] , literal[int] )
identifier[m] , identifier[ms] = identifier[divmod] ( identifier[ms] , literal[int] )
identifier[s] , identifier[ms] = identifier[divmod] ( identifier[ms] , literal[int] )
keyword[return] identifier[Times] ( identifier[h] , identifier[m] , identifier[s] , identifier[ms] ) | def ms_to_times(ms):
"""
Convert milliseconds to normalized tuple (h, m, s, ms).
Arguments:
ms: Number of milliseconds (may be int, float or other numeric class).
Should be non-negative.
Returns:
Named tuple (h, m, s, ms) of ints.
Invariants: ``ms in range(1000) and s in range(60) and m in range(60)``
"""
ms = int(round(ms))
(h, ms) = divmod(ms, 3600000)
(m, ms) = divmod(ms, 60000)
(s, ms) = divmod(ms, 1000)
return Times(h, m, s, ms) |
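Two quick checks of the conversion (Times is the module's named tuple of ints):
ms_to_times(3723004)  # -> Times(h=1, m=2, s=3, ms=4)
ms_to_times(999.6)    # rounds first -> Times(h=0, m=0, s=1, ms=0)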
def acc_difference(points):
""" Computes the accelaration difference between each adjacent point
Args:
points (:obj:`Point`)
Returns:
        :obj:`list` of float: Acceleration differences between adjacent points (first element is 0)
"""
data = [0]
for before, after in pairwise(points):
data.append(before.acc - after.acc)
return data | def function[acc_difference, parameter[points]]:
    constant[ Computes the acceleration difference between each adjacent point
Args:
points (:obj:`Point`)
Returns:
        :obj:`list` of float: Acceleration differences between adjacent points (first element is 0)
]
variable[data] assign[=] list[[<ast.Constant object at 0x7da1b040acb0>]]
for taget[tuple[[<ast.Name object at 0x7da1b0409ab0>, <ast.Name object at 0x7da1b04099c0>]]] in starred[call[name[pairwise], parameter[name[points]]]] begin[:]
call[name[data].append, parameter[binary_operation[name[before].acc - name[after].acc]]]
return[name[data]] | keyword[def] identifier[acc_difference] ( identifier[points] ):
literal[string]
identifier[data] =[ literal[int] ]
keyword[for] identifier[before] , identifier[after] keyword[in] identifier[pairwise] ( identifier[points] ):
identifier[data] . identifier[append] ( identifier[before] . identifier[acc] - identifier[after] . identifier[acc] )
keyword[return] identifier[data] | def acc_difference(points):
""" Computes the accelaration difference between each adjacent point
Args:
points (:obj:`Point`)
Returns:
        :obj:`list` of float: Acceleration differences between adjacent points (first element is 0)
"""
data = [0]
for (before, after) in pairwise(points):
data.append(before.acc - after.acc) # depends on [control=['for'], data=[]]
return data |
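A sketch with hypothetical Point objects exposing an `acc` attribute:
points = [Point(acc=1.0), Point(acc=3.0), Point(acc=2.0)]  # hypothetical constructor
acc_difference(points)  # -> [0, -2.0, 1.0]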
def _baton_json_to_irods_entities(self, entities_as_baton_json: List[Dict]) -> List[EntityType]:
"""
Converts the baton representation of multiple iRODS entities to a list of `EntityType` models.
:param entities_as_baton_json: the baton serialization representation of the entities
:return: the equivalent models
"""
assert(isinstance(entities_as_baton_json, list))
entities = []
for file_as_baton_json in entities_as_baton_json:
entity = self._baton_json_to_irods_entity(file_as_baton_json)
entities.append(entity)
return entities | def function[_baton_json_to_irods_entities, parameter[self, entities_as_baton_json]]:
constant[
Converts the baton representation of multiple iRODS entities to a list of `EntityType` models.
:param entities_as_baton_json: the baton serialization representation of the entities
:return: the equivalent models
]
assert[call[name[isinstance], parameter[name[entities_as_baton_json], name[list]]]]
variable[entities] assign[=] list[[]]
for taget[name[file_as_baton_json]] in starred[name[entities_as_baton_json]] begin[:]
variable[entity] assign[=] call[name[self]._baton_json_to_irods_entity, parameter[name[file_as_baton_json]]]
call[name[entities].append, parameter[name[entity]]]
return[name[entities]] | keyword[def] identifier[_baton_json_to_irods_entities] ( identifier[self] , identifier[entities_as_baton_json] : identifier[List] [ identifier[Dict] ])-> identifier[List] [ identifier[EntityType] ]:
literal[string]
keyword[assert] ( identifier[isinstance] ( identifier[entities_as_baton_json] , identifier[list] ))
identifier[entities] =[]
keyword[for] identifier[file_as_baton_json] keyword[in] identifier[entities_as_baton_json] :
identifier[entity] = identifier[self] . identifier[_baton_json_to_irods_entity] ( identifier[file_as_baton_json] )
identifier[entities] . identifier[append] ( identifier[entity] )
keyword[return] identifier[entities] | def _baton_json_to_irods_entities(self, entities_as_baton_json: List[Dict]) -> List[EntityType]:
"""
Converts the baton representation of multiple iRODS entities to a list of `EntityType` models.
:param entities_as_baton_json: the baton serialization representation of the entities
:return: the equivalent models
"""
assert isinstance(entities_as_baton_json, list)
entities = []
for file_as_baton_json in entities_as_baton_json:
entity = self._baton_json_to_irods_entity(file_as_baton_json)
entities.append(entity) # depends on [control=['for'], data=['file_as_baton_json']]
return entities |
def _sync_with_file(self):
"""Clear in-memory structures so table is synced with the file."""
self._records = []
i = -1
for i, line in self._enum_lines():
self._records.append(None)
self._last_synced_index = i | def function[_sync_with_file, parameter[self]]:
constant[Clear in-memory structures so table is synced with the file.]
name[self]._records assign[=] list[[]]
variable[i] assign[=] <ast.UnaryOp object at 0x7da18f58d360>
for taget[tuple[[<ast.Name object at 0x7da18f58cca0>, <ast.Name object at 0x7da18f58c2b0>]]] in starred[call[name[self]._enum_lines, parameter[]]] begin[:]
call[name[self]._records.append, parameter[constant[None]]]
name[self]._last_synced_index assign[=] name[i] | keyword[def] identifier[_sync_with_file] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_records] =[]
identifier[i] =- literal[int]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[self] . identifier[_enum_lines] ():
identifier[self] . identifier[_records] . identifier[append] ( keyword[None] )
identifier[self] . identifier[_last_synced_index] = identifier[i] | def _sync_with_file(self):
"""Clear in-memory structures so table is synced with the file."""
self._records = []
i = -1
for (i, line) in self._enum_lines():
self._records.append(None) # depends on [control=['for'], data=[]]
self._last_synced_index = i |
def make_tuple(obj, cast=True):
"""
Converts an object *obj* to a tuple and returns it. Objects of types *list* and *set* are
converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new tuple.
"""
if isinstance(obj, tuple):
return tuple(obj)
elif is_lazy_iterable(obj):
return tuple(obj)
elif isinstance(obj, (list, set)) and cast:
return tuple(obj)
else:
return (obj,) | def function[make_tuple, parameter[obj, cast]]:
constant[
Converts an object *obj* to a tuple and returns it. Objects of types *list* and *set* are
converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new tuple.
]
if call[name[isinstance], parameter[name[obj], name[tuple]]] begin[:]
return[call[name[tuple], parameter[name[obj]]]] | keyword[def] identifier[make_tuple] ( identifier[obj] , identifier[cast] = keyword[True] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[tuple] ):
keyword[return] identifier[tuple] ( identifier[obj] )
keyword[elif] identifier[is_lazy_iterable] ( identifier[obj] ):
keyword[return] identifier[tuple] ( identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] ,( identifier[list] , identifier[set] )) keyword[and] identifier[cast] :
keyword[return] identifier[tuple] ( identifier[obj] )
keyword[else] :
keyword[return] ( identifier[obj] ,) | def make_tuple(obj, cast=True):
"""
Converts an object *obj* to a tuple and returns it. Objects of types *list* and *set* are
converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new tuple.
"""
if isinstance(obj, tuple):
return tuple(obj) # depends on [control=['if'], data=[]]
elif is_lazy_iterable(obj):
return tuple(obj) # depends on [control=['if'], data=[]]
elif isinstance(obj, (list, set)) and cast:
return tuple(obj) # depends on [control=['if'], data=[]]
else:
return (obj,) |
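Behavior at a glance (assuming the module helper is_lazy_iterable treats generators, but not strings, as lazy):
make_tuple([1, 2])              # -> (1, 2): lists are cast by default
make_tuple([1, 2], cast=False)  # -> ([1, 2],): wrapped instead of cast
make_tuple('ab')                # -> ('ab',): strings are never unpacked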
def to_protobuf(self) -> str:
'''encode vote into protobuf'''
vote = pavoteproto.Vote()
vote.version = self.version
vote.description = self.description
vote.count_mode = vote.MODE.Value(self.count_mode)
vote.start_block = self.start_block
vote.end_block = self.end_block
vote.choices.extend(self.choices)
if not isinstance(self.vote_metainfo, bytes):
vote.vote_metainfo = self.vote_metainfo.encode()
else:
vote.vote_metainfo = self.vote_metainfo
proto = vote.SerializeToString()
if len(proto) > 80:
warnings.warn('\nMetainfo size exceeds maximum of 80 bytes allowed by OP_RETURN.')
return proto | def function[to_protobuf, parameter[self]]:
constant[encode vote into protobuf]
variable[vote] assign[=] call[name[pavoteproto].Vote, parameter[]]
name[vote].version assign[=] name[self].version
name[vote].description assign[=] name[self].description
name[vote].count_mode assign[=] call[name[vote].MODE.Value, parameter[name[self].count_mode]]
name[vote].start_block assign[=] name[self].start_block
name[vote].end_block assign[=] name[self].end_block
call[name[vote].choices.extend, parameter[name[self].choices]]
if <ast.UnaryOp object at 0x7da18f00d240> begin[:]
name[vote].vote_metainfo assign[=] call[name[self].vote_metainfo.encode, parameter[]]
variable[proto] assign[=] call[name[vote].SerializeToString, parameter[]]
if compare[call[name[len], parameter[name[proto]]] greater[>] constant[80]] begin[:]
call[name[warnings].warn, parameter[constant[
Metainfo size exceeds maximum of 80 bytes allowed by OP_RETURN.]]]
return[name[proto]] | keyword[def] identifier[to_protobuf] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[vote] = identifier[pavoteproto] . identifier[Vote] ()
identifier[vote] . identifier[version] = identifier[self] . identifier[version]
identifier[vote] . identifier[description] = identifier[self] . identifier[description]
identifier[vote] . identifier[count_mode] = identifier[vote] . identifier[MODE] . identifier[Value] ( identifier[self] . identifier[count_mode] )
identifier[vote] . identifier[start_block] = identifier[self] . identifier[start_block]
identifier[vote] . identifier[end_block] = identifier[self] . identifier[end_block]
identifier[vote] . identifier[choices] . identifier[extend] ( identifier[self] . identifier[choices] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[vote_metainfo] , identifier[bytes] ):
identifier[vote] . identifier[vote_metainfo] = identifier[self] . identifier[vote_metainfo] . identifier[encode] ()
keyword[else] :
identifier[vote] . identifier[vote_metainfo] = identifier[self] . identifier[vote_metainfo]
identifier[proto] = identifier[vote] . identifier[SerializeToString] ()
keyword[if] identifier[len] ( identifier[proto] )> literal[int] :
identifier[warnings] . identifier[warn] ( literal[string] )
keyword[return] identifier[proto] | def to_protobuf(self) -> str:
"""encode vote into protobuf"""
vote = pavoteproto.Vote()
vote.version = self.version
vote.description = self.description
vote.count_mode = vote.MODE.Value(self.count_mode)
vote.start_block = self.start_block
vote.end_block = self.end_block
vote.choices.extend(self.choices)
if not isinstance(self.vote_metainfo, bytes):
vote.vote_metainfo = self.vote_metainfo.encode() # depends on [control=['if'], data=[]]
else:
vote.vote_metainfo = self.vote_metainfo
proto = vote.SerializeToString()
if len(proto) > 80:
warnings.warn('\nMetainfo size exceeds maximum of 80 bytes allowed by OP_RETURN.') # depends on [control=['if'], data=[]]
return proto |
async def proposal(self):
"""Get the proposal in question.
Actually just the first proposal with the same name, but the
chance of a collision is tiny.
Returns
-------
awaitable of :class:`aionationstates.Proposal`
The proposal submitted.
Raises
------
aionationstates.NotFound
If the proposal has since been withdrawn or promoted.
"""
proposals = await aionationstates.wa.proposals()
for proposal in proposals:
if (proposal.name == self.proposal_name):
return proposal
raise aionationstates.NotFound | <ast.AsyncFunctionDef object at 0x7da1b2749b70> | keyword[async] keyword[def] identifier[proposal] ( identifier[self] ):
literal[string]
identifier[proposals] = keyword[await] identifier[aionationstates] . identifier[wa] . identifier[proposals] ()
keyword[for] identifier[proposal] keyword[in] identifier[proposals] :
keyword[if] ( identifier[proposal] . identifier[name] == identifier[self] . identifier[proposal_name] ):
keyword[return] identifier[proposal]
keyword[raise] identifier[aionationstates] . identifier[NotFound] | async def proposal(self):
"""Get the proposal in question.
Actually just the first proposal with the same name, but the
chance of a collision is tiny.
Returns
-------
awaitable of :class:`aionationstates.Proposal`
The proposal submitted.
Raises
------
aionationstates.NotFound
If the proposal has since been withdrawn or promoted.
"""
proposals = await aionationstates.wa.proposals()
for proposal in proposals:
if proposal.name == self.proposal_name:
return proposal # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['proposal']]
raise aionationstates.NotFound |
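A usage sketch inside a coroutine; `happening` is a hypothetical instance of this event class:
proposal = await happening.proposal()  # raises aionationstates.NotFound if withdrawn or promoted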
def _set_attribute(self, name, value):
"""Make sure namespace gets updated when setting attributes."""
setattr(self, name, value)
self.namespace.update({name: getattr(self, name)}) | def function[_set_attribute, parameter[self, name, value]]:
constant[Make sure namespace gets updated when setting attributes.]
call[name[setattr], parameter[name[self], name[name], name[value]]]
call[name[self].namespace.update, parameter[dictionary[[<ast.Name object at 0x7da1b1cf54b0>], [<ast.Call object at 0x7da1b1cf64a0>]]]] | keyword[def] identifier[_set_attribute] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
identifier[setattr] ( identifier[self] , identifier[name] , identifier[value] )
identifier[self] . identifier[namespace] . identifier[update] ({ identifier[name] : identifier[getattr] ( identifier[self] , identifier[name] )}) | def _set_attribute(self, name, value):
"""Make sure namespace gets updated when setting attributes."""
setattr(self, name, value)
self.namespace.update({name: getattr(self, name)}) |
def load_by_pub_key(self, public_key):
"""
This method will load a SSHKey object from DigitalOcean
from a public_key. This method will avoid problems like
uploading the same public_key twice.
"""
data = self.get_data("account/keys/")
for jsoned in data['ssh_keys']:
if jsoned.get('public_key', "") == public_key:
self.id = jsoned['id']
self.load()
return self
return None | def function[load_by_pub_key, parameter[self, public_key]]:
constant[
This method will load a SSHKey object from DigitalOcean
from a public_key. This method will avoid problems like
uploading the same public_key twice.
]
variable[data] assign[=] call[name[self].get_data, parameter[constant[account/keys/]]]
for taget[name[jsoned]] in starred[call[name[data]][constant[ssh_keys]]] begin[:]
if compare[call[name[jsoned].get, parameter[constant[public_key], constant[]]] equal[==] name[public_key]] begin[:]
name[self].id assign[=] call[name[jsoned]][constant[id]]
call[name[self].load, parameter[]]
return[name[self]]
return[constant[None]] | keyword[def] identifier[load_by_pub_key] ( identifier[self] , identifier[public_key] ):
literal[string]
identifier[data] = identifier[self] . identifier[get_data] ( literal[string] )
keyword[for] identifier[jsoned] keyword[in] identifier[data] [ literal[string] ]:
keyword[if] identifier[jsoned] . identifier[get] ( literal[string] , literal[string] )== identifier[public_key] :
identifier[self] . identifier[id] = identifier[jsoned] [ literal[string] ]
identifier[self] . identifier[load] ()
keyword[return] identifier[self]
keyword[return] keyword[None] | def load_by_pub_key(self, public_key):
"""
This method will load a SSHKey object from DigitalOcean
from a public_key. This method will avoid problems like
uploading the same public_key twice.
"""
data = self.get_data('account/keys/')
for jsoned in data['ssh_keys']:
if jsoned.get('public_key', '') == public_key:
self.id = jsoned['id']
self.load()
return self # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['jsoned']]
return None |
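A usage sketch against python-digitalocean; the token and key text are placeholders:
key = SSHKey(token='do_token')  # hypothetical credentials
match = key.load_by_pub_key('ssh-rsa AAAA... user@host')
# `match` is the populated SSHKey, or None if no account key has that public key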
def append_payment_op(self,
destination,
amount,
asset_code='XLM',
asset_issuer=None,
source=None):
"""Append a :class:`Payment <stellar_base.operation.Payment>` operation
to the list of operations.
:param str destination: Account address that receives the payment.
:param str amount: The amount of the currency to send in the payment.
:param str asset_code: The asset code for the asset to send.
:param asset_issuer: The address of the issuer of the asset.
:type asset_issuer: str, None
:param str source: The source address of the payment.
:return: This builder instance.
"""
asset = Asset(code=asset_code, issuer=asset_issuer)
op = operation.Payment(destination, asset, amount, source)
return self.append_op(op) | def function[append_payment_op, parameter[self, destination, amount, asset_code, asset_issuer, source]]:
constant[Append a :class:`Payment <stellar_base.operation.Payment>` operation
to the list of operations.
:param str destination: Account address that receives the payment.
:param str amount: The amount of the currency to send in the payment.
:param str asset_code: The asset code for the asset to send.
:param asset_issuer: The address of the issuer of the asset.
:type asset_issuer: str, None
:param str source: The source address of the payment.
:return: This builder instance.
]
variable[asset] assign[=] call[name[Asset], parameter[]]
variable[op] assign[=] call[name[operation].Payment, parameter[name[destination], name[asset], name[amount], name[source]]]
return[call[name[self].append_op, parameter[name[op]]]] | keyword[def] identifier[append_payment_op] ( identifier[self] ,
identifier[destination] ,
identifier[amount] ,
identifier[asset_code] = literal[string] ,
identifier[asset_issuer] = keyword[None] ,
identifier[source] = keyword[None] ):
literal[string]
identifier[asset] = identifier[Asset] ( identifier[code] = identifier[asset_code] , identifier[issuer] = identifier[asset_issuer] )
identifier[op] = identifier[operation] . identifier[Payment] ( identifier[destination] , identifier[asset] , identifier[amount] , identifier[source] )
keyword[return] identifier[self] . identifier[append_op] ( identifier[op] ) | def append_payment_op(self, destination, amount, asset_code='XLM', asset_issuer=None, source=None):
"""Append a :class:`Payment <stellar_base.operation.Payment>` operation
to the list of operations.
:param str destination: Account address that receives the payment.
:param str amount: The amount of the currency to send in the payment.
:param str asset_code: The asset code for the asset to send.
:param asset_issuer: The address of the issuer of the asset.
:type asset_issuer: str, None
:param str source: The source address of the payment.
:return: This builder instance.
"""
asset = Asset(code=asset_code, issuer=asset_issuer)
op = operation.Payment(destination, asset, amount, source)
return self.append_op(op) |
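A usage sketch, assuming a stellar_base-style Builder; the secret seed and destination address are placeholders, not real keys:

from stellar_base.builder import Builder

builder = Builder(secret='S...placeholder-seed...')
builder.append_payment_op(
    destination='G...placeholder-address...',
    amount='100.50')  # asset_code defaults to the native asset, XLM
builder.sign()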
def put_attachment(self, attachment, content_type, data, headers=None):
"""
Adds a new attachment, or updates an existing attachment, to
the remote document and refreshes the locally cached
Document object accordingly.
:param attachment: Attachment file name used to identify the
attachment.
:param content_type: The http ``Content-Type`` of the attachment used
as an additional header.
:param data: Attachment data defining the attachment content.
:param headers: Optional, additional headers to be sent
with request.
:returns: Attachment addition/update status in JSON format
"""
# need latest rev
self.fetch()
attachment_url = '/'.join((self.document_url, attachment))
if headers is None:
headers = {
'If-Match': self['_rev'],
'Content-Type': content_type
}
else:
headers['If-Match'] = self['_rev']
headers['Content-Type'] = content_type
resp = self.r_session.put(
attachment_url,
data=data,
headers=headers
)
resp.raise_for_status()
self.fetch()
return response_to_json_dict(resp) | def function[put_attachment, parameter[self, attachment, content_type, data, headers]]:
constant[
Adds a new attachment, or updates an existing attachment, to
the remote document and refreshes the locally cached
Document object accordingly.
:param attachment: Attachment file name used to identify the
attachment.
:param content_type: The http ``Content-Type`` of the attachment used
as an additional header.
:param data: Attachment data defining the attachment content.
:param headers: Optional, additional headers to be sent
with request.
:returns: Attachment addition/update status in JSON format
]
call[name[self].fetch, parameter[]]
variable[attachment_url] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da20c76d630>, <ast.Name object at 0x7da20c76de70>]]]]
if compare[name[headers] is constant[None]] begin[:]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da20e955060>, <ast.Constant object at 0x7da20e957d30>], [<ast.Subscript object at 0x7da20e957cd0>, <ast.Name object at 0x7da20e9557b0>]]
variable[resp] assign[=] call[name[self].r_session.put, parameter[name[attachment_url]]]
call[name[resp].raise_for_status, parameter[]]
call[name[self].fetch, parameter[]]
return[call[name[response_to_json_dict], parameter[name[resp]]]] | keyword[def] identifier[put_attachment] ( identifier[self] , identifier[attachment] , identifier[content_type] , identifier[data] , identifier[headers] = keyword[None] ):
literal[string]
identifier[self] . identifier[fetch] ()
identifier[attachment_url] = literal[string] . identifier[join] (( identifier[self] . identifier[document_url] , identifier[attachment] ))
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[headers] ={
literal[string] : identifier[self] [ literal[string] ],
literal[string] : identifier[content_type]
}
keyword[else] :
identifier[headers] [ literal[string] ]= identifier[self] [ literal[string] ]
identifier[headers] [ literal[string] ]= identifier[content_type]
identifier[resp] = identifier[self] . identifier[r_session] . identifier[put] (
identifier[attachment_url] ,
identifier[data] = identifier[data] ,
identifier[headers] = identifier[headers]
)
identifier[resp] . identifier[raise_for_status] ()
identifier[self] . identifier[fetch] ()
keyword[return] identifier[response_to_json_dict] ( identifier[resp] ) | def put_attachment(self, attachment, content_type, data, headers=None):
"""
Adds a new attachment, or updates an existing attachment, to
the remote document and refreshes the locally cached
Document object accordingly.
:param attachment: Attachment file name used to identify the
attachment.
:param content_type: The http ``Content-Type`` of the attachment used
as an additional header.
:param data: Attachment data defining the attachment content.
:param headers: Optional, additional headers to be sent
with request.
:returns: Attachment addition/update status in JSON format
"""
# need latest rev
self.fetch()
attachment_url = '/'.join((self.document_url, attachment))
if headers is None:
headers = {'If-Match': self['_rev'], 'Content-Type': content_type} # depends on [control=['if'], data=['headers']]
else:
headers['If-Match'] = self['_rev']
headers['Content-Type'] = content_type
resp = self.r_session.put(attachment_url, data=data, headers=headers)
resp.raise_for_status()
self.fetch()
return response_to_json_dict(resp) |
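A usage sketch against a python-cloudant-style client; the server URL, credentials, and document id are placeholders:

from cloudant.client import CouchDB
from cloudant.document import Document

client = CouchDB('user', 'pass', url='http://localhost:5984', connect=True)
db = client['mydb']
doc = Document(db, 'my-doc-id')
doc.create()
status = doc.put_attachment('notes.txt', 'text/plain', b'hello attachment')
print(status)  # e.g. {'ok': True, 'id': 'my-doc-id', 'rev': '2-...'}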
def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod | def function[is_mod_function, parameter[mod, fun]]:
constant[Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
]
return[<ast.BoolOp object at 0x7da1b0fe4b80>] | keyword[def] identifier[is_mod_function] ( identifier[mod] , identifier[fun] ):
literal[string]
keyword[return] identifier[inspect] . identifier[isfunction] ( identifier[fun] ) keyword[and] identifier[inspect] . identifier[getmodule] ( identifier[fun] )== identifier[mod] | def is_mod_function(mod, fun):
"""Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
"""
return inspect.isfunction(fun) and inspect.getmodule(fun) == mod |
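Paired with inspect.getmembers, the check filters out names a module merely imports; json is just a convenient stdlib module to probe:

import inspect
import json

local_funcs = [name for name, obj in inspect.getmembers(json)
               if is_mod_function(json, obj)]
print(local_funcs)  # e.g. ['detect_encoding', 'dump', 'dumps', 'load', 'loads']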
def phase2radians(phasedata, v0):
""" Convert phase in seconds to phase in radians
Parameters
----------
phasedata: np.array
Data array of phase in seconds
v0: float
Nominal oscillator frequency in Hz
Returns
-------
fi:
phase data in radians
"""
fi = [2*np.pi*v0*xx for xx in phasedata]
return fi | def function[phase2radians, parameter[phasedata, v0]]:
constant[ Convert phase in seconds to phase in radians
Parameters
----------
phasedata: np.array
Data array of phase in seconds
v0: float
Nominal oscillator frequency in Hz
Returns
-------
fi:
phase data in radians
]
variable[fi] assign[=] <ast.ListComp object at 0x7da1b153dc00>
return[name[fi]] | keyword[def] identifier[phase2radians] ( identifier[phasedata] , identifier[v0] ):
literal[string]
identifier[fi] =[ literal[int] * identifier[np] . identifier[pi] * identifier[v0] * identifier[xx] keyword[for] identifier[xx] keyword[in] identifier[phasedata] ]
keyword[return] identifier[fi] | def phase2radians(phasedata, v0):
""" Convert phase in seconds to phase in radians
Parameters
----------
phasedata: np.array
Data array of phase in seconds
v0: float
Nominal oscillator frequency in Hz
Returns
-------
fi:
phase data in radians
"""
fi = [2 * np.pi * v0 * xx for xx in phasedata]
return fi |
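A quick numeric check of the conversion, assuming a 10 MHz nominal oscillator frequency:

import numpy as np

phase_seconds = np.array([1e-9, 2e-9, -5e-10])  # phase deviations in seconds
v0 = 10e6                                       # 10 MHz oscillator
fi = phase2radians(phase_seconds, v0)
print(fi[0])  # 2*pi * 1e7 * 1e-9 ~= 0.0628 rad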
def list_tokens(opts):
'''
List all tokens in the store.
:param opts: Salt master config options
    :returns: List of token identifiers (strings)
'''
ret = []
redis_client = _redis_client(opts)
if not redis_client:
return []
serial = salt.payload.Serial(opts)
try:
return [k.decode('utf8') for k in redis_client.keys()]
except Exception as err:
log.warning('Failed to list keys: %s', err)
return [] | def function[list_tokens, parameter[opts]]:
constant[
List all tokens in the store.
:param opts: Salt master config options
    :returns: List of token identifiers (strings)
]
variable[ret] assign[=] list[[]]
variable[redis_client] assign[=] call[name[_redis_client], parameter[name[opts]]]
if <ast.UnaryOp object at 0x7da1b1ebf700> begin[:]
return[list[[]]]
variable[serial] assign[=] call[name[salt].payload.Serial, parameter[name[opts]]]
<ast.Try object at 0x7da1b1ebfa90> | keyword[def] identifier[list_tokens] ( identifier[opts] ):
literal[string]
identifier[ret] =[]
identifier[redis_client] = identifier[_redis_client] ( identifier[opts] )
keyword[if] keyword[not] identifier[redis_client] :
keyword[return] []
identifier[serial] = identifier[salt] . identifier[payload] . identifier[Serial] ( identifier[opts] )
keyword[try] :
keyword[return] [ identifier[k] . identifier[decode] ( literal[string] ) keyword[for] identifier[k] keyword[in] identifier[redis_client] . identifier[keys] ()]
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[log] . identifier[warning] ( literal[string] , identifier[err] )
keyword[return] [] | def list_tokens(opts):
"""
List all tokens in the store.
:param opts: Salt master config options
    :returns: List of token identifiers (strings)
"""
ret = []
redis_client = _redis_client(opts)
if not redis_client:
return [] # depends on [control=['if'], data=[]]
serial = salt.payload.Serial(opts)
try:
return [k.decode('utf8') for k in redis_client.keys()] # depends on [control=['try'], data=[]]
except Exception as err:
log.warning('Failed to list keys: %s', err)
return [] # depends on [control=['except'], data=['err']] |
def memberness(context):
'''The likelihood that the context is a "member".'''
if context:
texts = context.xpath('.//*[local-name()="explicitMember"]/text()').extract()
text = str(texts).lower()
if len(texts) > 1:
return 2
elif 'country' in text:
return 2
elif 'member' not in text:
return 0
elif 'successor' in text:
            # 'SuccessorMember' is a rare case that shouldn't be treated as a member
return 1
elif 'parent' in text:
return 2
return 3 | def function[memberness, parameter[context]]:
constant[The likelihood that the context is a "member".]
if name[context] begin[:]
variable[texts] assign[=] call[call[name[context].xpath, parameter[constant[.//*[local-name()="explicitMember"]/text()]]].extract, parameter[]]
variable[text] assign[=] call[call[name[str], parameter[name[texts]]].lower, parameter[]]
if compare[call[name[len], parameter[name[texts]]] greater[>] constant[1]] begin[:]
return[constant[2]]
return[constant[3]] | keyword[def] identifier[memberness] ( identifier[context] ):
literal[string]
keyword[if] identifier[context] :
identifier[texts] = identifier[context] . identifier[xpath] ( literal[string] ). identifier[extract] ()
identifier[text] = identifier[str] ( identifier[texts] ). identifier[lower] ()
keyword[if] identifier[len] ( identifier[texts] )> literal[int] :
keyword[return] literal[int]
keyword[elif] literal[string] keyword[in] identifier[text] :
keyword[return] literal[int]
keyword[elif] literal[string] keyword[not] keyword[in] identifier[text] :
keyword[return] literal[int]
keyword[elif] literal[string] keyword[in] identifier[text] :
keyword[return] literal[int]
keyword[elif] literal[string] keyword[in] identifier[text] :
keyword[return] literal[int]
keyword[return] literal[int] | def memberness(context):
"""The likelihood that the context is a "member"."""
if context:
texts = context.xpath('.//*[local-name()="explicitMember"]/text()').extract()
text = str(texts).lower()
if len(texts) > 1:
return 2 # depends on [control=['if'], data=[]]
elif 'country' in text:
return 2 # depends on [control=['if'], data=[]]
elif 'member' not in text:
return 0 # depends on [control=['if'], data=[]]
elif 'successor' in text:
            # 'SuccessorMember' is a rare case that shouldn't be treated as a member
return 1 # depends on [control=['if'], data=[]]
elif 'parent' in text:
return 2 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return 3 |
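A behavior sketch using a scrapy Selector as the context, since the function only needs .xpath(...).extract(); the XBRL fragment is fabricated for illustration:

from scrapy.selector import Selector

ctx = Selector(
    text='<context><explicitMember dimension="d">'
         'us-gaap:SuccessorMember</explicitMember></context>',
    type='xml')
print(memberness(ctx))   # 1: 'successor' ranks below an ordinary member
print(memberness(None))  # 3: an empty context falls through to the default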
def datetime(self):
        'Minute-bar data returns datetime; daily-bar data returns date'
index = self.data.index.remove_unused_levels()
return pd.to_datetime(index.levels[0]) | def function[datetime, parameter[self]]:
    constant[Minute-bar data returns datetime; daily-bar data returns date]
variable[index] assign[=] call[name[self].data.index.remove_unused_levels, parameter[]]
return[call[name[pd].to_datetime, parameter[call[name[index].levels][constant[0]]]]] | keyword[def] identifier[datetime] ( identifier[self] ):
literal[string]
identifier[index] = identifier[self] . identifier[data] . identifier[index] . identifier[remove_unused_levels] ()
keyword[return] identifier[pd] . identifier[to_datetime] ( identifier[index] . identifier[levels] [ literal[int] ]) | def datetime(self):
"""分钟线结构返回datetime 日线结构返回date"""
index = self.data.index.remove_unused_levels()
return pd.to_datetime(index.levels[0]) |
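A sketch of what the property computes, assuming a pandas DataFrame indexed by a (datetime, code) MultiIndex as in the bar structures it describes:

import pandas as pd

idx = pd.MultiIndex.from_product(
    [pd.to_datetime(['2019-01-02', '2019-01-03']), ['000001', '600000']],
    names=['datetime', 'code'])
data = pd.DataFrame({'close': [10.0, 10.2, 20.0, 20.4]}, index=idx)

index = data.index.remove_unused_levels()  # same steps as the property
print(pd.to_datetime(index.levels[0]))     # DatetimeIndex(['2019-01-02', '2019-01-03'], ...)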
def create_vector_observation_encoder(observation_input, h_size, activation, num_layers, scope,
reuse):
"""
Builds a set of hidden state encoders.
:param reuse: Whether to re-use the weights within the same scope.
:param scope: Graph scope for the encoder ops.
:param observation_input: Input vector.
:param h_size: Hidden layer size.
:param activation: What type of activation function to use for layers.
:param num_layers: number of hidden layers to create.
    :return: Tensor output of the final hidden layer.
"""
with tf.variable_scope(scope):
hidden = observation_input
for i in range(num_layers):
hidden = tf.layers.dense(hidden, h_size, activation=activation, reuse=reuse,
name="hidden_{}".format(i),
kernel_initializer=c_layers.variance_scaling_initializer(
1.0))
return hidden | def function[create_vector_observation_encoder, parameter[observation_input, h_size, activation, num_layers, scope, reuse]]:
constant[
Builds a set of hidden state encoders.
:param reuse: Whether to re-use the weights within the same scope.
:param scope: Graph scope for the encoder ops.
:param observation_input: Input vector.
:param h_size: Hidden layer size.
:param activation: What type of activation function to use for layers.
:param num_layers: number of hidden layers to create.
    :return: Tensor output of the final hidden layer.
]
with call[name[tf].variable_scope, parameter[name[scope]]] begin[:]
variable[hidden] assign[=] name[observation_input]
for taget[name[i]] in starred[call[name[range], parameter[name[num_layers]]]] begin[:]
variable[hidden] assign[=] call[name[tf].layers.dense, parameter[name[hidden], name[h_size]]]
return[name[hidden]] | keyword[def] identifier[create_vector_observation_encoder] ( identifier[observation_input] , identifier[h_size] , identifier[activation] , identifier[num_layers] , identifier[scope] ,
identifier[reuse] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[scope] ):
identifier[hidden] = identifier[observation_input]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_layers] ):
identifier[hidden] = identifier[tf] . identifier[layers] . identifier[dense] ( identifier[hidden] , identifier[h_size] , identifier[activation] = identifier[activation] , identifier[reuse] = identifier[reuse] ,
identifier[name] = literal[string] . identifier[format] ( identifier[i] ),
identifier[kernel_initializer] = identifier[c_layers] . identifier[variance_scaling_initializer] (
literal[int] ))
keyword[return] identifier[hidden] | def create_vector_observation_encoder(observation_input, h_size, activation, num_layers, scope, reuse):
"""
Builds a set of hidden state encoders.
:param reuse: Whether to re-use the weights within the same scope.
:param scope: Graph scope for the encoder ops.
:param observation_input: Input vector.
:param h_size: Hidden layer size.
:param activation: What type of activation function to use for layers.
:param num_layers: number of hidden layers to create.
    :return: Tensor output of the final hidden layer.
"""
with tf.variable_scope(scope):
hidden = observation_input
for i in range(num_layers):
hidden = tf.layers.dense(hidden, h_size, activation=activation, reuse=reuse, name='hidden_{}'.format(i), kernel_initializer=c_layers.variance_scaling_initializer(1.0)) # depends on [control=['for'], data=['i']] # depends on [control=['with'], data=[]]
return hidden |
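A usage sketch under TensorFlow 1.x, where tf.layers.dense and tensorflow.contrib.layers (imported as c_layers, as the body expects) are still available:

import tensorflow as tf
import tensorflow.contrib.layers as c_layers  # TF 1.x only

obs = tf.placeholder(tf.float32, shape=[None, 8], name='vector_obs')
hidden = create_vector_observation_encoder(
    obs, h_size=128, activation=tf.nn.relu,
    num_layers=2, scope='encoder', reuse=False)
print(hidden)  # Tensor of shape (?, 128) -- only the last layer is returned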
def get_assessment_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.AssessmentQuerySession) - an
``AssessmentQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_assessment_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` is ``true``.*
"""
if not self.supports_assessment_query():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssessmentQuerySession(proxy=proxy, runtime=self._runtime) | def function[get_assessment_query_session, parameter[self, proxy]]:
constant[Gets the ``OsidSession`` associated with the assessment query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.AssessmentQuerySession) - an
``AssessmentQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_assessment_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` is ``true``.*
]
if <ast.UnaryOp object at 0x7da20c6e7580> begin[:]
<ast.Raise object at 0x7da20c6e4bb0>
return[call[name[sessions].AssessmentQuerySession, parameter[]]] | keyword[def] identifier[get_assessment_query_session] ( identifier[self] , identifier[proxy] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_assessment_query] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[AssessmentQuerySession] ( identifier[proxy] = identifier[proxy] , identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_assessment_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.AssessmentQuerySession) - an
``AssessmentQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_assessment_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` is ``true``.*
"""
if not self.supports_assessment_query():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.AssessmentQuerySession(proxy=proxy, runtime=self._runtime) |
def configure_app(app):
"""Configure Flask/Celery application.
* Rio will find environment variable `RIO_SETTINGS` first::
$ export RIO_SETTINGS=/path/to/settings.cfg
$ rio worker
    * If `RIO_SETTINGS` is missing, Rio will try to load a configuration
    module from `rio.settings` according to another environment
    variable, `RIO_ENV`. It defaults to `rio.settings.dev`.
$ export RIO_ENV=prod
$ rio worker
"""
app.config_from_object('rio.settings.default')
if environ.get('RIO_SETTINGS'):
app.config_from_envvar('RIO_SETTINGS')
return
config_map = {
'dev': 'rio.settings.dev',
'stag': 'rio.settings.stag',
'prod': 'rio.settings.prod',
'test': 'rio.settings.test',
}
rio_env = environ.get('RIO_ENV', 'dev')
config = config_map.get(rio_env, config_map['dev'])
app.config_from_object(config) | def function[configure_app, parameter[app]]:
constant[Configure Flask/Celery application.
* Rio will find environment variable `RIO_SETTINGS` first::
$ export RIO_SETTINGS=/path/to/settings.cfg
$ rio worker
    * If `RIO_SETTINGS` is missing, Rio will try to load a configuration
    module from `rio.settings` according to another environment
    variable, `RIO_ENV`. It defaults to `rio.settings.dev`.
$ export RIO_ENV=prod
$ rio worker
]
call[name[app].config_from_object, parameter[constant[rio.settings.default]]]
if call[name[environ].get, parameter[constant[RIO_SETTINGS]]] begin[:]
call[name[app].config_from_envvar, parameter[constant[RIO_SETTINGS]]]
return[None]
variable[config_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b09ee830>, <ast.Constant object at 0x7da1b09ed330>, <ast.Constant object at 0x7da1b09ed9c0>, <ast.Constant object at 0x7da1b09ef9d0>], [<ast.Constant object at 0x7da1b09ef7c0>, <ast.Constant object at 0x7da1b09ef0d0>, <ast.Constant object at 0x7da1b09ef640>, <ast.Constant object at 0x7da1b09ec2e0>]]
variable[rio_env] assign[=] call[name[environ].get, parameter[constant[RIO_ENV], constant[dev]]]
variable[config] assign[=] call[name[config_map].get, parameter[name[rio_env], call[name[config_map]][constant[dev]]]]
call[name[app].config_from_object, parameter[name[config]]] | keyword[def] identifier[configure_app] ( identifier[app] ):
literal[string]
identifier[app] . identifier[config_from_object] ( literal[string] )
keyword[if] identifier[environ] . identifier[get] ( literal[string] ):
identifier[app] . identifier[config_from_envvar] ( literal[string] )
keyword[return]
identifier[config_map] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[rio_env] = identifier[environ] . identifier[get] ( literal[string] , literal[string] )
identifier[config] = identifier[config_map] . identifier[get] ( identifier[rio_env] , identifier[config_map] [ literal[string] ])
identifier[app] . identifier[config_from_object] ( identifier[config] ) | def configure_app(app):
"""Configure Flask/Celery application.
* Rio will find environment variable `RIO_SETTINGS` first::
$ export RIO_SETTINGS=/path/to/settings.cfg
$ rio worker
    * If `RIO_SETTINGS` is missing, Rio will try to load a configuration
    module from `rio.settings` according to another environment
    variable, `RIO_ENV`. It defaults to `rio.settings.dev`.
$ export RIO_ENV=prod
$ rio worker
"""
app.config_from_object('rio.settings.default')
if environ.get('RIO_SETTINGS'):
app.config_from_envvar('RIO_SETTINGS')
return # depends on [control=['if'], data=[]]
config_map = {'dev': 'rio.settings.dev', 'stag': 'rio.settings.stag', 'prod': 'rio.settings.prod', 'test': 'rio.settings.test'}
rio_env = environ.get('RIO_ENV', 'dev')
config = config_map.get(rio_env, config_map['dev'])
app.config_from_object(config) |
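A usage sketch, assuming the rio.settings.* modules are importable and app is a Celery application (whose config_from_object/config_from_envvar this relies on):

from os import environ
from celery import Celery

app = Celery('rio')
environ.pop('RIO_SETTINGS', None)  # ensure the file override is unset
environ['RIO_ENV'] = 'test'        # selects rio.settings.test
configure_app(app)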
def xmlSetup(self, logType, logList):
"""Create xml file with fields from logbook form."""
from xml.etree.ElementTree import Element, SubElement, ElementTree
from datetime import datetime
curr_time = datetime.now()
if logType == "MCC":
# Set up xml tags
log_entry = Element('log_entry')
title = SubElement(log_entry, 'title')
program = SubElement(log_entry, 'program')
timestamp = SubElement(log_entry, 'timestamp')
priority = SubElement(log_entry, 'priority')
os_user = SubElement(log_entry, 'os_user')
hostname = SubElement(log_entry, 'hostname')
text = SubElement(log_entry, 'text')
log_user = SubElement(log_entry, 'log_user')
        # Check for multiple logbooks and parse into separate tags
logbook = []
for i in range(len(logList)):
logbook.append(SubElement(log_entry, 'logbook'))
logbook[i].text = logList[i].lower()
# Take care of dummy, unchanging tags first
log_entry.attrib['type'] = "LOGENTRY"
program.text = "152"
priority.text = "NORMAL"
os_user.text = "nobody"
hostname.text = "mccelog"
text.attrib['type'] = "text/plain"
# Handle attachment if image exists
if not self.imagePixmap.isNull():
attachment = SubElement(log_entry, 'attachment')
attachment.attrib['name'] = "Figure 1"
attachment.attrib['type'] = "image/" + self.imageType
attachment.text = curr_time.strftime("%Y%m%d_%H%M%S_") + str(curr_time.microsecond) + "." + self.imageType
# Set timestamp format
timestamp.text = curr_time.strftime("%Y/%m/%d %H:%M:%S")
fileName = "/tmp/" + curr_time.strftime("%Y%m%d_%H%M%S_") + str(curr_time.microsecond) + ".xml"
else: # If using Physics logbook
timeString = curr_time.strftime("%Y-%m-%dT%H:%M:%S")
# Set up xml tags
log_entry = Element(None)
severity = SubElement(log_entry, 'severity')
location = SubElement(log_entry, 'location')
keywords = SubElement(log_entry, 'keywords')
time = SubElement(log_entry, 'time')
isodate = SubElement(log_entry, 'isodate')
log_user = SubElement(log_entry, 'author')
category = SubElement(log_entry, 'category')
title = SubElement(log_entry, 'title')
metainfo = SubElement(log_entry, 'metainfo')
# Handle attachment if image exists
if not self.imagePixmap.isNull():
imageFile = SubElement(log_entry, 'link')
imageFile.text = timeString + "-00." + self.imageType
thumbnail = SubElement(log_entry, 'file')
thumbnail.text = timeString + "-00.png"
text = SubElement(log_entry, 'text') # Logbook expects Text tag to come last (for some strange reason)
# Take care of dummy, unchanging tags first
log_entry.attrib['type'] = "LOGENTRY"
category.text = "USERLOG"
location.text = "not set"
severity.text = "NONE"
keywords.text = "none"
time.text = curr_time.strftime("%H:%M:%S")
isodate.text = curr_time.strftime("%Y-%m-%d")
metainfo.text = timeString + "-00.xml"
fileName = "/tmp/" + metainfo.text
# Fill in user inputs
log_user.text = str(self.logui.userName.text())
title.text = str(self.logui.titleEntry.text())
if title.text == "":
QMessageBox().warning(self, "No Title entered", "Please enter a title for the entry...")
return None
text.text = str(self.logui.textEntry.toPlainText())
# If text field is truly empty, ElementTree leaves off tag entirely which causes logbook parser to fail
if text.text == "":
text.text = " "
# Create xml file
xmlFile = open(fileName, "w")
if logType == "MCC":
ElementTree(log_entry).write(xmlFile)
else:
xmlString = self.prettify(log_entry)
xmlFile.write(xmlString)
xmlFile.write("\n") # Close with newline so cron job parses correctly
xmlFile.close()
return fileName.rstrip(".xml") | def function[xmlSetup, parameter[self, logType, logList]]:
constant[Create xml file with fields from logbook form.]
from relative_module[xml.etree.ElementTree] import module[Element], module[SubElement], module[ElementTree]
from relative_module[datetime] import module[datetime]
variable[curr_time] assign[=] call[name[datetime].now, parameter[]]
if compare[name[logType] equal[==] constant[MCC]] begin[:]
variable[log_entry] assign[=] call[name[Element], parameter[constant[log_entry]]]
variable[title] assign[=] call[name[SubElement], parameter[name[log_entry], constant[title]]]
variable[program] assign[=] call[name[SubElement], parameter[name[log_entry], constant[program]]]
variable[timestamp] assign[=] call[name[SubElement], parameter[name[log_entry], constant[timestamp]]]
variable[priority] assign[=] call[name[SubElement], parameter[name[log_entry], constant[priority]]]
variable[os_user] assign[=] call[name[SubElement], parameter[name[log_entry], constant[os_user]]]
variable[hostname] assign[=] call[name[SubElement], parameter[name[log_entry], constant[hostname]]]
variable[text] assign[=] call[name[SubElement], parameter[name[log_entry], constant[text]]]
variable[log_user] assign[=] call[name[SubElement], parameter[name[log_entry], constant[log_user]]]
variable[logbook] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[logList]]]]]] begin[:]
call[name[logbook].append, parameter[call[name[SubElement], parameter[name[log_entry], constant[logbook]]]]]
call[name[logbook]][name[i]].text assign[=] call[call[name[logList]][name[i]].lower, parameter[]]
call[name[log_entry].attrib][constant[type]] assign[=] constant[LOGENTRY]
name[program].text assign[=] constant[152]
name[priority].text assign[=] constant[NORMAL]
name[os_user].text assign[=] constant[nobody]
name[hostname].text assign[=] constant[mccelog]
call[name[text].attrib][constant[type]] assign[=] constant[text/plain]
if <ast.UnaryOp object at 0x7da1b004fd30> begin[:]
variable[attachment] assign[=] call[name[SubElement], parameter[name[log_entry], constant[attachment]]]
call[name[attachment].attrib][constant[name]] assign[=] constant[Figure 1]
call[name[attachment].attrib][constant[type]] assign[=] binary_operation[constant[image/] + name[self].imageType]
name[attachment].text assign[=] binary_operation[binary_operation[binary_operation[call[name[curr_time].strftime, parameter[constant[%Y%m%d_%H%M%S_]]] + call[name[str], parameter[name[curr_time].microsecond]]] + constant[.]] + name[self].imageType]
name[timestamp].text assign[=] call[name[curr_time].strftime, parameter[constant[%Y/%m/%d %H:%M:%S]]]
variable[fileName] assign[=] binary_operation[binary_operation[binary_operation[constant[/tmp/] + call[name[curr_time].strftime, parameter[constant[%Y%m%d_%H%M%S_]]]] + call[name[str], parameter[name[curr_time].microsecond]]] + constant[.xml]]
name[log_user].text assign[=] call[name[str], parameter[call[name[self].logui.userName.text, parameter[]]]]
name[title].text assign[=] call[name[str], parameter[call[name[self].logui.titleEntry.text, parameter[]]]]
if compare[name[title].text equal[==] constant[]] begin[:]
call[call[name[QMessageBox], parameter[]].warning, parameter[name[self], constant[No Title entered], constant[Please enter a title for the entry...]]]
return[constant[None]]
name[text].text assign[=] call[name[str], parameter[call[name[self].logui.textEntry.toPlainText, parameter[]]]]
if compare[name[text].text equal[==] constant[]] begin[:]
name[text].text assign[=] constant[ ]
variable[xmlFile] assign[=] call[name[open], parameter[name[fileName], constant[w]]]
if compare[name[logType] equal[==] constant[MCC]] begin[:]
call[call[name[ElementTree], parameter[name[log_entry]]].write, parameter[name[xmlFile]]]
call[name[xmlFile].write, parameter[constant[
]]]
call[name[xmlFile].close, parameter[]]
return[call[name[fileName].rstrip, parameter[constant[.xml]]]] | keyword[def] identifier[xmlSetup] ( identifier[self] , identifier[logType] , identifier[logList] ):
literal[string]
keyword[from] identifier[xml] . identifier[etree] . identifier[ElementTree] keyword[import] identifier[Element] , identifier[SubElement] , identifier[ElementTree]
keyword[from] identifier[datetime] keyword[import] identifier[datetime]
identifier[curr_time] = identifier[datetime] . identifier[now] ()
keyword[if] identifier[logType] == literal[string] :
identifier[log_entry] = identifier[Element] ( literal[string] )
identifier[title] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[program] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[timestamp] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[priority] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[os_user] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[hostname] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[text] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[log_user] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[logbook] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[logList] )):
identifier[logbook] . identifier[append] ( identifier[SubElement] ( identifier[log_entry] , literal[string] ))
identifier[logbook] [ identifier[i] ]. identifier[text] = identifier[logList] [ identifier[i] ]. identifier[lower] ()
identifier[log_entry] . identifier[attrib] [ literal[string] ]= literal[string]
identifier[program] . identifier[text] = literal[string]
identifier[priority] . identifier[text] = literal[string]
identifier[os_user] . identifier[text] = literal[string]
identifier[hostname] . identifier[text] = literal[string]
identifier[text] . identifier[attrib] [ literal[string] ]= literal[string]
keyword[if] keyword[not] identifier[self] . identifier[imagePixmap] . identifier[isNull] ():
identifier[attachment] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[attachment] . identifier[attrib] [ literal[string] ]= literal[string]
identifier[attachment] . identifier[attrib] [ literal[string] ]= literal[string] + identifier[self] . identifier[imageType]
identifier[attachment] . identifier[text] = identifier[curr_time] . identifier[strftime] ( literal[string] )+ identifier[str] ( identifier[curr_time] . identifier[microsecond] )+ literal[string] + identifier[self] . identifier[imageType]
identifier[timestamp] . identifier[text] = identifier[curr_time] . identifier[strftime] ( literal[string] )
identifier[fileName] = literal[string] + identifier[curr_time] . identifier[strftime] ( literal[string] )+ identifier[str] ( identifier[curr_time] . identifier[microsecond] )+ literal[string]
keyword[else] :
identifier[timeString] = identifier[curr_time] . identifier[strftime] ( literal[string] )
identifier[log_entry] = identifier[Element] ( keyword[None] )
identifier[severity] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[location] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[keywords] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[time] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[isodate] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[log_user] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[category] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[title] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[metainfo] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[imagePixmap] . identifier[isNull] ():
identifier[imageFile] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[imageFile] . identifier[text] = identifier[timeString] + literal[string] + identifier[self] . identifier[imageType]
identifier[thumbnail] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[thumbnail] . identifier[text] = identifier[timeString] + literal[string]
identifier[text] = identifier[SubElement] ( identifier[log_entry] , literal[string] )
identifier[log_entry] . identifier[attrib] [ literal[string] ]= literal[string]
identifier[category] . identifier[text] = literal[string]
identifier[location] . identifier[text] = literal[string]
identifier[severity] . identifier[text] = literal[string]
identifier[keywords] . identifier[text] = literal[string]
identifier[time] . identifier[text] = identifier[curr_time] . identifier[strftime] ( literal[string] )
identifier[isodate] . identifier[text] = identifier[curr_time] . identifier[strftime] ( literal[string] )
identifier[metainfo] . identifier[text] = identifier[timeString] + literal[string]
identifier[fileName] = literal[string] + identifier[metainfo] . identifier[text]
identifier[log_user] . identifier[text] = identifier[str] ( identifier[self] . identifier[logui] . identifier[userName] . identifier[text] ())
identifier[title] . identifier[text] = identifier[str] ( identifier[self] . identifier[logui] . identifier[titleEntry] . identifier[text] ())
keyword[if] identifier[title] . identifier[text] == literal[string] :
identifier[QMessageBox] (). identifier[warning] ( identifier[self] , literal[string] , literal[string] )
keyword[return] keyword[None]
identifier[text] . identifier[text] = identifier[str] ( identifier[self] . identifier[logui] . identifier[textEntry] . identifier[toPlainText] ())
keyword[if] identifier[text] . identifier[text] == literal[string] :
identifier[text] . identifier[text] = literal[string]
identifier[xmlFile] = identifier[open] ( identifier[fileName] , literal[string] )
keyword[if] identifier[logType] == literal[string] :
identifier[ElementTree] ( identifier[log_entry] ). identifier[write] ( identifier[xmlFile] )
keyword[else] :
identifier[xmlString] = identifier[self] . identifier[prettify] ( identifier[log_entry] )
identifier[xmlFile] . identifier[write] ( identifier[xmlString] )
identifier[xmlFile] . identifier[write] ( literal[string] )
identifier[xmlFile] . identifier[close] ()
keyword[return] identifier[fileName] . identifier[rstrip] ( literal[string] ) | def xmlSetup(self, logType, logList):
"""Create xml file with fields from logbook form."""
from xml.etree.ElementTree import Element, SubElement, ElementTree
from datetime import datetime
curr_time = datetime.now()
if logType == 'MCC':
# Set up xml tags
log_entry = Element('log_entry')
title = SubElement(log_entry, 'title')
program = SubElement(log_entry, 'program')
timestamp = SubElement(log_entry, 'timestamp')
priority = SubElement(log_entry, 'priority')
os_user = SubElement(log_entry, 'os_user')
hostname = SubElement(log_entry, 'hostname')
text = SubElement(log_entry, 'text')
log_user = SubElement(log_entry, 'log_user')
        # Check for multiple logbooks and parse into separate tags
logbook = []
for i in range(len(logList)):
logbook.append(SubElement(log_entry, 'logbook'))
logbook[i].text = logList[i].lower() # depends on [control=['for'], data=['i']]
# Take care of dummy, unchanging tags first
log_entry.attrib['type'] = 'LOGENTRY'
program.text = '152'
priority.text = 'NORMAL'
os_user.text = 'nobody'
hostname.text = 'mccelog'
text.attrib['type'] = 'text/plain'
# Handle attachment if image exists
if not self.imagePixmap.isNull():
attachment = SubElement(log_entry, 'attachment')
attachment.attrib['name'] = 'Figure 1'
attachment.attrib['type'] = 'image/' + self.imageType
attachment.text = curr_time.strftime('%Y%m%d_%H%M%S_') + str(curr_time.microsecond) + '.' + self.imageType # depends on [control=['if'], data=[]]
# Set timestamp format
timestamp.text = curr_time.strftime('%Y/%m/%d %H:%M:%S')
fileName = '/tmp/' + curr_time.strftime('%Y%m%d_%H%M%S_') + str(curr_time.microsecond) + '.xml' # depends on [control=['if'], data=[]]
else: # If using Physics logbook
timeString = curr_time.strftime('%Y-%m-%dT%H:%M:%S')
# Set up xml tags
log_entry = Element(None)
severity = SubElement(log_entry, 'severity')
location = SubElement(log_entry, 'location')
keywords = SubElement(log_entry, 'keywords')
time = SubElement(log_entry, 'time')
isodate = SubElement(log_entry, 'isodate')
log_user = SubElement(log_entry, 'author')
category = SubElement(log_entry, 'category')
title = SubElement(log_entry, 'title')
metainfo = SubElement(log_entry, 'metainfo')
# Handle attachment if image exists
if not self.imagePixmap.isNull():
imageFile = SubElement(log_entry, 'link')
imageFile.text = timeString + '-00.' + self.imageType
thumbnail = SubElement(log_entry, 'file')
thumbnail.text = timeString + '-00.png' # depends on [control=['if'], data=[]]
text = SubElement(log_entry, 'text') # Logbook expects Text tag to come last (for some strange reason)
# Take care of dummy, unchanging tags first
log_entry.attrib['type'] = 'LOGENTRY'
category.text = 'USERLOG'
location.text = 'not set'
severity.text = 'NONE'
keywords.text = 'none'
time.text = curr_time.strftime('%H:%M:%S')
isodate.text = curr_time.strftime('%Y-%m-%d')
metainfo.text = timeString + '-00.xml'
fileName = '/tmp/' + metainfo.text
# Fill in user inputs
log_user.text = str(self.logui.userName.text())
title.text = str(self.logui.titleEntry.text())
if title.text == '':
QMessageBox().warning(self, 'No Title entered', 'Please enter a title for the entry...')
return None # depends on [control=['if'], data=[]]
text.text = str(self.logui.textEntry.toPlainText())
# If text field is truly empty, ElementTree leaves off tag entirely which causes logbook parser to fail
if text.text == '':
text.text = ' ' # depends on [control=['if'], data=[]]
# Create xml file
xmlFile = open(fileName, 'w')
if logType == 'MCC':
ElementTree(log_entry).write(xmlFile) # depends on [control=['if'], data=[]]
else:
xmlString = self.prettify(log_entry)
xmlFile.write(xmlString)
xmlFile.write('\n') # Close with newline so cron job parses correctly
xmlFile.close()
return fileName.rstrip('.xml') |
def _process_rval_components(self):
"""This is suspiciously similar to _process_macro_default_arg, probably
want to figure out how to merge the two.
Process the rval of an assignment statement or a do-block
"""
while True:
match = self._expect_match(
'do block component',
# you could have a string, though that would be weird
STRING_PATTERN,
# a quote or an open/close parenthesis
NON_STRING_DO_BLOCK_MEMBER_PATTERN,
# a tag close
TAG_CLOSE_PATTERN
)
matchgroups = match.groupdict()
self.advance(match.end())
if matchgroups.get('string') is not None:
continue
elif matchgroups.get('quote') is not None:
self.rewind()
# now look for a string
match = self._expect_match('any string', STRING_PATTERN)
self.advance(match.end())
elif matchgroups.get('open'):
self._parenthesis_stack.append(True)
elif matchgroups.get('close'):
self._parenthesis_stack.pop()
elif matchgroups.get('tag_close'):
if self._parenthesis_stack:
msg = ('Found "%}", expected ")"')
dbt.exceptions.raise_compiler_error(msg)
return | def function[_process_rval_components, parameter[self]]:
constant[This is suspiciously similar to _process_macro_default_arg, probably
want to figure out how to merge the two.
Process the rval of an assignment statement or a do-block
]
while constant[True] begin[:]
variable[match] assign[=] call[name[self]._expect_match, parameter[constant[do block component], name[STRING_PATTERN], name[NON_STRING_DO_BLOCK_MEMBER_PATTERN], name[TAG_CLOSE_PATTERN]]]
variable[matchgroups] assign[=] call[name[match].groupdict, parameter[]]
call[name[self].advance, parameter[call[name[match].end, parameter[]]]]
if compare[call[name[matchgroups].get, parameter[constant[string]]] is_not constant[None]] begin[:]
continue | keyword[def] identifier[_process_rval_components] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[match] = identifier[self] . identifier[_expect_match] (
literal[string] ,
identifier[STRING_PATTERN] ,
identifier[NON_STRING_DO_BLOCK_MEMBER_PATTERN] ,
identifier[TAG_CLOSE_PATTERN]
)
identifier[matchgroups] = identifier[match] . identifier[groupdict] ()
identifier[self] . identifier[advance] ( identifier[match] . identifier[end] ())
keyword[if] identifier[matchgroups] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
keyword[continue]
keyword[elif] identifier[matchgroups] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[rewind] ()
identifier[match] = identifier[self] . identifier[_expect_match] ( literal[string] , identifier[STRING_PATTERN] )
identifier[self] . identifier[advance] ( identifier[match] . identifier[end] ())
keyword[elif] identifier[matchgroups] . identifier[get] ( literal[string] ):
identifier[self] . identifier[_parenthesis_stack] . identifier[append] ( keyword[True] )
keyword[elif] identifier[matchgroups] . identifier[get] ( literal[string] ):
identifier[self] . identifier[_parenthesis_stack] . identifier[pop] ()
keyword[elif] identifier[matchgroups] . identifier[get] ( literal[string] ):
keyword[if] identifier[self] . identifier[_parenthesis_stack] :
identifier[msg] =( literal[string] )
identifier[dbt] . identifier[exceptions] . identifier[raise_compiler_error] ( identifier[msg] )
keyword[return] | def _process_rval_components(self):
"""This is suspiciously similar to _process_macro_default_arg, probably
want to figure out how to merge the two.
Process the rval of an assignment statement or a do-block
"""
while True:
# you could have a string, though that would be weird
# a quote or an open/close parenthesis
# a tag close
match = self._expect_match('do block component', STRING_PATTERN, NON_STRING_DO_BLOCK_MEMBER_PATTERN, TAG_CLOSE_PATTERN)
matchgroups = match.groupdict()
self.advance(match.end())
if matchgroups.get('string') is not None:
continue # depends on [control=['if'], data=[]]
elif matchgroups.get('quote') is not None:
self.rewind()
# now look for a string
match = self._expect_match('any string', STRING_PATTERN)
self.advance(match.end()) # depends on [control=['if'], data=[]]
elif matchgroups.get('open'):
self._parenthesis_stack.append(True) # depends on [control=['if'], data=[]]
elif matchgroups.get('close'):
self._parenthesis_stack.pop() # depends on [control=['if'], data=[]]
elif matchgroups.get('tag_close'):
if self._parenthesis_stack:
msg = 'Found "%}", expected ")"'
dbt.exceptions.raise_compiler_error(msg) # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def setup_actions(self):
""" Connects slots to signals """
self.actionOpen.triggered.connect(self.on_open)
self.actionNew.triggered.connect(self.on_new)
self.actionSave.triggered.connect(self.on_save)
self.actionSave_as.triggered.connect(self.on_save_as)
self.actionQuit.triggered.connect(QtWidgets.QApplication.instance().quit)
self.tabWidget.current_changed.connect(
self.on_current_tab_changed)
self.actionAbout.triggered.connect(self.on_about) | def function[setup_actions, parameter[self]]:
constant[ Connects slots to signals ]
call[name[self].actionOpen.triggered.connect, parameter[name[self].on_open]]
call[name[self].actionNew.triggered.connect, parameter[name[self].on_new]]
call[name[self].actionSave.triggered.connect, parameter[name[self].on_save]]
call[name[self].actionSave_as.triggered.connect, parameter[name[self].on_save_as]]
call[name[self].actionQuit.triggered.connect, parameter[call[name[QtWidgets].QApplication.instance, parameter[]].quit]]
call[name[self].tabWidget.current_changed.connect, parameter[name[self].on_current_tab_changed]]
call[name[self].actionAbout.triggered.connect, parameter[name[self].on_about]] | keyword[def] identifier[setup_actions] ( identifier[self] ):
literal[string]
identifier[self] . identifier[actionOpen] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[on_open] )
identifier[self] . identifier[actionNew] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[on_new] )
identifier[self] . identifier[actionSave] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[on_save] )
identifier[self] . identifier[actionSave_as] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[on_save_as] )
identifier[self] . identifier[actionQuit] . identifier[triggered] . identifier[connect] ( identifier[QtWidgets] . identifier[QApplication] . identifier[instance] (). identifier[quit] )
identifier[self] . identifier[tabWidget] . identifier[current_changed] . identifier[connect] (
identifier[self] . identifier[on_current_tab_changed] )
identifier[self] . identifier[actionAbout] . identifier[triggered] . identifier[connect] ( identifier[self] . identifier[on_about] ) | def setup_actions(self):
""" Connects slots to signals """
self.actionOpen.triggered.connect(self.on_open)
self.actionNew.triggered.connect(self.on_new)
self.actionSave.triggered.connect(self.on_save)
self.actionSave_as.triggered.connect(self.on_save_as)
self.actionQuit.triggered.connect(QtWidgets.QApplication.instance().quit)
self.tabWidget.current_changed.connect(self.on_current_tab_changed)
self.actionAbout.triggered.connect(self.on_about) |
def initialize_repo(self):
"""
Clones repository & sets up usernames.
"""
logging.info('Repo {} doesn\'t exist. Cloning...'.format(self.repo_dir))
clone_args = ['git', 'clone']
if self.depth and self.depth > 0:
clone_args.extend(['--depth', str(self.depth)])
clone_args.extend(['--branch', self.branch_name])
clone_args.extend([self.git_url, self.repo_dir])
yield from execute_cmd(clone_args)
yield from execute_cmd(['git', 'config', 'user.email', '[email protected]'], cwd=self.repo_dir)
yield from execute_cmd(['git', 'config', 'user.name', 'nbgitpuller'], cwd=self.repo_dir)
logging.info('Repo {} initialized'.format(self.repo_dir)) | def function[initialize_repo, parameter[self]]:
constant[
Clones repository & sets up usernames.
]
call[name[logging].info, parameter[call[constant[Repo {} doesn't exist. Cloning...].format, parameter[name[self].repo_dir]]]]
variable[clone_args] assign[=] list[[<ast.Constant object at 0x7da1b128aaa0>, <ast.Constant object at 0x7da1b1289720>]]
if <ast.BoolOp object at 0x7da1b128a410> begin[:]
call[name[clone_args].extend, parameter[list[[<ast.Constant object at 0x7da1b128bb50>, <ast.Call object at 0x7da1b12892a0>]]]]
call[name[clone_args].extend, parameter[list[[<ast.Constant object at 0x7da1b1289ea0>, <ast.Attribute object at 0x7da1b12884c0>]]]]
call[name[clone_args].extend, parameter[list[[<ast.Attribute object at 0x7da1b12897b0>, <ast.Attribute object at 0x7da1b12883a0>]]]]
<ast.YieldFrom object at 0x7da1b128ba60>
<ast.YieldFrom object at 0x7da1b124e170>
<ast.YieldFrom object at 0x7da18ede7700>
call[name[logging].info, parameter[call[constant[Repo {} initialized].format, parameter[name[self].repo_dir]]]] | keyword[def] identifier[initialize_repo] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[repo_dir] ))
identifier[clone_args] =[ literal[string] , literal[string] ]
keyword[if] identifier[self] . identifier[depth] keyword[and] identifier[self] . identifier[depth] > literal[int] :
identifier[clone_args] . identifier[extend] ([ literal[string] , identifier[str] ( identifier[self] . identifier[depth] )])
identifier[clone_args] . identifier[extend] ([ literal[string] , identifier[self] . identifier[branch_name] ])
identifier[clone_args] . identifier[extend] ([ identifier[self] . identifier[git_url] , identifier[self] . identifier[repo_dir] ])
keyword[yield] keyword[from] identifier[execute_cmd] ( identifier[clone_args] )
keyword[yield] keyword[from] identifier[execute_cmd] ([ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[cwd] = identifier[self] . identifier[repo_dir] )
keyword[yield] keyword[from] identifier[execute_cmd] ([ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[cwd] = identifier[self] . identifier[repo_dir] )
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[repo_dir] )) | def initialize_repo(self):
"""
Clones repository & sets up usernames.
"""
logging.info("Repo {} doesn't exist. Cloning...".format(self.repo_dir))
clone_args = ['git', 'clone']
if self.depth and self.depth > 0:
clone_args.extend(['--depth', str(self.depth)]) # depends on [control=['if'], data=[]]
clone_args.extend(['--branch', self.branch_name])
clone_args.extend([self.git_url, self.repo_dir])
yield from execute_cmd(clone_args)
yield from execute_cmd(['git', 'config', 'user.email', '[email protected]'], cwd=self.repo_dir)
yield from execute_cmd(['git', 'config', 'user.name', 'nbgitpuller'], cwd=self.repo_dir)
logging.info('Repo {} initialized'.format(self.repo_dir)) |
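A rough driver sketch: the method is a plain generator that delegates to execute_cmd via yield from, so exhausting it runs the steps. The execute_cmd stand-in and Puller class below are illustrative, not the real nbgitpuller API:

import logging
import subprocess

logging.basicConfig(level=logging.INFO)

def execute_cmd(cmd, cwd=None):
    # Stand-in helper: run synchronously and yield captured output lines
    # so the 'yield from' delegation above still works.
    result = subprocess.run(cmd, cwd=cwd, check=True,
                            capture_output=True, text=True)
    yield from result.stdout.splitlines()

class Puller:
    git_url = 'https://github.com/org/repo.git'  # placeholder URL
    branch_name = 'master'
    repo_dir = '/tmp/repo'
    depth = 1
    initialize_repo = initialize_repo  # bind the generator above as a method

for _ in Puller().initialize_repo():
    pass  # exhausting the generator performs the clone and git config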
def react(reactor, main, argv):
"""
Call C{main} and run the reactor until the L{Deferred} it returns fires.
@param reactor: An unstarted L{IReactorCore} provider which will be run and
later stopped.
@param main: A callable which returns a L{Deferred}. It should take as
many arguments as there are elements in the list C{argv}.
@param argv: A list of arguments to pass to C{main}.
@return: C{None}
"""
stopping = []
reactor.addSystemEventTrigger('before', 'shutdown', stopping.append, True)
finished = main(reactor, *argv)
finished.addErrback(err, "main function encountered error")
def cbFinish(ignored):
if not stopping:
reactor.callWhenRunning(reactor.stop)
finished.addCallback(cbFinish)
reactor.run() | def function[react, parameter[reactor, main, argv]]:
constant[
Call C{main} and run the reactor until the L{Deferred} it returns fires.
@param reactor: An unstarted L{IReactorCore} provider which will be run and
later stopped.
@param main: A callable which returns a L{Deferred}. It should take as
many arguments as there are elements in the list C{argv}.
@param argv: A list of arguments to pass to C{main}.
@return: C{None}
]
variable[stopping] assign[=] list[[]]
call[name[reactor].addSystemEventTrigger, parameter[constant[before], constant[shutdown], name[stopping].append, constant[True]]]
variable[finished] assign[=] call[name[main], parameter[name[reactor], <ast.Starred object at 0x7da18eb570a0>]]
call[name[finished].addErrback, parameter[name[err], constant[main function encountered error]]]
def function[cbFinish, parameter[ignored]]:
if <ast.UnaryOp object at 0x7da18eb55c60> begin[:]
call[name[reactor].callWhenRunning, parameter[name[reactor].stop]]
call[name[finished].addCallback, parameter[name[cbFinish]]]
call[name[reactor].run, parameter[]] | keyword[def] identifier[react] ( identifier[reactor] , identifier[main] , identifier[argv] ):
literal[string]
identifier[stopping] =[]
identifier[reactor] . identifier[addSystemEventTrigger] ( literal[string] , literal[string] , identifier[stopping] . identifier[append] , keyword[True] )
identifier[finished] = identifier[main] ( identifier[reactor] ,* identifier[argv] )
identifier[finished] . identifier[addErrback] ( identifier[err] , literal[string] )
keyword[def] identifier[cbFinish] ( identifier[ignored] ):
keyword[if] keyword[not] identifier[stopping] :
identifier[reactor] . identifier[callWhenRunning] ( identifier[reactor] . identifier[stop] )
identifier[finished] . identifier[addCallback] ( identifier[cbFinish] )
identifier[reactor] . identifier[run] () | def react(reactor, main, argv):
"""
Call C{main} and run the reactor until the L{Deferred} it returns fires.
@param reactor: An unstarted L{IReactorCore} provider which will be run and
later stopped.
@param main: A callable which returns a L{Deferred}. It should take as
many arguments as there are elements in the list C{argv}.
@param argv: A list of arguments to pass to C{main}.
@return: C{None}
"""
stopping = []
reactor.addSystemEventTrigger('before', 'shutdown', stopping.append, True)
finished = main(reactor, *argv)
finished.addErrback(err, 'main function encountered error')
def cbFinish(ignored):
if not stopping:
reactor.callWhenRunning(reactor.stop) # depends on [control=['if'], data=[]]
finished.addCallback(cbFinish)
reactor.run() |
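A usage sketch, assuming err from twisted.python.log is in scope (the body above calls it) and a main that finishes after a short delay:

from twisted.internet import reactor, task
from twisted.python.log import err  # react() above expects this in scope

def main(reactor, greeting):
    # Fires after one second; cbFinish then stops the reactor.
    return task.deferLater(reactor, 1.0, print, greeting)

react(reactor, main, ['hello from the reactor'])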
def p_idcall_expr(p):
""" func_call : ID arg_list %prec UMINUS
""" # This can be a function call or a string index
p[0] = make_call(p[1], p.lineno(1), p[2])
if p[0] is None:
return
if p[0].token in ('STRSLICE', 'VAR', 'STRING'):
entry = SYMBOL_TABLE.access_call(p[1], p.lineno(1))
entry.accessed = True
return
    # TODO: Check that arrays really need kind=function to be set
    # Both array accesses and functions are tagged as functions;
    # functions also have the class_ attribute set to 'function'
p[0].entry.set_kind(KIND.function, p.lineno(1))
p[0].entry.accessed = True | def function[p_idcall_expr, parameter[p]]:
constant[ func_call : ID arg_list %prec UMINUS
]
call[name[p]][constant[0]] assign[=] call[name[make_call], parameter[call[name[p]][constant[1]], call[name[p].lineno, parameter[constant[1]]], call[name[p]][constant[2]]]]
if compare[call[name[p]][constant[0]] is constant[None]] begin[:]
return[None]
if compare[call[name[p]][constant[0]].token in tuple[[<ast.Constant object at 0x7da18bccb6d0>, <ast.Constant object at 0x7da18bccba00>, <ast.Constant object at 0x7da18bcca4d0>]]] begin[:]
variable[entry] assign[=] call[name[SYMBOL_TABLE].access_call, parameter[call[name[p]][constant[1]], call[name[p].lineno, parameter[constant[1]]]]]
name[entry].accessed assign[=] constant[True]
return[None]
call[call[name[p]][constant[0]].entry.set_kind, parameter[name[KIND].function, call[name[p].lineno, parameter[constant[1]]]]]
call[name[p]][constant[0]].entry.accessed assign[=] constant[True] | keyword[def] identifier[p_idcall_expr] ( identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[make_call] ( identifier[p] [ literal[int] ], identifier[p] . identifier[lineno] ( literal[int] ), identifier[p] [ literal[int] ])
keyword[if] identifier[p] [ literal[int] ] keyword[is] keyword[None] :
keyword[return]
keyword[if] identifier[p] [ literal[int] ]. identifier[token] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[entry] = identifier[SYMBOL_TABLE] . identifier[access_call] ( identifier[p] [ literal[int] ], identifier[p] . identifier[lineno] ( literal[int] ))
identifier[entry] . identifier[accessed] = keyword[True]
keyword[return]
identifier[p] [ literal[int] ]. identifier[entry] . identifier[set_kind] ( identifier[KIND] . identifier[function] , identifier[p] . identifier[lineno] ( literal[int] ))
identifier[p] [ literal[int] ]. identifier[entry] . identifier[accessed] = keyword[True] | def p_idcall_expr(p):
""" func_call : ID arg_list %prec UMINUS
""" # This can be a function call or a string index
p[0] = make_call(p[1], p.lineno(1), p[2])
if p[0] is None:
return # depends on [control=['if'], data=[]]
if p[0].token in ('STRSLICE', 'VAR', 'STRING'):
entry = SYMBOL_TABLE.access_call(p[1], p.lineno(1))
entry.accessed = True
return # depends on [control=['if'], data=[]]
    # TODO: Check that arrays really need kind=function to be set
    # Both array accesses and functions are tagged as functions;
    # functions also have the class_ attribute set to 'function'
p[0].entry.set_kind(KIND.function, p.lineno(1))
p[0].entry.accessed = True |
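
A self-contained PLY sketch showing the same p[i] / p.lineno(i) conventions that p_idcall_expr relies on, assuming the ply package is installed; the toy grammar below is illustrative, not the original one.

import ply.lex as lex
import ply.yacc as yacc

tokens = ('ID', 'LPAREN', 'RPAREN')
t_ID = r'[A-Za-z_]\w*'
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

def p_call(p):
    """call : ID LPAREN RPAREN"""
    # p[1] is the value of the matched ID token; p.lineno(1) is the source
    # line it was seen on, just as in p_idcall_expr above.
    p[0] = ('call', p[1], p.lineno(1))

def p_error(p):
    pass

lex.lex()
print(yacc.yacc().parse("foo()"))  # ('call', 'foo', 1)
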
def __flags(self):
"""
Internal method. Turns arguments into flags.
"""
flags = []
if self._capture:
flags.append("-capture")
if self._spy:
flags.append("-spy")
if self._dbpath:
flags += ["-db-path", self._dbpath]
flags += ["-db", "boltdb"]
else:
flags += ["-db", "memory"]
if self._synthesize:
assert(self._middleware)
flags += ["-synthesize"]
if self._simulation:
flags += ["-import", self._simulation]
if self._proxyPort:
flags += ["-pp", str(self._proxyPort)]
if self._adminPort:
flags += ["-ap", str(self._adminPort)]
if self._modify:
flags += ["-modify"]
if self._verbose:
flags += ["-v"]
if self._dev:
flags += ["-dev"]
if self._metrics:
flags += ["-metrics"]
if self._auth:
flags += ["-auth"]
if self._middleware:
flags += ["-middleware", self._middleware]
if self._cert:
flags += ["-cert", self._cert]
if self._certName:
flags += ["-cert-name", self._certName]
if self._certOrg:
flags += ["-cert-org", self._certOrg]
if self._destination:
flags += ["-destination", self._destination]
if self._key:
flags += ["-key", self._key]
if self._dest:
for i in range(len(self._dest)):
flags += ["-dest", self._dest[i]]
if self._generateCACert:
flags += ["-generate-ca-cert"]
if not self._tlsVerification:
flags += ["-tls-verification", "false"]
logging.debug("flags:" + str(flags))
return flags | def function[__flags, parameter[self]]:
constant[
Internal method. Turns arguments into flags.
]
variable[flags] assign[=] list[[]]
if name[self]._capture begin[:]
call[name[flags].append, parameter[constant[-capture]]]
if name[self]._spy begin[:]
call[name[flags].append, parameter[constant[-spy]]]
if name[self]._dbpath begin[:]
<ast.AugAssign object at 0x7da1b10c0160>
<ast.AugAssign object at 0x7da1b10c0700>
if name[self]._synthesize begin[:]
assert[name[self]._middleware]
<ast.AugAssign object at 0x7da1b10c3010>
if name[self]._simulation begin[:]
<ast.AugAssign object at 0x7da1b10c2440>
if name[self]._proxyPort begin[:]
<ast.AugAssign object at 0x7da1b10c0910>
if name[self]._adminPort begin[:]
<ast.AugAssign object at 0x7da1b10c0c40>
if name[self]._modify begin[:]
<ast.AugAssign object at 0x7da1b10c0c10>
if name[self]._verbose begin[:]
<ast.AugAssign object at 0x7da1b10c2e60>
if name[self]._dev begin[:]
<ast.AugAssign object at 0x7da1b10c0310>
if name[self]._metrics begin[:]
<ast.AugAssign object at 0x7da1b10c23b0>
if name[self]._auth begin[:]
<ast.AugAssign object at 0x7da1b10c2980>
if name[self]._middleware begin[:]
<ast.AugAssign object at 0x7da1b10c2ce0>
if name[self]._cert begin[:]
<ast.AugAssign object at 0x7da1b10c0040>
if name[self]._certName begin[:]
<ast.AugAssign object at 0x7da1b10c36d0>
if name[self]._certOrg begin[:]
<ast.AugAssign object at 0x7da1b10c1d50>
if name[self]._destination begin[:]
<ast.AugAssign object at 0x7da1b10c0880>
if name[self]._key begin[:]
<ast.AugAssign object at 0x7da1b10c02b0>
if name[self]._dest begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self]._dest]]]]] begin[:]
<ast.AugAssign object at 0x7da2054a4a00>
if name[self]._generateCACert begin[:]
<ast.AugAssign object at 0x7da2054a6050>
if <ast.UnaryOp object at 0x7da2054a6f20> begin[:]
<ast.AugAssign object at 0x7da2054a4310>
call[name[logging].debug, parameter[binary_operation[constant[flags:] + call[name[str], parameter[name[flags]]]]]]
return[name[flags]] | keyword[def] identifier[__flags] ( identifier[self] ):
literal[string]
identifier[flags] =[]
keyword[if] identifier[self] . identifier[_capture] :
identifier[flags] . identifier[append] ( literal[string] )
keyword[if] identifier[self] . identifier[_spy] :
identifier[flags] . identifier[append] ( literal[string] )
keyword[if] identifier[self] . identifier[_dbpath] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_dbpath] ]
identifier[flags] +=[ literal[string] , literal[string] ]
keyword[else] :
identifier[flags] +=[ literal[string] , literal[string] ]
keyword[if] identifier[self] . identifier[_synthesize] :
keyword[assert] ( identifier[self] . identifier[_middleware] )
identifier[flags] +=[ literal[string] ]
keyword[if] identifier[self] . identifier[_simulation] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_simulation] ]
keyword[if] identifier[self] . identifier[_proxyPort] :
identifier[flags] +=[ literal[string] , identifier[str] ( identifier[self] . identifier[_proxyPort] )]
keyword[if] identifier[self] . identifier[_adminPort] :
identifier[flags] +=[ literal[string] , identifier[str] ( identifier[self] . identifier[_adminPort] )]
keyword[if] identifier[self] . identifier[_modify] :
identifier[flags] +=[ literal[string] ]
keyword[if] identifier[self] . identifier[_verbose] :
identifier[flags] +=[ literal[string] ]
keyword[if] identifier[self] . identifier[_dev] :
identifier[flags] +=[ literal[string] ]
keyword[if] identifier[self] . identifier[_metrics] :
identifier[flags] +=[ literal[string] ]
keyword[if] identifier[self] . identifier[_auth] :
identifier[flags] +=[ literal[string] ]
keyword[if] identifier[self] . identifier[_middleware] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_middleware] ]
keyword[if] identifier[self] . identifier[_cert] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_cert] ]
keyword[if] identifier[self] . identifier[_certName] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_certName] ]
keyword[if] identifier[self] . identifier[_certOrg] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_certOrg] ]
keyword[if] identifier[self] . identifier[_destination] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_destination] ]
keyword[if] identifier[self] . identifier[_key] :
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_key] ]
keyword[if] identifier[self] . identifier[_dest] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[_dest] )):
identifier[flags] +=[ literal[string] , identifier[self] . identifier[_dest] [ identifier[i] ]]
keyword[if] identifier[self] . identifier[_generateCACert] :
identifier[flags] +=[ literal[string] ]
keyword[if] keyword[not] identifier[self] . identifier[_tlsVerification] :
identifier[flags] +=[ literal[string] , literal[string] ]
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[flags] ))
keyword[return] identifier[flags] | def __flags(self):
"""
Internal method. Turns arguments into flags.
"""
flags = []
if self._capture:
flags.append('-capture') # depends on [control=['if'], data=[]]
if self._spy:
flags.append('-spy') # depends on [control=['if'], data=[]]
if self._dbpath:
flags += ['-db-path', self._dbpath]
flags += ['-db', 'boltdb'] # depends on [control=['if'], data=[]]
else:
flags += ['-db', 'memory']
if self._synthesize:
assert self._middleware
flags += ['-synthesize'] # depends on [control=['if'], data=[]]
if self._simulation:
flags += ['-import', self._simulation] # depends on [control=['if'], data=[]]
if self._proxyPort:
flags += ['-pp', str(self._proxyPort)] # depends on [control=['if'], data=[]]
if self._adminPort:
flags += ['-ap', str(self._adminPort)] # depends on [control=['if'], data=[]]
if self._modify:
flags += ['-modify'] # depends on [control=['if'], data=[]]
if self._verbose:
flags += ['-v'] # depends on [control=['if'], data=[]]
if self._dev:
flags += ['-dev'] # depends on [control=['if'], data=[]]
if self._metrics:
flags += ['-metrics'] # depends on [control=['if'], data=[]]
if self._auth:
flags += ['-auth'] # depends on [control=['if'], data=[]]
if self._middleware:
flags += ['-middleware', self._middleware] # depends on [control=['if'], data=[]]
if self._cert:
flags += ['-cert', self._cert] # depends on [control=['if'], data=[]]
if self._certName:
flags += ['-cert-name', self._certName] # depends on [control=['if'], data=[]]
if self._certOrg:
flags += ['-cert-org', self._certOrg] # depends on [control=['if'], data=[]]
if self._destination:
flags += ['-destination', self._destination] # depends on [control=['if'], data=[]]
if self._key:
flags += ['-key', self._key] # depends on [control=['if'], data=[]]
if self._dest:
for i in range(len(self._dest)):
flags += ['-dest', self._dest[i]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if self._generateCACert:
flags += ['-generate-ca-cert'] # depends on [control=['if'], data=[]]
if not self._tlsVerification:
flags += ['-tls-verification', 'false'] # depends on [control=['if'], data=[]]
logging.debug('flags:' + str(flags))
return flags |
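
The method above is one long instance of a simple pattern: boolean attributes become bare switches, valued attributes become flag/value pairs, and list attributes repeat their flag. A standalone sketch of that pattern with illustrative flag names:

def build_flags(capture=False, admin_port=None, dests=()):
    flags = []
    if capture:
        flags.append("-capture")           # boolean -> bare switch
    if admin_port is not None:
        flags += ["-ap", str(admin_port)]  # value -> flag/value pair
    for d in dests:
        flags += ["-dest", d]              # list -> repeated flag
    return flags

print(build_flags(capture=True, admin_port=8888, dests=["a", "b"]))
# ['-capture', '-ap', '8888', '-dest', 'a', '-dest', 'b']
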
def __find_column(self, column_names, part_first_line):
"""
        Finds the columns for the given column names in the SAR type
        definition and returns their indices.
        :param column_names: Names of the columns we look for (regexes),
            given as a list
:param part_first_line: First line of the SAR part
:return: ``Dictionary`` of names => position, None for not present
"""
part_parts = part_first_line.split()
### DEBUG
# print("Parts: %s" % (part_parts))
return_dict = {}
counter = 0
for piece in part_parts:
for colname in column_names:
pattern_re = re.compile(colname)
if pattern_re.search(piece):
return_dict[colname] = counter
break
counter += 1
        # Verify the content of the return dictionary, fill the blanks
        # with None for the columns that were not found
for colver in column_names:
try:
tempval = return_dict[colver]
del tempval
except KeyError:
return_dict[colver] = None
return return_dict | def function[__find_column, parameter[self, column_names, part_first_line]]:
constant[
        Finds the columns for the given column names in the SAR type
        definition and returns their indices.
        :param column_names: Names of the columns we look for (regexes),
            given as a list
:param part_first_line: First line of the SAR part
:return: ``Dictionary`` of names => position, None for not present
]
variable[part_parts] assign[=] call[name[part_first_line].split, parameter[]]
variable[return_dict] assign[=] dictionary[[], []]
variable[counter] assign[=] constant[0]
for taget[name[piece]] in starred[name[part_parts]] begin[:]
for taget[name[colname]] in starred[name[column_names]] begin[:]
variable[pattern_re] assign[=] call[name[re].compile, parameter[name[colname]]]
if call[name[pattern_re].search, parameter[name[piece]]] begin[:]
call[name[return_dict]][name[colname]] assign[=] name[counter]
break
<ast.AugAssign object at 0x7da18bc72bc0>
for taget[name[colver]] in starred[name[column_names]] begin[:]
<ast.Try object at 0x7da18bc71ff0>
return[name[return_dict]] | keyword[def] identifier[__find_column] ( identifier[self] , identifier[column_names] , identifier[part_first_line] ):
literal[string]
identifier[part_parts] = identifier[part_first_line] . identifier[split] ()
identifier[return_dict] ={}
identifier[counter] = literal[int]
keyword[for] identifier[piece] keyword[in] identifier[part_parts] :
keyword[for] identifier[colname] keyword[in] identifier[column_names] :
identifier[pattern_re] = identifier[re] . identifier[compile] ( identifier[colname] )
keyword[if] identifier[pattern_re] . identifier[search] ( identifier[piece] ):
identifier[return_dict] [ identifier[colname] ]= identifier[counter]
keyword[break]
identifier[counter] += literal[int]
keyword[for] identifier[colver] keyword[in] identifier[column_names] :
keyword[try] :
identifier[tempval] = identifier[return_dict] [ identifier[colver] ]
keyword[del] identifier[tempval]
keyword[except] identifier[KeyError] :
identifier[return_dict] [ identifier[colver] ]= keyword[None]
keyword[return] identifier[return_dict] | def __find_column(self, column_names, part_first_line):
"""
        Finds the columns for the given column names in the SAR type
        definition and returns their indices.
        :param column_names: Names of the columns we look for (regexes),
            given as a list
:param part_first_line: First line of the SAR part
:return: ``Dictionary`` of names => position, None for not present
"""
part_parts = part_first_line.split()
### DEBUG
# print("Parts: %s" % (part_parts))
return_dict = {}
counter = 0
for piece in part_parts:
for colname in column_names:
pattern_re = re.compile(colname)
if pattern_re.search(piece):
return_dict[colname] = counter
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['colname']]
counter += 1 # depends on [control=['for'], data=['piece']]
        # Verify the content of the return dictionary, fill the blanks
        # with None for the columns that were not found
for colver in column_names:
try:
tempval = return_dict[colver]
del tempval # depends on [control=['try'], data=[]]
except KeyError:
return_dict[colver] = None # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['colver']]
return return_dict |
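
The same header-matching loop as __find_column, run standalone on a typical SAR header line; the column-name regexes are illustrative.

import re

header = "12:00:01 AM CPU %user %nice %system %idle"
wanted = [r'%user', r'%idle', r'%steal']
found = {}
for counter, piece in enumerate(header.split()):
    for name in wanted:
        if re.compile(name).search(piece):
            found[name] = counter
            break
for name in wanted:               # absent columns map to None
    found.setdefault(name, None)
print(found)  # {'%user': 3, '%idle': 6, '%steal': None}
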
def unzipper(filename, dir_tmp):
"""
Unzip .lpd file contents to tmp directory.
:param str filename: filename.lpd
:param str dir_tmp: Tmp folder to extract contents to
    :return: None
"""
logger_zips.info("enter unzip")
# Unzip contents to the tmp directory
try:
with zipfile.ZipFile(filename) as f:
f.extractall(dir_tmp)
except FileNotFoundError as e:
logger_zips.debug("unzip: FileNotFound: {}, {}".format(filename, e))
shutil.rmtree(dir_tmp)
logger_zips.info("exit unzip")
return | def function[unzipper, parameter[filename, dir_tmp]]:
constant[
Unzip .lpd file contents to tmp directory.
:param str filename: filename.lpd
:param str dir_tmp: Tmp folder to extract contents to
    :return: None
]
call[name[logger_zips].info, parameter[constant[enter unzip]]]
<ast.Try object at 0x7da20c76feb0>
call[name[logger_zips].info, parameter[constant[exit unzip]]]
return[None] | keyword[def] identifier[unzipper] ( identifier[filename] , identifier[dir_tmp] ):
literal[string]
identifier[logger_zips] . identifier[info] ( literal[string] )
keyword[try] :
keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[filename] ) keyword[as] identifier[f] :
identifier[f] . identifier[extractall] ( identifier[dir_tmp] )
keyword[except] identifier[FileNotFoundError] keyword[as] identifier[e] :
identifier[logger_zips] . identifier[debug] ( literal[string] . identifier[format] ( identifier[filename] , identifier[e] ))
identifier[shutil] . identifier[rmtree] ( identifier[dir_tmp] )
identifier[logger_zips] . identifier[info] ( literal[string] )
keyword[return] | def unzipper(filename, dir_tmp):
"""
Unzip .lpd file contents to tmp directory.
:param str filename: filename.lpd
:param str dir_tmp: Tmp folder to extract contents to
    :return: None
"""
logger_zips.info('enter unzip')
# Unzip contents to the tmp directory
try:
with zipfile.ZipFile(filename) as f:
f.extractall(dir_tmp) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except FileNotFoundError as e:
logger_zips.debug('unzip: FileNotFound: {}, {}'.format(filename, e))
shutil.rmtree(dir_tmp) # depends on [control=['except'], data=['e']]
logger_zips.info('exit unzip')
return |
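
A usage sketch for unzipper(), assuming the zipfile, shutil and logger_zips names from its original module are in scope; the .lpd filename is illustrative. Note that on a missing archive the function deletes the caller's tmp directory.

import os
import tempfile

dir_tmp = tempfile.mkdtemp()
unzipper("record.lpd", dir_tmp)
print(os.listdir(dir_tmp) if os.path.isdir(dir_tmp) else "no such archive")
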
def environ(self):
"""
        Return a copy of the request's META dictionary with PATH_INFO added.
"""
environ = dict(self._request.META)
environ['PATH_INFO'] = self._request.path_info
return environ | def function[environ, parameter[self]]:
constant[
        Return a copy of the request's META dictionary with PATH_INFO added.
]
variable[environ] assign[=] call[name[dict], parameter[name[self]._request.META]]
call[name[environ]][constant[PATH_INFO]] assign[=] name[self]._request.path_info
return[name[environ]] | keyword[def] identifier[environ] ( identifier[self] ):
literal[string]
identifier[environ] = identifier[dict] ( identifier[self] . identifier[_request] . identifier[META] )
identifier[environ] [ literal[string] ]= identifier[self] . identifier[_request] . identifier[path_info]
keyword[return] identifier[environ] | def environ(self):
"""
        Return a copy of the request's META dictionary with PATH_INFO added.
"""
environ = dict(self._request.META)
environ['PATH_INFO'] = self._request.path_info
return environ |
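
The property above only copies META and injects PATH_INFO. The same two steps with a stand-in for Django's HttpRequest (which supplies META and path_info); attribute values are illustrative.

class FakeRequest:
    META = {"REQUEST_METHOD": "GET", "HTTP_HOST": "example.com"}
    path_info = "/users/42/"

request = FakeRequest()
environ = dict(request.META)          # copy, so META itself is untouched
environ["PATH_INFO"] = request.path_info
print(environ["PATH_INFO"])  # /users/42/
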
def grant(self, fail_on_found=False, **kwargs):
"""Add a user or a team to a role. Required information:
1) Type of the role
2) Resource of the role, inventory, credential, or any other
3) A user or a team to add to the role
=====API DOCS=====
Add a user or a team to a role. Required information:
* Type of the role.
* Resource of the role, inventory, credential, or any other.
* A user or a team to add to the role.
:param fail_on_found: Flag that if set, the operation fails if a user/team already has the role.
:type fail_on_found: bool
:param `**kwargs`: The user to be associated and the role to associate.
:returns: parsed JSON of role grant.
:rtype: dict
=====API DOCS=====
"""
return self.role_write(fail_on_found=fail_on_found, **kwargs) | def function[grant, parameter[self, fail_on_found]]:
constant[Add a user or a team to a role. Required information:
1) Type of the role
2) Resource of the role, inventory, credential, or any other
3) A user or a team to add to the role
=====API DOCS=====
Add a user or a team to a role. Required information:
* Type of the role.
* Resource of the role, inventory, credential, or any other.
* A user or a team to add to the role.
:param fail_on_found: Flag that if set, the operation fails if a user/team already has the role.
:type fail_on_found: bool
:param `**kwargs`: The user to be associated and the role to associate.
:returns: parsed JSON of role grant.
:rtype: dict
=====API DOCS=====
]
return[call[name[self].role_write, parameter[]]] | keyword[def] identifier[grant] ( identifier[self] , identifier[fail_on_found] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[role_write] ( identifier[fail_on_found] = identifier[fail_on_found] ,** identifier[kwargs] ) | def grant(self, fail_on_found=False, **kwargs):
"""Add a user or a team to a role. Required information:
1) Type of the role
2) Resource of the role, inventory, credential, or any other
3) A user or a team to add to the role
=====API DOCS=====
Add a user or a team to a role. Required information:
* Type of the role.
* Resource of the role, inventory, credential, or any other.
* A user or a team to add to the role.
:param fail_on_found: Flag that if set, the operation fails if a user/team already has the role.
:type fail_on_found: bool
:param `**kwargs`: The user to be associated and the role to associate.
:returns: parsed JSON of role grant.
:rtype: dict
=====API DOCS=====
"""
return self.role_write(fail_on_found=fail_on_found, **kwargs) |
def connections(self, hotspot=None):
"""
Returns a list of all the connection instances that are in this scene.
        :return: <list> [ <XNodeConnection>, .. ]
"""
cons = self.findItems(XNodeConnection)
if hotspot is not None:
filt = lambda x: hotspot in (x.inputHotspot(), x.outputHotspot())
return filter(filt, cons)
return cons | def function[connections, parameter[self, hotspot]]:
constant[
Returns a list of all the connection instances that are in this scene.
        :return: <list> [ <XNodeConnection>, .. ]
]
variable[cons] assign[=] call[name[self].findItems, parameter[name[XNodeConnection]]]
if compare[name[hotspot] is_not constant[None]] begin[:]
variable[filt] assign[=] <ast.Lambda object at 0x7da20c796650>
return[call[name[filter], parameter[name[filt], name[cons]]]]
return[name[cons]] | keyword[def] identifier[connections] ( identifier[self] , identifier[hotspot] = keyword[None] ):
literal[string]
identifier[cons] = identifier[self] . identifier[findItems] ( identifier[XNodeConnection] )
keyword[if] identifier[hotspot] keyword[is] keyword[not] keyword[None] :
identifier[filt] = keyword[lambda] identifier[x] : identifier[hotspot] keyword[in] ( identifier[x] . identifier[inputHotspot] (), identifier[x] . identifier[outputHotspot] ())
keyword[return] identifier[filter] ( identifier[filt] , identifier[cons] )
keyword[return] identifier[cons] | def connections(self, hotspot=None):
"""
Returns a list of all the connection instances that are in this scene.
        :return: <list> [ <XNodeConnection>, .. ]
"""
cons = self.findItems(XNodeConnection)
if hotspot is not None:
filt = lambda x: hotspot in (x.inputHotspot(), x.outputHotspot())
return filter(filt, cons) # depends on [control=['if'], data=['hotspot']]
return cons |
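
One caveat: under Python 3, filter() returns a lazy iterator rather than the list the docstring promises; a list comprehension gives the documented behaviour. A standalone sketch with stand-in connection objects:

class FakeConnection:
    def __init__(self, inp, out):
        self._in, self._out = inp, out
    def inputHotspot(self):
        return self._in
    def outputHotspot(self):
        return self._out

cons = [FakeConnection("a", "b"), FakeConnection("b", "c")]
hotspot = "c"
# Equivalent to the filter() call above, but always a list.
matches = [c for c in cons
           if hotspot in (c.inputHotspot(), c.outputHotspot())]
print(len(matches))  # 1
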
def upd_textures(self, *args):
"""Create one :class:`SwatchButton` for each texture"""
if self.canvas is None:
Clock.schedule_once(self.upd_textures, 0)
return
for name in list(self.swatches.keys()):
if name not in self.atlas.textures:
self.remove_widget(self.swatches[name])
del self.swatches[name]
for (name, tex) in self.atlas.textures.items():
if name in self.swatches and self.swatches[name] != tex:
self.remove_widget(self.swatches[name])
if name not in self.swatches or self.swatches[name] != tex:
self.swatches[name] = SwatchButton(
name=name,
tex=tex,
size_hint=(None, None),
size=self.swatch_size
)
self.add_widget(self.swatches[name]) | def function[upd_textures, parameter[self]]:
constant[Create one :class:`SwatchButton` for each texture]
if compare[name[self].canvas is constant[None]] begin[:]
call[name[Clock].schedule_once, parameter[name[self].upd_textures, constant[0]]]
return[None]
for taget[name[name]] in starred[call[name[list], parameter[call[name[self].swatches.keys, parameter[]]]]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].atlas.textures] begin[:]
call[name[self].remove_widget, parameter[call[name[self].swatches][name[name]]]]
<ast.Delete object at 0x7da2047eb9a0>
for taget[tuple[[<ast.Name object at 0x7da2047e81c0>, <ast.Name object at 0x7da2047e80a0>]]] in starred[call[name[self].atlas.textures.items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da2047e9480> begin[:]
call[name[self].remove_widget, parameter[call[name[self].swatches][name[name]]]]
if <ast.BoolOp object at 0x7da2047eaa40> begin[:]
call[name[self].swatches][name[name]] assign[=] call[name[SwatchButton], parameter[]]
call[name[self].add_widget, parameter[call[name[self].swatches][name[name]]]] | keyword[def] identifier[upd_textures] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[canvas] keyword[is] keyword[None] :
identifier[Clock] . identifier[schedule_once] ( identifier[self] . identifier[upd_textures] , literal[int] )
keyword[return]
keyword[for] identifier[name] keyword[in] identifier[list] ( identifier[self] . identifier[swatches] . identifier[keys] ()):
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[atlas] . identifier[textures] :
identifier[self] . identifier[remove_widget] ( identifier[self] . identifier[swatches] [ identifier[name] ])
keyword[del] identifier[self] . identifier[swatches] [ identifier[name] ]
keyword[for] ( identifier[name] , identifier[tex] ) keyword[in] identifier[self] . identifier[atlas] . identifier[textures] . identifier[items] ():
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[swatches] keyword[and] identifier[self] . identifier[swatches] [ identifier[name] ]!= identifier[tex] :
identifier[self] . identifier[remove_widget] ( identifier[self] . identifier[swatches] [ identifier[name] ])
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[swatches] keyword[or] identifier[self] . identifier[swatches] [ identifier[name] ]!= identifier[tex] :
identifier[self] . identifier[swatches] [ identifier[name] ]= identifier[SwatchButton] (
identifier[name] = identifier[name] ,
identifier[tex] = identifier[tex] ,
identifier[size_hint] =( keyword[None] , keyword[None] ),
identifier[size] = identifier[self] . identifier[swatch_size]
)
identifier[self] . identifier[add_widget] ( identifier[self] . identifier[swatches] [ identifier[name] ]) | def upd_textures(self, *args):
"""Create one :class:`SwatchButton` for each texture"""
if self.canvas is None:
Clock.schedule_once(self.upd_textures, 0)
return # depends on [control=['if'], data=[]]
for name in list(self.swatches.keys()):
if name not in self.atlas.textures:
self.remove_widget(self.swatches[name])
del self.swatches[name] # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=['name']]
for (name, tex) in self.atlas.textures.items():
if name in self.swatches and self.swatches[name] != tex:
self.remove_widget(self.swatches[name]) # depends on [control=['if'], data=[]]
if name not in self.swatches or self.swatches[name] != tex:
self.swatches[name] = SwatchButton(name=name, tex=tex, size_hint=(None, None), size=self.swatch_size)
self.add_widget(self.swatches[name]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
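
upd_textures() is an instance of a generic reconcile loop: drop widgets whose source vanished, then (re)create widgets whose source is new or changed. The same logic on plain dicts:

swatches = {"grass": "tex-old", "water": "tex-1"}
textures = {"water": "tex-2", "stone": "tex-3"}
for name in list(swatches):           # drop entries with no source left
    if name not in textures:
        del swatches[name]
for name, tex in textures.items():    # (re)create changed or new entries
    if swatches.get(name) != tex:
        swatches[name] = tex
print(swatches)  # {'water': 'tex-2', 'stone': 'tex-3'}
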
def _authn_response(self, in_response_to, consumer_url,
sp_entity_id, identity=None, name_id=None,
status=None, authn=None, issuer=None, policy=None,
sign_assertion=False, sign_response=False,
best_effort=False, encrypt_assertion=False,
encrypt_cert_advice=None, encrypt_cert_assertion=None,
authn_statement=None,
encrypt_assertion_self_contained=False,
encrypted_advice_attributes=False,
pefim=False, sign_alg=None, digest_alg=None,
farg=None, session_not_on_or_after=None):
""" Create a response. A layer of indirection.
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param sp_entity_id: The entity identifier of the SP
:param identity: A dictionary with attributes and values that are
expected to be the bases for the assertion in the response.
:param name_id: The identifier of the subject
:param status: The status of the response
:param authn: A dictionary containing information about the
authn context.
:param issuer: The issuer of the response
:param policy:
:param sign_assertion: Whether the assertion should be signed or not
:param sign_response: Whether the response should be signed or not
        :param best_effort: Send a response even if the SP's demands cannot
            be met.
        :param encrypt_assertion: True if assertions should be encrypted.
        :param encrypt_assertion_self_contained: True if all encrypted
            assertions should have all namespaces
            self-contained.
:param encrypted_advice_attributes: True if assertions in the advice
element should be encrypted.
:param encrypt_cert_advice: Certificate to be used for encryption of
assertions in the advice element.
:param encrypt_cert_assertion: Certificate to be used for encryption
of assertions.
:param authn_statement: Authentication statement.
:param pefim: True if a response according to the PEFIM profile
should be created.
:param farg: Argument to pass on to the assertion constructor
:return: A response instance
"""
if farg is None:
assertion_args = {}
args = {}
# if identity:
_issuer = self._issuer(issuer)
# if encrypt_assertion and show_nameid:
# tmp_name_id = name_id
# name_id = None
# name_id = None
# tmp_authn = authn
# authn = None
# tmp_authn_statement = authn_statement
# authn_statement = None
if pefim:
encrypted_advice_attributes = True
encrypt_assertion_self_contained = True
assertion_attributes = self.setup_assertion(
None, sp_entity_id, None, None, None, policy, None, None,
identity, best_effort, sign_response, farg=farg)
assertion = self.setup_assertion(
authn, sp_entity_id, in_response_to, consumer_url, name_id,
policy, _issuer, authn_statement, [], True, sign_response,
farg=farg, session_not_on_or_after=session_not_on_or_after)
assertion.advice = saml.Advice()
# assertion.advice.assertion_id_ref.append(saml.AssertionIDRef())
# assertion.advice.assertion_uri_ref.append(saml.AssertionURIRef())
assertion.advice.assertion.append(assertion_attributes)
else:
assertion = self.setup_assertion(
authn, sp_entity_id, in_response_to, consumer_url, name_id,
policy, _issuer, authn_statement, identity, True,
sign_response, farg=farg,
session_not_on_or_after=session_not_on_or_after)
to_sign = []
if not encrypt_assertion:
if sign_assertion:
assertion.signature = pre_signature_part(assertion.id,
self.sec.my_cert, 2,
sign_alg=sign_alg,
digest_alg=digest_alg)
to_sign.append((class_name(assertion), assertion.id))
args["assertion"] = assertion
if (self.support_AssertionIDRequest() or self.support_AuthnQuery()):
self.session_db.store_assertion(assertion, to_sign)
return self._response(
in_response_to, consumer_url, status, issuer, sign_response,
to_sign, sp_entity_id=sp_entity_id,
encrypt_assertion=encrypt_assertion,
encrypt_cert_advice=encrypt_cert_advice,
encrypt_cert_assertion=encrypt_cert_assertion,
encrypt_assertion_self_contained=encrypt_assertion_self_contained,
encrypted_advice_attributes=encrypted_advice_attributes,
sign_assertion=sign_assertion,
pefim=pefim, sign_alg=sign_alg, digest_alg=digest_alg, **args) | def function[_authn_response, parameter[self, in_response_to, consumer_url, sp_entity_id, identity, name_id, status, authn, issuer, policy, sign_assertion, sign_response, best_effort, encrypt_assertion, encrypt_cert_advice, encrypt_cert_assertion, authn_statement, encrypt_assertion_self_contained, encrypted_advice_attributes, pefim, sign_alg, digest_alg, farg, session_not_on_or_after]]:
constant[ Create a response. A layer of indirection.
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param sp_entity_id: The entity identifier of the SP
:param identity: A dictionary with attributes and values that are
expected to be the bases for the assertion in the response.
:param name_id: The identifier of the subject
:param status: The status of the response
:param authn: A dictionary containing information about the
authn context.
:param issuer: The issuer of the response
:param policy:
:param sign_assertion: Whether the assertion should be signed or not
:param sign_response: Whether the response should be signed or not
        :param best_effort: Send a response even if the SP's demands cannot
            be met.
        :param encrypt_assertion: True if assertions should be encrypted.
        :param encrypt_assertion_self_contained: True if all encrypted
            assertions should have all namespaces
            self-contained.
:param encrypted_advice_attributes: True if assertions in the advice
element should be encrypted.
:param encrypt_cert_advice: Certificate to be used for encryption of
assertions in the advice element.
:param encrypt_cert_assertion: Certificate to be used for encryption
of assertions.
:param authn_statement: Authentication statement.
:param pefim: True if a response according to the PEFIM profile
should be created.
:param farg: Argument to pass on to the assertion constructor
:return: A response instance
]
if compare[name[farg] is constant[None]] begin[:]
variable[assertion_args] assign[=] dictionary[[], []]
variable[args] assign[=] dictionary[[], []]
variable[_issuer] assign[=] call[name[self]._issuer, parameter[name[issuer]]]
if name[pefim] begin[:]
variable[encrypted_advice_attributes] assign[=] constant[True]
variable[encrypt_assertion_self_contained] assign[=] constant[True]
variable[assertion_attributes] assign[=] call[name[self].setup_assertion, parameter[constant[None], name[sp_entity_id], constant[None], constant[None], constant[None], name[policy], constant[None], constant[None], name[identity], name[best_effort], name[sign_response]]]
variable[assertion] assign[=] call[name[self].setup_assertion, parameter[name[authn], name[sp_entity_id], name[in_response_to], name[consumer_url], name[name_id], name[policy], name[_issuer], name[authn_statement], list[[]], constant[True], name[sign_response]]]
name[assertion].advice assign[=] call[name[saml].Advice, parameter[]]
call[name[assertion].advice.assertion.append, parameter[name[assertion_attributes]]]
variable[to_sign] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da18f8132e0> begin[:]
if name[sign_assertion] begin[:]
name[assertion].signature assign[=] call[name[pre_signature_part], parameter[name[assertion].id, name[self].sec.my_cert, constant[2]]]
call[name[to_sign].append, parameter[tuple[[<ast.Call object at 0x7da18f8102e0>, <ast.Attribute object at 0x7da18f812440>]]]]
call[name[args]][constant[assertion]] assign[=] name[assertion]
if <ast.BoolOp object at 0x7da18f813be0> begin[:]
call[name[self].session_db.store_assertion, parameter[name[assertion], name[to_sign]]]
return[call[name[self]._response, parameter[name[in_response_to], name[consumer_url], name[status], name[issuer], name[sign_response], name[to_sign]]]] | keyword[def] identifier[_authn_response] ( identifier[self] , identifier[in_response_to] , identifier[consumer_url] ,
identifier[sp_entity_id] , identifier[identity] = keyword[None] , identifier[name_id] = keyword[None] ,
identifier[status] = keyword[None] , identifier[authn] = keyword[None] , identifier[issuer] = keyword[None] , identifier[policy] = keyword[None] ,
identifier[sign_assertion] = keyword[False] , identifier[sign_response] = keyword[False] ,
identifier[best_effort] = keyword[False] , identifier[encrypt_assertion] = keyword[False] ,
identifier[encrypt_cert_advice] = keyword[None] , identifier[encrypt_cert_assertion] = keyword[None] ,
identifier[authn_statement] = keyword[None] ,
identifier[encrypt_assertion_self_contained] = keyword[False] ,
identifier[encrypted_advice_attributes] = keyword[False] ,
identifier[pefim] = keyword[False] , identifier[sign_alg] = keyword[None] , identifier[digest_alg] = keyword[None] ,
identifier[farg] = keyword[None] , identifier[session_not_on_or_after] = keyword[None] ):
literal[string]
keyword[if] identifier[farg] keyword[is] keyword[None] :
identifier[assertion_args] ={}
identifier[args] ={}
identifier[_issuer] = identifier[self] . identifier[_issuer] ( identifier[issuer] )
keyword[if] identifier[pefim] :
identifier[encrypted_advice_attributes] = keyword[True]
identifier[encrypt_assertion_self_contained] = keyword[True]
identifier[assertion_attributes] = identifier[self] . identifier[setup_assertion] (
keyword[None] , identifier[sp_entity_id] , keyword[None] , keyword[None] , keyword[None] , identifier[policy] , keyword[None] , keyword[None] ,
identifier[identity] , identifier[best_effort] , identifier[sign_response] , identifier[farg] = identifier[farg] )
identifier[assertion] = identifier[self] . identifier[setup_assertion] (
identifier[authn] , identifier[sp_entity_id] , identifier[in_response_to] , identifier[consumer_url] , identifier[name_id] ,
identifier[policy] , identifier[_issuer] , identifier[authn_statement] ,[], keyword[True] , identifier[sign_response] ,
identifier[farg] = identifier[farg] , identifier[session_not_on_or_after] = identifier[session_not_on_or_after] )
identifier[assertion] . identifier[advice] = identifier[saml] . identifier[Advice] ()
identifier[assertion] . identifier[advice] . identifier[assertion] . identifier[append] ( identifier[assertion_attributes] )
keyword[else] :
identifier[assertion] = identifier[self] . identifier[setup_assertion] (
identifier[authn] , identifier[sp_entity_id] , identifier[in_response_to] , identifier[consumer_url] , identifier[name_id] ,
identifier[policy] , identifier[_issuer] , identifier[authn_statement] , identifier[identity] , keyword[True] ,
identifier[sign_response] , identifier[farg] = identifier[farg] ,
identifier[session_not_on_or_after] = identifier[session_not_on_or_after] )
identifier[to_sign] =[]
keyword[if] keyword[not] identifier[encrypt_assertion] :
keyword[if] identifier[sign_assertion] :
identifier[assertion] . identifier[signature] = identifier[pre_signature_part] ( identifier[assertion] . identifier[id] ,
identifier[self] . identifier[sec] . identifier[my_cert] , literal[int] ,
identifier[sign_alg] = identifier[sign_alg] ,
identifier[digest_alg] = identifier[digest_alg] )
identifier[to_sign] . identifier[append] (( identifier[class_name] ( identifier[assertion] ), identifier[assertion] . identifier[id] ))
identifier[args] [ literal[string] ]= identifier[assertion]
keyword[if] ( identifier[self] . identifier[support_AssertionIDRequest] () keyword[or] identifier[self] . identifier[support_AuthnQuery] ()):
identifier[self] . identifier[session_db] . identifier[store_assertion] ( identifier[assertion] , identifier[to_sign] )
keyword[return] identifier[self] . identifier[_response] (
identifier[in_response_to] , identifier[consumer_url] , identifier[status] , identifier[issuer] , identifier[sign_response] ,
identifier[to_sign] , identifier[sp_entity_id] = identifier[sp_entity_id] ,
identifier[encrypt_assertion] = identifier[encrypt_assertion] ,
identifier[encrypt_cert_advice] = identifier[encrypt_cert_advice] ,
identifier[encrypt_cert_assertion] = identifier[encrypt_cert_assertion] ,
identifier[encrypt_assertion_self_contained] = identifier[encrypt_assertion_self_contained] ,
identifier[encrypted_advice_attributes] = identifier[encrypted_advice_attributes] ,
identifier[sign_assertion] = identifier[sign_assertion] ,
identifier[pefim] = identifier[pefim] , identifier[sign_alg] = identifier[sign_alg] , identifier[digest_alg] = identifier[digest_alg] ,** identifier[args] ) | def _authn_response(self, in_response_to, consumer_url, sp_entity_id, identity=None, name_id=None, status=None, authn=None, issuer=None, policy=None, sign_assertion=False, sign_response=False, best_effort=False, encrypt_assertion=False, encrypt_cert_advice=None, encrypt_cert_assertion=None, authn_statement=None, encrypt_assertion_self_contained=False, encrypted_advice_attributes=False, pefim=False, sign_alg=None, digest_alg=None, farg=None, session_not_on_or_after=None):
""" Create a response. A layer of indirection.
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param sp_entity_id: The entity identifier of the SP
:param identity: A dictionary with attributes and values that are
expected to be the bases for the assertion in the response.
:param name_id: The identifier of the subject
:param status: The status of the response
:param authn: A dictionary containing information about the
authn context.
:param issuer: The issuer of the response
:param policy:
:param sign_assertion: Whether the assertion should be signed or not
:param sign_response: Whether the response should be signed or not
        :param best_effort: Send a response even if the SP's demands cannot
            be met.
        :param encrypt_assertion: True if assertions should be encrypted.
        :param encrypt_assertion_self_contained: True if all encrypted
            assertions should have all namespaces
            self-contained.
:param encrypted_advice_attributes: True if assertions in the advice
element should be encrypted.
:param encrypt_cert_advice: Certificate to be used for encryption of
assertions in the advice element.
:param encrypt_cert_assertion: Certificate to be used for encryption
of assertions.
:param authn_statement: Authentication statement.
:param pefim: True if a response according to the PEFIM profile
should be created.
:param farg: Argument to pass on to the assertion constructor
:return: A response instance
"""
if farg is None:
assertion_args = {} # depends on [control=['if'], data=[]]
args = {}
# if identity:
_issuer = self._issuer(issuer)
# if encrypt_assertion and show_nameid:
# tmp_name_id = name_id
# name_id = None
# name_id = None
# tmp_authn = authn
# authn = None
# tmp_authn_statement = authn_statement
# authn_statement = None
if pefim:
encrypted_advice_attributes = True
encrypt_assertion_self_contained = True
assertion_attributes = self.setup_assertion(None, sp_entity_id, None, None, None, policy, None, None, identity, best_effort, sign_response, farg=farg)
assertion = self.setup_assertion(authn, sp_entity_id, in_response_to, consumer_url, name_id, policy, _issuer, authn_statement, [], True, sign_response, farg=farg, session_not_on_or_after=session_not_on_or_after)
assertion.advice = saml.Advice()
# assertion.advice.assertion_id_ref.append(saml.AssertionIDRef())
# assertion.advice.assertion_uri_ref.append(saml.AssertionURIRef())
assertion.advice.assertion.append(assertion_attributes) # depends on [control=['if'], data=[]]
else:
assertion = self.setup_assertion(authn, sp_entity_id, in_response_to, consumer_url, name_id, policy, _issuer, authn_statement, identity, True, sign_response, farg=farg, session_not_on_or_after=session_not_on_or_after)
to_sign = []
if not encrypt_assertion:
if sign_assertion:
assertion.signature = pre_signature_part(assertion.id, self.sec.my_cert, 2, sign_alg=sign_alg, digest_alg=digest_alg)
to_sign.append((class_name(assertion), assertion.id)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
args['assertion'] = assertion
if self.support_AssertionIDRequest() or self.support_AuthnQuery():
self.session_db.store_assertion(assertion, to_sign) # depends on [control=['if'], data=[]]
return self._response(in_response_to, consumer_url, status, issuer, sign_response, to_sign, sp_entity_id=sp_entity_id, encrypt_assertion=encrypt_assertion, encrypt_cert_advice=encrypt_cert_advice, encrypt_cert_assertion=encrypt_cert_assertion, encrypt_assertion_self_contained=encrypt_assertion_self_contained, encrypted_advice_attributes=encrypted_advice_attributes, sign_assertion=sign_assertion, pefim=pefim, sign_alg=sign_alg, digest_alg=digest_alg, **args) |
def remove_sources(self, sources):
""" Remove sources from the decomposition.
This function removes sources from the decomposition. Doing so invalidates currently fitted VAR models and
connectivity estimates.
Parameters
----------
sources : {slice, int, array of ints}
Indices of components to remove.
Returns
-------
self : Workspace
The Workspace object.
Raises
------
RuntimeError
If the :class:`Workspace` instance does not contain a source decomposition.
"""
if self.unmixing_ is None or self.mixing_ is None:
raise RuntimeError("No sources available (run do_mvarica first)")
self.mixing_ = np.delete(self.mixing_, sources, 0)
self.unmixing_ = np.delete(self.unmixing_, sources, 1)
if self.activations_ is not None:
self.activations_ = np.delete(self.activations_, sources, 1)
self.var_model_ = None
self.var_cov_ = None
self.connectivity_ = None
self.mixmaps_ = []
self.unmixmaps_ = []
return self | def function[remove_sources, parameter[self, sources]]:
constant[ Remove sources from the decomposition.
This function removes sources from the decomposition. Doing so invalidates currently fitted VAR models and
connectivity estimates.
Parameters
----------
sources : {slice, int, array of ints}
Indices of components to remove.
Returns
-------
self : Workspace
The Workspace object.
Raises
------
RuntimeError
If the :class:`Workspace` instance does not contain a source decomposition.
]
if <ast.BoolOp object at 0x7da1b2630040> begin[:]
<ast.Raise object at 0x7da1b2630190>
name[self].mixing_ assign[=] call[name[np].delete, parameter[name[self].mixing_, name[sources], constant[0]]]
name[self].unmixing_ assign[=] call[name[np].delete, parameter[name[self].unmixing_, name[sources], constant[1]]]
if compare[name[self].activations_ is_not constant[None]] begin[:]
name[self].activations_ assign[=] call[name[np].delete, parameter[name[self].activations_, name[sources], constant[1]]]
name[self].var_model_ assign[=] constant[None]
name[self].var_cov_ assign[=] constant[None]
name[self].connectivity_ assign[=] constant[None]
name[self].mixmaps_ assign[=] list[[]]
name[self].unmixmaps_ assign[=] list[[]]
return[name[self]] | keyword[def] identifier[remove_sources] ( identifier[self] , identifier[sources] ):
literal[string]
keyword[if] identifier[self] . identifier[unmixing_] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[mixing_] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[mixing_] = identifier[np] . identifier[delete] ( identifier[self] . identifier[mixing_] , identifier[sources] , literal[int] )
identifier[self] . identifier[unmixing_] = identifier[np] . identifier[delete] ( identifier[self] . identifier[unmixing_] , identifier[sources] , literal[int] )
keyword[if] identifier[self] . identifier[activations_] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[activations_] = identifier[np] . identifier[delete] ( identifier[self] . identifier[activations_] , identifier[sources] , literal[int] )
identifier[self] . identifier[var_model_] = keyword[None]
identifier[self] . identifier[var_cov_] = keyword[None]
identifier[self] . identifier[connectivity_] = keyword[None]
identifier[self] . identifier[mixmaps_] =[]
identifier[self] . identifier[unmixmaps_] =[]
keyword[return] identifier[self] | def remove_sources(self, sources):
""" Remove sources from the decomposition.
This function removes sources from the decomposition. Doing so invalidates currently fitted VAR models and
connectivity estimates.
Parameters
----------
sources : {slice, int, array of ints}
Indices of components to remove.
Returns
-------
self : Workspace
The Workspace object.
Raises
------
RuntimeError
If the :class:`Workspace` instance does not contain a source decomposition.
"""
if self.unmixing_ is None or self.mixing_ is None:
raise RuntimeError('No sources available (run do_mvarica first)') # depends on [control=['if'], data=[]]
self.mixing_ = np.delete(self.mixing_, sources, 0)
self.unmixing_ = np.delete(self.unmixing_, sources, 1)
if self.activations_ is not None:
self.activations_ = np.delete(self.activations_, sources, 1) # depends on [control=['if'], data=[]]
self.var_model_ = None
self.var_cov_ = None
self.connectivity_ = None
self.mixmaps_ = []
self.unmixmaps_ = []
return self |
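
remove_sources() leans on numpy.delete with different axes: axis 0 removes rows (sources) of the mixing matrix, axis 1 removes columns of the unmixing matrix. Shapes below are illustrative.

import numpy as np

mixing = np.arange(12).reshape(4, 3)      # 4 sources x 3 channels
unmixing = np.arange(12).reshape(3, 4)    # 3 channels x 4 sources
print(np.delete(mixing, [1, 3], 0).shape)    # (2, 3) -- rows dropped
print(np.delete(unmixing, [1, 3], 1).shape)  # (3, 2) -- columns dropped
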
def setdefault(self, key, default=None):
"""
If *key* is in the dictionary, return its value. If not, insert *key*
with a value of *default* and return *default*. *default* defaults to
``None``.
"""
if key in self:
return self[key]
else:
self._setitem(key, default)
self._list_add(key)
return default | def function[setdefault, parameter[self, key, default]]:
constant[
If *key* is in the dictionary, return its value. If not, insert *key*
with a value of *default* and return *default*. *default* defaults to
``None``.
]
if compare[name[key] in name[self]] begin[:]
return[call[name[self]][name[key]]] | keyword[def] identifier[setdefault] ( identifier[self] , identifier[key] , identifier[default] = keyword[None] ):
literal[string]
keyword[if] identifier[key] keyword[in] identifier[self] :
keyword[return] identifier[self] [ identifier[key] ]
keyword[else] :
identifier[self] . identifier[_setitem] ( identifier[key] , identifier[default] )
identifier[self] . identifier[_list_add] ( identifier[key] )
keyword[return] identifier[default] | def setdefault(self, key, default=None):
"""
If *key* is in the dictionary, return its value. If not, insert *key*
with a value of *default* and return *default*. *default* defaults to
``None``.
"""
if key in self:
return self[key] # depends on [control=['if'], data=['key', 'self']]
else:
self._setitem(key, default)
self._list_add(key)
return default |
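
The contract matches the built-in dict.setdefault, shown here for reference:

d = {}
print(d.setdefault("a", 1))  # 1 -- key missing, default inserted
print(d.setdefault("a", 2))  # 1 -- key present, default ignored
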
def get_redirect_args(self, request, callback):
"""Get request parameters for redirect url."""
callback = request.build_absolute_uri(callback)
args = {
'client_id': self.consumer_key,
'redirect_uri': callback,
'response_type': 'code',
}
scope = self.get_scope(request)
if scope:
args['scope'] = self.scope_separator.join(self.get_scope(request))
state = self.get_application_state(request, callback)
if state is not None:
args['state'] = state
request.session[self.session_key] = state
auth_params = self.get_auth_params(request)
if auth_params:
args.update(auth_params)
return args | def function[get_redirect_args, parameter[self, request, callback]]:
constant[Get request parameters for redirect url.]
variable[callback] assign[=] call[name[request].build_absolute_uri, parameter[name[callback]]]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b26a5ea0>, <ast.Constant object at 0x7da1b26a6f20>, <ast.Constant object at 0x7da1b26a7910>], [<ast.Attribute object at 0x7da1b26a77f0>, <ast.Name object at 0x7da1b26a5090>, <ast.Constant object at 0x7da1b26a5e70>]]
variable[scope] assign[=] call[name[self].get_scope, parameter[name[request]]]
if name[scope] begin[:]
call[name[args]][constant[scope]] assign[=] call[name[self].scope_separator.join, parameter[call[name[self].get_scope, parameter[name[request]]]]]
variable[state] assign[=] call[name[self].get_application_state, parameter[name[request], name[callback]]]
if compare[name[state] is_not constant[None]] begin[:]
call[name[args]][constant[state]] assign[=] name[state]
call[name[request].session][name[self].session_key] assign[=] name[state]
variable[auth_params] assign[=] call[name[self].get_auth_params, parameter[name[request]]]
if name[auth_params] begin[:]
call[name[args].update, parameter[name[auth_params]]]
return[name[args]] | keyword[def] identifier[get_redirect_args] ( identifier[self] , identifier[request] , identifier[callback] ):
literal[string]
identifier[callback] = identifier[request] . identifier[build_absolute_uri] ( identifier[callback] )
identifier[args] ={
literal[string] : identifier[self] . identifier[consumer_key] ,
literal[string] : identifier[callback] ,
literal[string] : literal[string] ,
}
identifier[scope] = identifier[self] . identifier[get_scope] ( identifier[request] )
keyword[if] identifier[scope] :
identifier[args] [ literal[string] ]= identifier[self] . identifier[scope_separator] . identifier[join] ( identifier[self] . identifier[get_scope] ( identifier[request] ))
identifier[state] = identifier[self] . identifier[get_application_state] ( identifier[request] , identifier[callback] )
keyword[if] identifier[state] keyword[is] keyword[not] keyword[None] :
identifier[args] [ literal[string] ]= identifier[state]
identifier[request] . identifier[session] [ identifier[self] . identifier[session_key] ]= identifier[state]
identifier[auth_params] = identifier[self] . identifier[get_auth_params] ( identifier[request] )
keyword[if] identifier[auth_params] :
identifier[args] . identifier[update] ( identifier[auth_params] )
keyword[return] identifier[args] | def get_redirect_args(self, request, callback):
"""Get request parameters for redirect url."""
callback = request.build_absolute_uri(callback)
args = {'client_id': self.consumer_key, 'redirect_uri': callback, 'response_type': 'code'}
scope = self.get_scope(request)
if scope:
args['scope'] = self.scope_separator.join(self.get_scope(request)) # depends on [control=['if'], data=[]]
state = self.get_application_state(request, callback)
if state is not None:
args['state'] = state
request.session[self.session_key] = state # depends on [control=['if'], data=['state']]
auth_params = self.get_auth_params(request)
if auth_params:
args.update(auth_params) # depends on [control=['if'], data=[]]
return args |
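
What a caller typically does with the returned dict is serialize it onto the provider's authorize URL; all values below are illustrative, not a real client registration.

from urllib.parse import urlencode

args = {"client_id": "abc123", "redirect_uri": "https://app.example/cb",
        "response_type": "code", "scope": "read write", "state": "xyz"}
print("https://provider.example/authorize?" + urlencode(args))
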
def calculate_statistics(self):
"Jam some data through to generate statistics"
rev_ids = range(0, 100, 1)
feature_values = zip(rev_ids, [0] * 100)
scores = [self.score(f) for f in feature_values]
labels = [s['prediction'] for s in scores]
statistics = Classification(labels, threshold_ndigits=1, decision_key='probability')
score_labels = list(zip(scores, labels))
statistics.fit(score_labels)
return statistics | def function[calculate_statistics, parameter[self]]:
constant[Jam some data through to generate statistics]
variable[rev_ids] assign[=] call[name[range], parameter[constant[0], constant[100], constant[1]]]
variable[feature_values] assign[=] call[name[zip], parameter[name[rev_ids], binary_operation[list[[<ast.Constant object at 0x7da204345300>]] * constant[100]]]]
variable[scores] assign[=] <ast.ListComp object at 0x7da204346770>
variable[labels] assign[=] <ast.ListComp object at 0x7da204346ad0>
variable[statistics] assign[=] call[name[Classification], parameter[name[labels]]]
variable[score_labels] assign[=] call[name[list], parameter[call[name[zip], parameter[name[scores], name[labels]]]]]
call[name[statistics].fit, parameter[name[score_labels]]]
return[name[statistics]] | keyword[def] identifier[calculate_statistics] ( identifier[self] ):
literal[string]
identifier[rev_ids] = identifier[range] ( literal[int] , literal[int] , literal[int] )
identifier[feature_values] = identifier[zip] ( identifier[rev_ids] ,[ literal[int] ]* literal[int] )
identifier[scores] =[ identifier[self] . identifier[score] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[feature_values] ]
identifier[labels] =[ identifier[s] [ literal[string] ] keyword[for] identifier[s] keyword[in] identifier[scores] ]
identifier[statistics] = identifier[Classification] ( identifier[labels] , identifier[threshold_ndigits] = literal[int] , identifier[decision_key] = literal[string] )
identifier[score_labels] = identifier[list] ( identifier[zip] ( identifier[scores] , identifier[labels] ))
identifier[statistics] . identifier[fit] ( identifier[score_labels] )
keyword[return] identifier[statistics] | def calculate_statistics(self):
"""Jam some data through to generate statistics"""
rev_ids = range(0, 100, 1)
feature_values = zip(rev_ids, [0] * 100)
scores = [self.score(f) for f in feature_values]
labels = [s['prediction'] for s in scores]
statistics = Classification(labels, threshold_ndigits=1, decision_key='probability')
score_labels = list(zip(scores, labels))
statistics.fit(score_labels)
return statistics |
def reply_inform(self, inform, orig_req):
"""Send an inform as part of the reply to an earlier request.
Parameters
----------
inform : Message object
The inform message to send.
orig_req : Message object
The request message being replied to. The inform message's
id is overridden with the id from orig_req before the
inform is sent.
"""
assert (inform.mtype == Message.INFORM)
assert (inform.name == orig_req.name)
inform.mid = orig_req.mid
return self._send_message(inform) | def function[reply_inform, parameter[self, inform, orig_req]]:
constant[Send an inform as part of the reply to an earlier request.
Parameters
----------
inform : Message object
The inform message to send.
orig_req : Message object
The request message being replied to. The inform message's
id is overridden with the id from orig_req before the
inform is sent.
]
assert[compare[name[inform].mtype equal[==] name[Message].INFORM]]
assert[compare[name[inform].name equal[==] name[orig_req].name]]
name[inform].mid assign[=] name[orig_req].mid
return[call[name[self]._send_message, parameter[name[inform]]]] | keyword[def] identifier[reply_inform] ( identifier[self] , identifier[inform] , identifier[orig_req] ):
literal[string]
keyword[assert] ( identifier[inform] . identifier[mtype] == identifier[Message] . identifier[INFORM] )
keyword[assert] ( identifier[inform] . identifier[name] == identifier[orig_req] . identifier[name] )
identifier[inform] . identifier[mid] = identifier[orig_req] . identifier[mid]
keyword[return] identifier[self] . identifier[_send_message] ( identifier[inform] ) | def reply_inform(self, inform, orig_req):
"""Send an inform as part of the reply to an earlier request.
Parameters
----------
inform : Message object
The inform message to send.
orig_req : Message object
The request message being replied to. The inform message's
id is overridden with the id from orig_req before the
inform is sent.
"""
assert inform.mtype == Message.INFORM
assert inform.name == orig_req.name
inform.mid = orig_req.mid
return self._send_message(inform) |
def create_data(datatype='ChanTime', n_trial=1, s_freq=256,
chan_name=None, n_chan=8,
time=None, freq=None, start_time=None,
signal='random', amplitude=1, color=0, sine_freq=10,
attr=None):
"""Create data of different datatype from scratch.
Parameters
----------
datatype : str
one of 'ChanTime', 'ChanFreq', 'ChanTimeFreq'
n_trial : int
number of trials
s_freq : int
sampling frequency
chan_name : list of str
names of the channels
n_chan : int
if chan_name is not specified, this defines the number of channels
time : numpy.ndarray or tuple of two numbers
if tuple, the first and second numbers indicate beginning and end
freq : numpy.ndarray or tuple of two numbers
if tuple, the first and second numbers indicate beginning and end
start_time : datetime.datetime, optional
starting time of the recordings
attr : list of str
        list of possible attributes (currently only 'chan')
Only for datatype == 'ChanTime'
signal : str
'random', 'sine'
amplitude : float
amplitude (peak-to-peak) of the signal
color : float
noise color to generate (white noise is 0, pink is 1, brown is 2).
This is only appropriate if signal == 'random'
sine_freq : float
frequency of the sine wave (only if signal == 'sine'), where phase
is random for each channel
Returns
-------
data : instance of specified datatype
Notes
-----
    ChanTime uses randn (to have normally distributed noise), while the
    datatypes with a freq axis use random (which always gives positive values).
You can only color noise for ChanTime, not for the other datatypes.
"""
possible_datatypes = ('ChanTime', 'ChanFreq', 'ChanTimeFreq')
if datatype not in possible_datatypes:
raise ValueError('Datatype should be one of ' +
', '.join(possible_datatypes))
if time is not None:
if isinstance(time, tuple) and len(time) == 2:
time = arange(time[0], time[1], 1. / s_freq)
else:
time = arange(0, 1, 1. / s_freq)
if freq is not None:
if isinstance(freq, tuple) and len(freq) == 2:
freq = arange(freq[0], freq[1])
else:
freq = arange(0, s_freq / 2. + 1)
if chan_name is None:
chan_name = _make_chan_name(n_chan)
else:
n_chan = len(chan_name)
if start_time is None:
start_time = datetime.now()
if datatype == 'ChanTime':
data = ChanTime()
data.data = empty(n_trial, dtype='O')
for i in range(n_trial):
if signal == 'random':
values = random.randn(*(len(chan_name), len(time)))
for i_ch, x in enumerate(values):
values[i_ch, :] = _color_noise(x, s_freq, color)
elif signal == 'sine':
values = empty((n_chan, time.shape[0]))
for i_ch in range(n_chan):
values[i_ch, :] = sin(2 * pi * sine_freq * time +
random.randn())
data.data[i] = values / ptp(values, axis=1)[:, None] * amplitude
if datatype == 'ChanFreq':
data = ChanFreq()
data.data = empty(n_trial, dtype='O')
for i in range(n_trial):
data.data[i] = random.random((len(chan_name), len(freq)))
if datatype == 'ChanTimeFreq':
data = ChanTimeFreq()
data.data = empty(n_trial, dtype='O')
for i in range(n_trial):
data.data[i] = random.random((len(chan_name), len(time), len(freq)))
data.start_time = start_time
data.s_freq = s_freq
data.axis['chan'] = empty(n_trial, dtype='O')
for i in range(n_trial):
data.axis['chan'][i] = asarray(chan_name, dtype='U')
if datatype in ('ChanTime', 'ChanTimeFreq'):
data.axis['time'] = empty(n_trial, dtype='O')
for i in range(n_trial):
data.axis['time'][i] = time
if datatype in ('ChanFreq', 'ChanTimeFreq'):
data.axis['freq'] = empty(n_trial, dtype='O')
for i in range(n_trial):
data.axis['freq'][i] = freq
if attr is not None:
if 'chan' in attr:
data.attr['chan'] = create_channels(data.chan[0])
return data | def function[create_data, parameter[datatype, n_trial, s_freq, chan_name, n_chan, time, freq, start_time, signal, amplitude, color, sine_freq, attr]]:
constant[Create data of different datatype from scratch.
Parameters
----------
datatype : str
one of 'ChanTime', 'ChanFreq', 'ChanTimeFreq'
n_trial : int
number of trials
s_freq : int
sampling frequency
chan_name : list of str
names of the channels
n_chan : int
if chan_name is not specified, this defines the number of channels
time : numpy.ndarray or tuple of two numbers
if tuple, the first and second numbers indicate beginning and end
freq : numpy.ndarray or tuple of two numbers
if tuple, the first and second numbers indicate beginning and end
start_time : datetime.datetime, optional
starting time of the recordings
attr : list of str
list of possible attributes (currently only 'channels')
Only for datatype == 'ChanTime'
signal : str
'random', 'sine'
amplitude : float
amplitude (peak-to-peak) of the signal
color : float
noise color to generate (white noise is 0, pink is 1, brown is 2).
This is only appropriate if signal == 'random'
sine_freq : float
frequency of the sine wave (only if signal == 'sine'), where phase
is random for each channel
Returns
-------
data : instance of specified datatype
Notes
-----
ChanTime uses randn (to have normally distributed noise), while when you
have freq, it uses random (which gives always positive values).
You can only color noise for ChanTime, not for the other datatypes.
]
variable[possible_datatypes] assign[=] tuple[[<ast.Constant object at 0x7da2047ebd90>, <ast.Constant object at 0x7da2047e8b80>, <ast.Constant object at 0x7da2047ebe20>]]
if compare[name[datatype] <ast.NotIn object at 0x7da2590d7190> name[possible_datatypes]] begin[:]
<ast.Raise object at 0x7da2047e9510>
if compare[name[time] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da2047e92a0> begin[:]
variable[time] assign[=] call[name[arange], parameter[call[name[time]][constant[0]], call[name[time]][constant[1]], binary_operation[constant[1.0] / name[s_freq]]]]
if compare[name[freq] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da2047e9e70> begin[:]
variable[freq] assign[=] call[name[arange], parameter[call[name[freq]][constant[0]], call[name[freq]][constant[1]]]]
if compare[name[chan_name] is constant[None]] begin[:]
variable[chan_name] assign[=] call[name[_make_chan_name], parameter[name[n_chan]]]
if compare[name[start_time] is constant[None]] begin[:]
variable[start_time] assign[=] call[name[datetime].now, parameter[]]
if compare[name[datatype] equal[==] constant[ChanTime]] begin[:]
variable[data] assign[=] call[name[ChanTime], parameter[]]
name[data].data assign[=] call[name[empty], parameter[name[n_trial]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_trial]]]] begin[:]
if compare[name[signal] equal[==] constant[random]] begin[:]
variable[values] assign[=] call[name[random].randn, parameter[<ast.Starred object at 0x7da20c795570>]]
for taget[tuple[[<ast.Name object at 0x7da20c7961a0>, <ast.Name object at 0x7da20c7946a0>]]] in starred[call[name[enumerate], parameter[name[values]]]] begin[:]
call[name[values]][tuple[[<ast.Name object at 0x7da20c796140>, <ast.Slice object at 0x7da20c7943d0>]]] assign[=] call[name[_color_noise], parameter[name[x], name[s_freq], name[color]]]
call[name[data].data][name[i]] assign[=] binary_operation[binary_operation[name[values] / call[call[name[ptp], parameter[name[values]]]][tuple[[<ast.Slice object at 0x7da20c794b80>, <ast.Constant object at 0x7da20c794100>]]]] * name[amplitude]]
if compare[name[datatype] equal[==] constant[ChanFreq]] begin[:]
variable[data] assign[=] call[name[ChanFreq], parameter[]]
name[data].data assign[=] call[name[empty], parameter[name[n_trial]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_trial]]]] begin[:]
call[name[data].data][name[i]] assign[=] call[name[random].random, parameter[tuple[[<ast.Call object at 0x7da20c796860>, <ast.Call object at 0x7da20c7959c0>]]]]
if compare[name[datatype] equal[==] constant[ChanTimeFreq]] begin[:]
variable[data] assign[=] call[name[ChanTimeFreq], parameter[]]
name[data].data assign[=] call[name[empty], parameter[name[n_trial]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_trial]]]] begin[:]
call[name[data].data][name[i]] assign[=] call[name[random].random, parameter[tuple[[<ast.Call object at 0x7da1b0dee5f0>, <ast.Call object at 0x7da1b0deeda0>, <ast.Call object at 0x7da1b0dee1a0>]]]]
name[data].start_time assign[=] name[start_time]
name[data].s_freq assign[=] name[s_freq]
call[name[data].axis][constant[chan]] assign[=] call[name[empty], parameter[name[n_trial]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_trial]]]] begin[:]
call[call[name[data].axis][constant[chan]]][name[i]] assign[=] call[name[asarray], parameter[name[chan_name]]]
if compare[name[datatype] in tuple[[<ast.Constant object at 0x7da20c6e5ab0>, <ast.Constant object at 0x7da20c6e7ee0>]]] begin[:]
call[name[data].axis][constant[time]] assign[=] call[name[empty], parameter[name[n_trial]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_trial]]]] begin[:]
call[call[name[data].axis][constant[time]]][name[i]] assign[=] name[time]
if compare[name[datatype] in tuple[[<ast.Constant object at 0x7da20c6e6a10>, <ast.Constant object at 0x7da20c6e72b0>]]] begin[:]
call[name[data].axis][constant[freq]] assign[=] call[name[empty], parameter[name[n_trial]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_trial]]]] begin[:]
call[call[name[data].axis][constant[freq]]][name[i]] assign[=] name[freq]
if compare[name[attr] is_not constant[None]] begin[:]
if compare[constant[chan] in name[attr]] begin[:]
call[name[data].attr][constant[chan]] assign[=] call[name[create_channels], parameter[call[name[data].chan][constant[0]]]]
return[name[data]] | keyword[def] identifier[create_data] ( identifier[datatype] = literal[string] , identifier[n_trial] = literal[int] , identifier[s_freq] = literal[int] ,
identifier[chan_name] = keyword[None] , identifier[n_chan] = literal[int] ,
identifier[time] = keyword[None] , identifier[freq] = keyword[None] , identifier[start_time] = keyword[None] ,
identifier[signal] = literal[string] , identifier[amplitude] = literal[int] , identifier[color] = literal[int] , identifier[sine_freq] = literal[int] ,
identifier[attr] = keyword[None] ):
literal[string]
identifier[possible_datatypes] =( literal[string] , literal[string] , literal[string] )
keyword[if] identifier[datatype] keyword[not] keyword[in] identifier[possible_datatypes] :
keyword[raise] identifier[ValueError] ( literal[string] +
literal[string] . identifier[join] ( identifier[possible_datatypes] ))
keyword[if] identifier[time] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[time] , identifier[tuple] ) keyword[and] identifier[len] ( identifier[time] )== literal[int] :
identifier[time] = identifier[arange] ( identifier[time] [ literal[int] ], identifier[time] [ literal[int] ], literal[int] / identifier[s_freq] )
keyword[else] :
identifier[time] = identifier[arange] ( literal[int] , literal[int] , literal[int] / identifier[s_freq] )
keyword[if] identifier[freq] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[freq] , identifier[tuple] ) keyword[and] identifier[len] ( identifier[freq] )== literal[int] :
identifier[freq] = identifier[arange] ( identifier[freq] [ literal[int] ], identifier[freq] [ literal[int] ])
keyword[else] :
identifier[freq] = identifier[arange] ( literal[int] , identifier[s_freq] / literal[int] + literal[int] )
keyword[if] identifier[chan_name] keyword[is] keyword[None] :
identifier[chan_name] = identifier[_make_chan_name] ( identifier[n_chan] )
keyword[else] :
identifier[n_chan] = identifier[len] ( identifier[chan_name] )
keyword[if] identifier[start_time] keyword[is] keyword[None] :
identifier[start_time] = identifier[datetime] . identifier[now] ()
keyword[if] identifier[datatype] == literal[string] :
identifier[data] = identifier[ChanTime] ()
identifier[data] . identifier[data] = identifier[empty] ( identifier[n_trial] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_trial] ):
keyword[if] identifier[signal] == literal[string] :
identifier[values] = identifier[random] . identifier[randn] (*( identifier[len] ( identifier[chan_name] ), identifier[len] ( identifier[time] )))
keyword[for] identifier[i_ch] , identifier[x] keyword[in] identifier[enumerate] ( identifier[values] ):
identifier[values] [ identifier[i_ch] ,:]= identifier[_color_noise] ( identifier[x] , identifier[s_freq] , identifier[color] )
keyword[elif] identifier[signal] == literal[string] :
identifier[values] = identifier[empty] (( identifier[n_chan] , identifier[time] . identifier[shape] [ literal[int] ]))
keyword[for] identifier[i_ch] keyword[in] identifier[range] ( identifier[n_chan] ):
identifier[values] [ identifier[i_ch] ,:]= identifier[sin] ( literal[int] * identifier[pi] * identifier[sine_freq] * identifier[time] +
identifier[random] . identifier[randn] ())
identifier[data] . identifier[data] [ identifier[i] ]= identifier[values] / identifier[ptp] ( identifier[values] , identifier[axis] = literal[int] )[:, keyword[None] ]* identifier[amplitude]
keyword[if] identifier[datatype] == literal[string] :
identifier[data] = identifier[ChanFreq] ()
identifier[data] . identifier[data] = identifier[empty] ( identifier[n_trial] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_trial] ):
identifier[data] . identifier[data] [ identifier[i] ]= identifier[random] . identifier[random] (( identifier[len] ( identifier[chan_name] ), identifier[len] ( identifier[freq] )))
keyword[if] identifier[datatype] == literal[string] :
identifier[data] = identifier[ChanTimeFreq] ()
identifier[data] . identifier[data] = identifier[empty] ( identifier[n_trial] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_trial] ):
identifier[data] . identifier[data] [ identifier[i] ]= identifier[random] . identifier[random] (( identifier[len] ( identifier[chan_name] ), identifier[len] ( identifier[time] ), identifier[len] ( identifier[freq] )))
identifier[data] . identifier[start_time] = identifier[start_time]
identifier[data] . identifier[s_freq] = identifier[s_freq]
identifier[data] . identifier[axis] [ literal[string] ]= identifier[empty] ( identifier[n_trial] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_trial] ):
identifier[data] . identifier[axis] [ literal[string] ][ identifier[i] ]= identifier[asarray] ( identifier[chan_name] , identifier[dtype] = literal[string] )
keyword[if] identifier[datatype] keyword[in] ( literal[string] , literal[string] ):
identifier[data] . identifier[axis] [ literal[string] ]= identifier[empty] ( identifier[n_trial] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_trial] ):
identifier[data] . identifier[axis] [ literal[string] ][ identifier[i] ]= identifier[time]
keyword[if] identifier[datatype] keyword[in] ( literal[string] , literal[string] ):
identifier[data] . identifier[axis] [ literal[string] ]= identifier[empty] ( identifier[n_trial] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_trial] ):
identifier[data] . identifier[axis] [ literal[string] ][ identifier[i] ]= identifier[freq]
keyword[if] identifier[attr] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[string] keyword[in] identifier[attr] :
identifier[data] . identifier[attr] [ literal[string] ]= identifier[create_channels] ( identifier[data] . identifier[chan] [ literal[int] ])
keyword[return] identifier[data] | def create_data(datatype='ChanTime', n_trial=1, s_freq=256, chan_name=None, n_chan=8, time=None, freq=None, start_time=None, signal='random', amplitude=1, color=0, sine_freq=10, attr=None):
"""Create data of different datatype from scratch.
Parameters
----------
datatype : str
one of 'ChanTime', 'ChanFreq', 'ChanTimeFreq'
n_trial : int
number of trials
s_freq : int
sampling frequency
chan_name : list of str
names of the channels
n_chan : int
if chan_name is not specified, this defines the number of channels
time : numpy.ndarray or tuple of two numbers
if tuple, the first and second numbers indicate beginning and end
freq : numpy.ndarray or tuple of two numbers
if tuple, the first and second numbers indicate beginning and end
start_time : datetime.datetime, optional
starting time of the recordings
attr : list of str
        list of possible attributes (currently only 'chan')
Only for datatype == 'ChanTime'
signal : str
'random', 'sine'
amplitude : float
amplitude (peak-to-peak) of the signal
color : float
noise color to generate (white noise is 0, pink is 1, brown is 2).
This is only appropriate if signal == 'random'
sine_freq : float
frequency of the sine wave (only if signal == 'sine'), where phase
is random for each channel
Returns
-------
data : instance of specified datatype
Notes
-----
    ChanTime uses randn (to have normally distributed noise), while the
    datatypes with a freq axis use random (which always gives positive values).
You can only color noise for ChanTime, not for the other datatypes.
"""
possible_datatypes = ('ChanTime', 'ChanFreq', 'ChanTimeFreq')
if datatype not in possible_datatypes:
raise ValueError('Datatype should be one of ' + ', '.join(possible_datatypes)) # depends on [control=['if'], data=['possible_datatypes']]
if time is not None:
if isinstance(time, tuple) and len(time) == 2:
time = arange(time[0], time[1], 1.0 / s_freq) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['time']]
else:
time = arange(0, 1, 1.0 / s_freq)
if freq is not None:
if isinstance(freq, tuple) and len(freq) == 2:
freq = arange(freq[0], freq[1]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['freq']]
else:
freq = arange(0, s_freq / 2.0 + 1)
if chan_name is None:
chan_name = _make_chan_name(n_chan) # depends on [control=['if'], data=['chan_name']]
else:
n_chan = len(chan_name)
if start_time is None:
start_time = datetime.now() # depends on [control=['if'], data=['start_time']]
if datatype == 'ChanTime':
data = ChanTime()
data.data = empty(n_trial, dtype='O')
for i in range(n_trial):
if signal == 'random':
values = random.randn(*(len(chan_name), len(time)))
for (i_ch, x) in enumerate(values):
values[i_ch, :] = _color_noise(x, s_freq, color) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif signal == 'sine':
values = empty((n_chan, time.shape[0]))
for i_ch in range(n_chan):
values[i_ch, :] = sin(2 * pi * sine_freq * time + random.randn()) # depends on [control=['for'], data=['i_ch']] # depends on [control=['if'], data=[]]
data.data[i] = values / ptp(values, axis=1)[:, None] * amplitude # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if datatype == 'ChanFreq':
data = ChanFreq()
data.data = empty(n_trial, dtype='O')
for i in range(n_trial):
data.data[i] = random.random((len(chan_name), len(freq))) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if datatype == 'ChanTimeFreq':
data = ChanTimeFreq()
data.data = empty(n_trial, dtype='O')
for i in range(n_trial):
data.data[i] = random.random((len(chan_name), len(time), len(freq))) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
data.start_time = start_time
data.s_freq = s_freq
data.axis['chan'] = empty(n_trial, dtype='O')
for i in range(n_trial):
data.axis['chan'][i] = asarray(chan_name, dtype='U') # depends on [control=['for'], data=['i']]
if datatype in ('ChanTime', 'ChanTimeFreq'):
data.axis['time'] = empty(n_trial, dtype='O')
for i in range(n_trial):
data.axis['time'][i] = time # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if datatype in ('ChanFreq', 'ChanTimeFreq'):
data.axis['freq'] = empty(n_trial, dtype='O')
for i in range(n_trial):
data.axis['freq'][i] = freq # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if attr is not None:
if 'chan' in attr:
data.attr['chan'] = create_channels(data.chan[0]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['attr']]
return data |
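A minimal usage sketch for create_data, assuming the module-level imports the original file provides (numpy's arange/empty/random/sin/pi/ptp/asarray and the Chan* classes):

data = create_data(datatype='ChanTime', n_trial=2, s_freq=128,
                   time=(0, 2), signal='sine', sine_freq=8, amplitude=50)
print(data.data[0].shape)        # (8, 256): 8 default channels, 2 s at 128 Hz
print(data.axis['time'][0][:3])  # array([0., 0.0078125, 0.015625])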
def metadata_and_language_from_option_line(self, line):
"""Parse code options on the given line. When a start of a code cell
is found, self.metadata is set to a dictionary."""
if self.start_code_re.match(line):
self.language, self.metadata = self.options_to_metadata(self.start_code_re.findall(line)[0]) | def function[metadata_and_language_from_option_line, parameter[self, line]]:
constant[Parse code options on the given line. When a start of a code cell
is found, self.metadata is set to a dictionary.]
if call[name[self].start_code_re.match, parameter[name[line]]] begin[:]
<ast.Tuple object at 0x7da2054a6f50> assign[=] call[name[self].options_to_metadata, parameter[call[call[name[self].start_code_re.findall, parameter[name[line]]]][constant[0]]]] | keyword[def] identifier[metadata_and_language_from_option_line] ( identifier[self] , identifier[line] ):
literal[string]
keyword[if] identifier[self] . identifier[start_code_re] . identifier[match] ( identifier[line] ):
identifier[self] . identifier[language] , identifier[self] . identifier[metadata] = identifier[self] . identifier[options_to_metadata] ( identifier[self] . identifier[start_code_re] . identifier[findall] ( identifier[line] )[ literal[int] ]) | def metadata_and_language_from_option_line(self, line):
"""Parse code options on the given line. When a start of a code cell
is found, self.metadata is set to a dictionary."""
if self.start_code_re.match(line):
(self.language, self.metadata) = self.options_to_metadata(self.start_code_re.findall(line)[0]) # depends on [control=['if'], data=[]] |
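A self-contained toy illustrating the same pattern; the regex and the options_to_metadata parser below are assumptions for demonstration, not the original reader's logic:

import re

class ToyReader:
    # toy fence matcher: captures everything after ``` on the line (assumption)
    start_code_re = re.compile(r'^```(.*)$')

    def options_to_metadata(self, options):
        lang, _, rest = options.partition(' ')
        # toy parser: bare key=value pairs become metadata entries
        meta = dict(kv.split('=', 1) for kv in rest.split() if '=' in kv)
        return lang or None, meta

    def metadata_and_language_from_option_line(self, line):
        if self.start_code_re.match(line):
            self.language, self.metadata = self.options_to_metadata(
                self.start_code_re.findall(line)[0])

r = ToyReader()
r.metadata_and_language_from_option_line('```python cell=first')
print(r.language, r.metadata)   # python {'cell': 'first'}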