code | code_sememe | token_type | code_dependency
---|---|---|---|
def query(options, collection_name, num_to_skip, num_to_return,
query, field_selector, opts, check_keys=False, ctx=None):
"""Get a **query** message."""
if ctx:
return _query_compressed(options, collection_name, num_to_skip,
num_to_return, query, field_selector,
opts, check_keys, ctx)
return _query_uncompressed(options, collection_name, num_to_skip,
num_to_return, query, field_selector, opts,
check_keys) | def function[query, parameter[options, collection_name, num_to_skip, num_to_return, query, field_selector, opts, check_keys, ctx]]:
constant[Get a **query** message.]
if name[ctx] begin[:]
return[call[name[_query_compressed], parameter[name[options], name[collection_name], name[num_to_skip], name[num_to_return], name[query], name[field_selector], name[opts], name[check_keys], name[ctx]]]]
return[call[name[_query_uncompressed], parameter[name[options], name[collection_name], name[num_to_skip], name[num_to_return], name[query], name[field_selector], name[opts], name[check_keys]]]] | keyword[def] identifier[query] ( identifier[options] , identifier[collection_name] , identifier[num_to_skip] , identifier[num_to_return] ,
identifier[query] , identifier[field_selector] , identifier[opts] , identifier[check_keys] = keyword[False] , identifier[ctx] = keyword[None] ):
literal[string]
keyword[if] identifier[ctx] :
keyword[return] identifier[_query_compressed] ( identifier[options] , identifier[collection_name] , identifier[num_to_skip] ,
identifier[num_to_return] , identifier[query] , identifier[field_selector] ,
identifier[opts] , identifier[check_keys] , identifier[ctx] )
keyword[return] identifier[_query_uncompressed] ( identifier[options] , identifier[collection_name] , identifier[num_to_skip] ,
identifier[num_to_return] , identifier[query] , identifier[field_selector] , identifier[opts] ,
identifier[check_keys] ) | def query(options, collection_name, num_to_skip, num_to_return, query, field_selector, opts, check_keys=False, ctx=None):
"""Get a **query** message."""
if ctx:
return _query_compressed(options, collection_name, num_to_skip, num_to_return, query, field_selector, opts, check_keys, ctx) # depends on [control=['if'], data=[]]
return _query_uncompressed(options, collection_name, num_to_skip, num_to_return, query, field_selector, opts, check_keys) |
def post(self, *args, **kwargs):
"""Save file and return saved info or report errors."""
if self.upload_allowed():
form = self.get_upload_form()
result = {}
if form.is_valid():
storage = self.get_storage()
result['is_valid'] = True
info = form.stash(storage, self.request.path)
result.update(info)
else:
result.update({
'is_valid': False,
'errors': form.errors,
})
return HttpResponse(json.dumps(result), content_type='application/json')
else:
return HttpResponseForbidden() | def function[post, parameter[self]]:
constant[Save file and return saved info or report errors.]
if call[name[self].upload_allowed, parameter[]] begin[:]
variable[form] assign[=] call[name[self].get_upload_form, parameter[]]
variable[result] assign[=] dictionary[[], []]
if call[name[form].is_valid, parameter[]] begin[:]
variable[storage] assign[=] call[name[self].get_storage, parameter[]]
call[name[result]][constant[is_valid]] assign[=] constant[True]
variable[info] assign[=] call[name[form].stash, parameter[name[storage], name[self].request.path]]
call[name[result].update, parameter[name[info]]]
return[call[name[HttpResponse], parameter[call[name[json].dumps, parameter[name[result]]]]]] | keyword[def] identifier[post] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[upload_allowed] ():
identifier[form] = identifier[self] . identifier[get_upload_form] ()
identifier[result] ={}
keyword[if] identifier[form] . identifier[is_valid] ():
identifier[storage] = identifier[self] . identifier[get_storage] ()
identifier[result] [ literal[string] ]= keyword[True]
identifier[info] = identifier[form] . identifier[stash] ( identifier[storage] , identifier[self] . identifier[request] . identifier[path] )
identifier[result] . identifier[update] ( identifier[info] )
keyword[else] :
identifier[result] . identifier[update] ({
literal[string] : keyword[False] ,
literal[string] : identifier[form] . identifier[errors] ,
})
keyword[return] identifier[HttpResponse] ( identifier[json] . identifier[dumps] ( identifier[result] ), identifier[content_type] = literal[string] )
keyword[else] :
keyword[return] identifier[HttpResponseForbidden] () | def post(self, *args, **kwargs):
"""Save file and return saved info or report errors."""
if self.upload_allowed():
form = self.get_upload_form()
result = {}
if form.is_valid():
storage = self.get_storage()
result['is_valid'] = True
info = form.stash(storage, self.request.path)
result.update(info) # depends on [control=['if'], data=[]]
else:
result.update({'is_valid': False, 'errors': form.errors})
return HttpResponse(json.dumps(result), content_type='application/json') # depends on [control=['if'], data=[]]
else:
return HttpResponseForbidden() |
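For reference, a minimal sketch of the JSON contract the post() handler above produces. The success keys beyond 'is_valid' come from form.stash() and are hypothetical here:

import json

# Hypothetical payloads mirroring the two branches above; the success
# keys beyond 'is_valid' depend on what form.stash() returns.
success = {"is_valid": True, "filename": "upload.txt", "url": "/stash/upload.txt"}
failure = {"is_valid": False, "errors": {"file": ["This field is required."]}}

print(json.dumps(success))
print(json.dumps(failure))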
def parse_from_calc(self):
"""
Parses the datafolder, stores results.
This parser for this simple code does simply store in the DB a node
representing the file of forces in real space
"""
from aiida.common.exceptions import InvalidOperation
from aiida.common import aiidalogger
from aiida.backends.djsite.utils import get_dblogger_extra
import ase, ase.io
parserlogger = aiidalogger.getChild('aseparser')
logger_extra = get_dblogger_extra(self._calc)
# suppose at the start that the job is successful
successful = True
# check that calculation is in the right state
state = self._calc.get_state()
if state != calc_states.PARSING:
raise InvalidOperation("Calculation not in {} state"
.format(calc_states.PARSING) )
# select the folder object
out_folder = self._calc.get_retrieved_node()
# check what is inside the folder
list_of_files = out_folder.get_folder_list()
# at least the stdout should exist
if not self._calc._OUTPUT_FILE_NAME in list_of_files:
successful = False
parserlogger.error("Standard output not found",extra=logger_extra)
return successful,()
# output structure
has_out_atoms = True if self._calc._output_aseatoms in list_of_files else False
if has_out_atoms:
out_atoms = ase.io.read( out_folder.get_abs_path( self._calc._output_aseatoms ) )
out_structure = StructureData().set_ase(out_atoms)
# load the results dictionary
json_outfile = out_folder.get_abs_path( self._calc._OUTPUT_FILE_NAME )
with open(json_outfile,'r') as f:
json_params = json.load(f)
# extract arrays from json_params
dictionary_array = {}
for k,v in list(json_params.iteritems()):
if isinstance(v, (list,tuple)):
dictionary_array[k] = json_params.pop(k)
# look at warnings
warnings = []
with open(out_folder.get_abs_path( self._calc._SCHED_ERROR_FILE )) as f:
errors = f.read()
if errors:
warnings = [errors]
json_params['warnings'] = warnings
# save the outputs
new_nodes_list= []
# save the arrays
if dictionary_array:
array_data = ArrayData()
for k,v in dictionary_array.iteritems():
array_data.set_array(k,numpy.array(v))
new_nodes_list.append( (self._outarray_name, array_data) )
# save the parameters
if json_params:
parameter_data = ParameterData( dict=json_params )
new_nodes_list.append( (self._outdict_name, parameter_data) )
if has_out_atoms:
structure_data = StructureData()
new_nodes_list.append( (self._outstruc_name, structure_data) )
return successful,new_nodes_list | def function[parse_from_calc, parameter[self]]:
constant[
Parses the datafolder, stores results.
This parser for this simple code does simply store in the DB a node
representing the file of forces in real space
]
from relative_module[aiida.common.exceptions] import module[InvalidOperation]
from relative_module[aiida.common] import module[aiidalogger]
from relative_module[aiida.backends.djsite.utils] import module[get_dblogger_extra]
import module[ase], module[ase.io]
variable[parserlogger] assign[=] call[name[aiidalogger].getChild, parameter[constant[aseparser]]]
variable[logger_extra] assign[=] call[name[get_dblogger_extra], parameter[name[self]._calc]]
variable[successful] assign[=] constant[True]
variable[state] assign[=] call[name[self]._calc.get_state, parameter[]]
if compare[name[state] not_equal[!=] name[calc_states].PARSING] begin[:]
<ast.Raise object at 0x7da1b0b71b40>
variable[out_folder] assign[=] call[name[self]._calc.get_retrieved_node, parameter[]]
variable[list_of_files] assign[=] call[name[out_folder].get_folder_list, parameter[]]
if <ast.UnaryOp object at 0x7da1b0b720b0> begin[:]
variable[successful] assign[=] constant[False]
call[name[parserlogger].error, parameter[constant[Standard output not found]]]
return[tuple[[<ast.Name object at 0x7da1b0b711e0>, <ast.Tuple object at 0x7da1b0b70ac0>]]]
variable[has_out_atoms] assign[=] <ast.IfExp object at 0x7da1b0b73d60>
if name[has_out_atoms] begin[:]
variable[out_atoms] assign[=] call[name[ase].io.read, parameter[call[name[out_folder].get_abs_path, parameter[name[self]._calc._output_aseatoms]]]]
variable[out_structure] assign[=] call[call[name[StructureData], parameter[]].set_ase, parameter[name[out_atoms]]]
variable[json_outfile] assign[=] call[name[out_folder].get_abs_path, parameter[name[self]._calc._OUTPUT_FILE_NAME]]
with call[name[open], parameter[name[json_outfile], constant[r]]] begin[:]
variable[json_params] assign[=] call[name[json].load, parameter[name[f]]]
variable[dictionary_array] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0b722c0>, <ast.Name object at 0x7da1b0b705b0>]]] in starred[call[name[list], parameter[call[name[json_params].iteritems, parameter[]]]]] begin[:]
if call[name[isinstance], parameter[name[v], tuple[[<ast.Name object at 0x7da1b0b71780>, <ast.Name object at 0x7da1b0b736a0>]]]] begin[:]
call[name[dictionary_array]][name[k]] assign[=] call[name[json_params].pop, parameter[name[k]]]
variable[warnings] assign[=] list[[]]
with call[name[open], parameter[call[name[out_folder].get_abs_path, parameter[name[self]._calc._SCHED_ERROR_FILE]]]] begin[:]
variable[errors] assign[=] call[name[f].read, parameter[]]
if name[errors] begin[:]
variable[warnings] assign[=] list[[<ast.Name object at 0x7da18bccbf40>]]
call[name[json_params]][constant[warnings]] assign[=] name[warnings]
variable[new_nodes_list] assign[=] list[[]]
if name[dictionary_array] begin[:]
variable[array_data] assign[=] call[name[ArrayData], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18bccbb20>, <ast.Name object at 0x7da18bcc9a80>]]] in starred[call[name[dictionary_array].iteritems, parameter[]]] begin[:]
call[name[array_data].set_array, parameter[name[k], call[name[numpy].array, parameter[name[v]]]]]
call[name[new_nodes_list].append, parameter[tuple[[<ast.Attribute object at 0x7da18bcc8760>, <ast.Name object at 0x7da18bcc8250>]]]]
if name[json_params] begin[:]
variable[parameter_data] assign[=] call[name[ParameterData], parameter[]]
call[name[new_nodes_list].append, parameter[tuple[[<ast.Attribute object at 0x7da18bcc97e0>, <ast.Name object at 0x7da18bccb160>]]]]
if name[has_out_atoms] begin[:]
variable[structure_data] assign[=] call[name[StructureData], parameter[]]
call[name[new_nodes_list].append, parameter[tuple[[<ast.Attribute object at 0x7da18bcc88e0>, <ast.Name object at 0x7da18bcc9e10>]]]]
return[tuple[[<ast.Name object at 0x7da18bccafe0>, <ast.Name object at 0x7da18bcc9ae0>]]] | keyword[def] identifier[parse_from_calc] ( identifier[self] ):
literal[string]
keyword[from] identifier[aiida] . identifier[common] . identifier[exceptions] keyword[import] identifier[InvalidOperation]
keyword[from] identifier[aiida] . identifier[common] keyword[import] identifier[aiidalogger]
keyword[from] identifier[aiida] . identifier[backends] . identifier[djsite] . identifier[utils] keyword[import] identifier[get_dblogger_extra]
keyword[import] identifier[ase] , identifier[ase] . identifier[io]
identifier[parserlogger] = identifier[aiidalogger] . identifier[getChild] ( literal[string] )
identifier[logger_extra] = identifier[get_dblogger_extra] ( identifier[self] . identifier[_calc] )
identifier[successful] = keyword[True]
identifier[state] = identifier[self] . identifier[_calc] . identifier[get_state] ()
keyword[if] identifier[state] != identifier[calc_states] . identifier[PARSING] :
keyword[raise] identifier[InvalidOperation] ( literal[string]
. identifier[format] ( identifier[calc_states] . identifier[PARSING] ))
identifier[out_folder] = identifier[self] . identifier[_calc] . identifier[get_retrieved_node] ()
identifier[list_of_files] = identifier[out_folder] . identifier[get_folder_list] ()
keyword[if] keyword[not] identifier[self] . identifier[_calc] . identifier[_OUTPUT_FILE_NAME] keyword[in] identifier[list_of_files] :
identifier[successful] = keyword[False]
identifier[parserlogger] . identifier[error] ( literal[string] , identifier[extra] = identifier[logger_extra] )
keyword[return] identifier[successful] ,()
identifier[has_out_atoms] = keyword[True] keyword[if] identifier[self] . identifier[_calc] . identifier[_output_aseatoms] keyword[in] identifier[list_of_files] keyword[else] keyword[False]
keyword[if] identifier[has_out_atoms] :
identifier[out_atoms] = identifier[ase] . identifier[io] . identifier[read] ( identifier[out_folder] . identifier[get_abs_path] ( identifier[self] . identifier[_calc] . identifier[_output_aseatoms] ))
identifier[out_structure] = identifier[StructureData] (). identifier[set_ase] ( identifier[out_atoms] )
identifier[json_outfile] = identifier[out_folder] . identifier[get_abs_path] ( identifier[self] . identifier[_calc] . identifier[_OUTPUT_FILE_NAME] )
keyword[with] identifier[open] ( identifier[json_outfile] , literal[string] ) keyword[as] identifier[f] :
identifier[json_params] = identifier[json] . identifier[load] ( identifier[f] )
identifier[dictionary_array] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[json_params] . identifier[iteritems] ()):
keyword[if] identifier[isinstance] ( identifier[v] ,( identifier[list] , identifier[tuple] )):
identifier[dictionary_array] [ identifier[k] ]= identifier[json_params] . identifier[pop] ( identifier[k] )
identifier[warnings] =[]
keyword[with] identifier[open] ( identifier[out_folder] . identifier[get_abs_path] ( identifier[self] . identifier[_calc] . identifier[_SCHED_ERROR_FILE] )) keyword[as] identifier[f] :
identifier[errors] = identifier[f] . identifier[read] ()
keyword[if] identifier[errors] :
identifier[warnings] =[ identifier[errors] ]
identifier[json_params] [ literal[string] ]= identifier[warnings]
identifier[new_nodes_list] =[]
keyword[if] identifier[dictionary_array] :
identifier[array_data] = identifier[ArrayData] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[dictionary_array] . identifier[iteritems] ():
identifier[array_data] . identifier[set_array] ( identifier[k] , identifier[numpy] . identifier[array] ( identifier[v] ))
identifier[new_nodes_list] . identifier[append] (( identifier[self] . identifier[_outarray_name] , identifier[array_data] ))
keyword[if] identifier[json_params] :
identifier[parameter_data] = identifier[ParameterData] ( identifier[dict] = identifier[json_params] )
identifier[new_nodes_list] . identifier[append] (( identifier[self] . identifier[_outdict_name] , identifier[parameter_data] ))
keyword[if] identifier[has_out_atoms] :
identifier[structure_data] = identifier[StructureData] ()
identifier[new_nodes_list] . identifier[append] (( identifier[self] . identifier[_outstruc_name] , identifier[structure_data] ))
keyword[return] identifier[successful] , identifier[new_nodes_list] | def parse_from_calc(self):
"""
Parses the datafolder, stores results.
This parser for this simple code does simply store in the DB a node
representing the file of forces in real space
"""
from aiida.common.exceptions import InvalidOperation
from aiida.common import aiidalogger
from aiida.backends.djsite.utils import get_dblogger_extra
import ase, ase.io
parserlogger = aiidalogger.getChild('aseparser')
logger_extra = get_dblogger_extra(self._calc)
# suppose at the start that the job is successful
successful = True
# check that calculation is in the right state
state = self._calc.get_state()
if state != calc_states.PARSING:
raise InvalidOperation('Calculation not in {} state'.format(calc_states.PARSING)) # depends on [control=['if'], data=[]]
# select the folder object
out_folder = self._calc.get_retrieved_node()
# check what is inside the folder
list_of_files = out_folder.get_folder_list()
# at least the stdout should exist
if not self._calc._OUTPUT_FILE_NAME in list_of_files:
successful = False
parserlogger.error('Standard output not found', extra=logger_extra)
return (successful, ()) # depends on [control=['if'], data=[]]
# output structure
has_out_atoms = True if self._calc._output_aseatoms in list_of_files else False
if has_out_atoms:
out_atoms = ase.io.read(out_folder.get_abs_path(self._calc._output_aseatoms))
out_structure = StructureData().set_ase(out_atoms) # depends on [control=['if'], data=[]]
# load the results dictionary
json_outfile = out_folder.get_abs_path(self._calc._OUTPUT_FILE_NAME)
with open(json_outfile, 'r') as f:
json_params = json.load(f) # depends on [control=['with'], data=['f']]
# extract arrays from json_params
dictionary_array = {}
for (k, v) in list(json_params.iteritems()):
if isinstance(v, (list, tuple)):
dictionary_array[k] = json_params.pop(k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# look at warnings
warnings = []
with open(out_folder.get_abs_path(self._calc._SCHED_ERROR_FILE)) as f:
errors = f.read() # depends on [control=['with'], data=['f']]
if errors:
warnings = [errors] # depends on [control=['if'], data=[]]
json_params['warnings'] = warnings
# save the outputs
new_nodes_list = []
# save the arrays
if dictionary_array:
array_data = ArrayData()
for (k, v) in dictionary_array.iteritems():
array_data.set_array(k, numpy.array(v)) # depends on [control=['for'], data=[]]
new_nodes_list.append((self._outarray_name, array_data)) # depends on [control=['if'], data=[]]
# save the parameters
if json_params:
parameter_data = ParameterData(dict=json_params)
new_nodes_list.append((self._outdict_name, parameter_data)) # depends on [control=['if'], data=[]]
if has_out_atoms:
structure_data = StructureData()
new_nodes_list.append((self._outstruc_name, structure_data)) # depends on [control=['if'], data=[]]
return (successful, new_nodes_list) |
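The array/scalar split inside parse_from_calc can be isolated into a small, runnable Python 3 sketch (the original uses Python 2's iteritems):

json_params = {"energy": -1.5, "forces": [[0.0, 0.1, 0.2]], "label": "run1"}

# Pop list/tuple values into a separate dict, as the parser does before
# building ArrayData (arrays) and ParameterData (scalars) nodes.
dictionary_array = {}
for k, v in list(json_params.items()):
    if isinstance(v, (list, tuple)):
        dictionary_array[k] = json_params.pop(k)

print(json_params)       # {'energy': -1.5, 'label': 'run1'}
print(dictionary_array)  # {'forces': [[0.0, 0.1, 0.2]]}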
def _error(self, x):
"""Error function.
Once self.y_desired has been defined, compute the error
of input x using the forward model.
"""
y_pred = self.fmodel.predict_y(x)
err_v = y_pred - self.goal
error = sum(e*e for e in err_v)
return error | def function[_error, parameter[self, x]]:
constant[Error function.
    Once the goal (self.goal) has been defined, compute the error
of input x using the forward model.
]
variable[y_pred] assign[=] call[name[self].fmodel.predict_y, parameter[name[x]]]
variable[err_v] assign[=] binary_operation[name[y_pred] - name[self].goal]
variable[error] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b0c9f5b0>]]
return[name[error]] | keyword[def] identifier[_error] ( identifier[self] , identifier[x] ):
literal[string]
identifier[y_pred] = identifier[self] . identifier[fmodel] . identifier[predict_y] ( identifier[x] )
identifier[err_v] = identifier[y_pred] - identifier[self] . identifier[goal]
identifier[error] = identifier[sum] ( identifier[e] * identifier[e] keyword[for] identifier[e] keyword[in] identifier[err_v] )
keyword[return] identifier[error] | def _error(self, x):
"""Error function.
    Once the goal (self.goal) has been defined, compute the error
of input x using the forward model.
"""
y_pred = self.fmodel.predict_y(x)
err_v = y_pred - self.goal
error = sum((e * e for e in err_v))
return error |
def _batches(iterable, size):
"""
Take an iterator and yield its contents in groups of `size` items.
"""
sourceiter = iter(iterable)
while True:
try:
batchiter = islice(sourceiter, size)
yield chain([next(batchiter)], batchiter)
except StopIteration:
return | def function[_batches, parameter[iterable, size]]:
constant[
Take an iterator and yield its contents in groups of `size` items.
]
variable[sourceiter] assign[=] call[name[iter], parameter[name[iterable]]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b0394730> | keyword[def] identifier[_batches] ( identifier[iterable] , identifier[size] ):
literal[string]
identifier[sourceiter] = identifier[iter] ( identifier[iterable] )
keyword[while] keyword[True] :
keyword[try] :
identifier[batchiter] = identifier[islice] ( identifier[sourceiter] , identifier[size] )
keyword[yield] identifier[chain] ([ identifier[next] ( identifier[batchiter] )], identifier[batchiter] )
keyword[except] identifier[StopIteration] :
keyword[return] | def _batches(iterable, size):
"""
Take an iterator and yield its contents in groups of `size` items.
"""
sourceiter = iter(iterable)
while True:
try:
batchiter = islice(sourceiter, size)
yield chain([next(batchiter)], batchiter) # depends on [control=['try'], data=[]]
except StopIteration:
return # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
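A self-contained usage sketch of _batches; each yielded batch is a lazy iterator over the shared source, so consume it fully before advancing:

from itertools import islice, chain

def _batches(iterable, size):
    sourceiter = iter(iterable)
    while True:
        try:
            batchiter = islice(sourceiter, size)
            yield chain([next(batchiter)], batchiter)
        except StopIteration:
            return

for batch in _batches(range(10), 4):
    print(list(batch))
# [0, 1, 2, 3]
# [4, 5, 6, 7]
# [8, 9]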
def ndxlist(self):
"""Return a list of groups in the same format as :func:`gromacs.cbook.get_ndx_groups`.
Format:
[ {'name': group_name, 'natoms': number_atoms, 'nr': # group_number}, ....]
"""
return [{'name': name, 'natoms': len(atomnumbers), 'nr': nr+1} for
nr,(name,atomnumbers) in enumerate(self.items())] | def function[ndxlist, parameter[self]]:
constant[Return a list of groups in the same format as :func:`gromacs.cbook.get_ndx_groups`.
Format:
[ {'name': group_name, 'natoms': number_atoms, 'nr': # group_number}, ....]
]
return[<ast.ListComp object at 0x7da18bc72a70>] | keyword[def] identifier[ndxlist] ( identifier[self] ):
literal[string]
keyword[return] [{ literal[string] : identifier[name] , literal[string] : identifier[len] ( identifier[atomnumbers] ), literal[string] : identifier[nr] + literal[int] } keyword[for]
identifier[nr] ,( identifier[name] , identifier[atomnumbers] ) keyword[in] identifier[enumerate] ( identifier[self] . identifier[items] ())] | def ndxlist(self):
"""Return a list of groups in the same format as :func:`gromacs.cbook.get_ndx_groups`.
Format:
[ {'name': group_name, 'natoms': number_atoms, 'nr': # group_number}, ....]
"""
return [{'name': name, 'natoms': len(atomnumbers), 'nr': nr + 1} for (nr, (name, atomnumbers)) in enumerate(self.items())] |
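A self-contained sketch of the output shape, using a hypothetical dict subclass in place of the real NDX object:

class FakeNDX(dict):
    """Hypothetical stand-in: maps group name -> list of atom numbers."""
    def ndxlist(self):
        return [{'name': name, 'natoms': len(atomnumbers), 'nr': nr + 1}
                for nr, (name, atomnumbers) in enumerate(self.items())]

ndx = FakeNDX([('Protein', [1, 2, 3]), ('SOL', [4, 5])])
print(ndx.ndxlist())
# [{'name': 'Protein', 'natoms': 3, 'nr': 1},
#  {'name': 'SOL', 'natoms': 2, 'nr': 2}]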
def _cleanup(self, lr_decay_opt_states_reset: str, process_manager: Optional['DecoderProcessManager'] = None,
keep_training_state = False):
"""
Cleans parameter files, training state directory and waits for remaining decoding processes.
"""
utils.cleanup_params_files(self.model.output_dir, self.max_params_files_to_keep,
self.state.checkpoint, self.state.best_checkpoint, self.keep_initializations)
if process_manager is not None:
result = process_manager.collect_results()
if result is not None:
decoded_checkpoint, decoder_metrics = result
self.state.metrics[decoded_checkpoint - 1].update(decoder_metrics)
self.tflogger.log_metrics(decoder_metrics, decoded_checkpoint)
utils.write_metrics_file(self.state.metrics, self.metrics_fname)
self.state.save(os.path.join(self.training_state_dirname, C.TRAINING_STATE_NAME))
if not keep_training_state:
final_training_state_dirname = os.path.join(self.model.output_dir, C.TRAINING_STATE_DIRNAME)
if os.path.exists(final_training_state_dirname):
shutil.rmtree(final_training_state_dirname)
if lr_decay_opt_states_reset == C.LR_DECAY_OPT_STATES_RESET_BEST:
best_opt_states_fname = os.path.join(self.model.output_dir, C.OPT_STATES_BEST)
if os.path.exists(best_opt_states_fname):
os.remove(best_opt_states_fname)
if lr_decay_opt_states_reset == C.LR_DECAY_OPT_STATES_RESET_INITIAL:
initial_opt_states_fname = os.path.join(self.model.output_dir, C.OPT_STATES_INITIAL)
if os.path.exists(initial_opt_states_fname):
os.remove(initial_opt_states_fname) | def function[_cleanup, parameter[self, lr_decay_opt_states_reset, process_manager, keep_training_state]]:
constant[
Cleans parameter files, training state directory and waits for remaining decoding processes.
]
call[name[utils].cleanup_params_files, parameter[name[self].model.output_dir, name[self].max_params_files_to_keep, name[self].state.checkpoint, name[self].state.best_checkpoint, name[self].keep_initializations]]
if compare[name[process_manager] is_not constant[None]] begin[:]
variable[result] assign[=] call[name[process_manager].collect_results, parameter[]]
if compare[name[result] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da2044c35b0> assign[=] name[result]
call[call[name[self].state.metrics][binary_operation[name[decoded_checkpoint] - constant[1]]].update, parameter[name[decoder_metrics]]]
call[name[self].tflogger.log_metrics, parameter[name[decoder_metrics], name[decoded_checkpoint]]]
call[name[utils].write_metrics_file, parameter[name[self].state.metrics, name[self].metrics_fname]]
call[name[self].state.save, parameter[call[name[os].path.join, parameter[name[self].training_state_dirname, name[C].TRAINING_STATE_NAME]]]]
if <ast.UnaryOp object at 0x7da1b1d6ec80> begin[:]
variable[final_training_state_dirname] assign[=] call[name[os].path.join, parameter[name[self].model.output_dir, name[C].TRAINING_STATE_DIRNAME]]
if call[name[os].path.exists, parameter[name[final_training_state_dirname]]] begin[:]
call[name[shutil].rmtree, parameter[name[final_training_state_dirname]]]
if compare[name[lr_decay_opt_states_reset] equal[==] name[C].LR_DECAY_OPT_STATES_RESET_BEST] begin[:]
variable[best_opt_states_fname] assign[=] call[name[os].path.join, parameter[name[self].model.output_dir, name[C].OPT_STATES_BEST]]
if call[name[os].path.exists, parameter[name[best_opt_states_fname]]] begin[:]
call[name[os].remove, parameter[name[best_opt_states_fname]]]
if compare[name[lr_decay_opt_states_reset] equal[==] name[C].LR_DECAY_OPT_STATES_RESET_INITIAL] begin[:]
variable[initial_opt_states_fname] assign[=] call[name[os].path.join, parameter[name[self].model.output_dir, name[C].OPT_STATES_INITIAL]]
if call[name[os].path.exists, parameter[name[initial_opt_states_fname]]] begin[:]
call[name[os].remove, parameter[name[initial_opt_states_fname]]] | keyword[def] identifier[_cleanup] ( identifier[self] , identifier[lr_decay_opt_states_reset] : identifier[str] , identifier[process_manager] : identifier[Optional] [ literal[string] ]= keyword[None] ,
identifier[keep_training_state] = keyword[False] ):
literal[string]
identifier[utils] . identifier[cleanup_params_files] ( identifier[self] . identifier[model] . identifier[output_dir] , identifier[self] . identifier[max_params_files_to_keep] ,
identifier[self] . identifier[state] . identifier[checkpoint] , identifier[self] . identifier[state] . identifier[best_checkpoint] , identifier[self] . identifier[keep_initializations] )
keyword[if] identifier[process_manager] keyword[is] keyword[not] keyword[None] :
identifier[result] = identifier[process_manager] . identifier[collect_results] ()
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
identifier[decoded_checkpoint] , identifier[decoder_metrics] = identifier[result]
identifier[self] . identifier[state] . identifier[metrics] [ identifier[decoded_checkpoint] - literal[int] ]. identifier[update] ( identifier[decoder_metrics] )
identifier[self] . identifier[tflogger] . identifier[log_metrics] ( identifier[decoder_metrics] , identifier[decoded_checkpoint] )
identifier[utils] . identifier[write_metrics_file] ( identifier[self] . identifier[state] . identifier[metrics] , identifier[self] . identifier[metrics_fname] )
identifier[self] . identifier[state] . identifier[save] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[training_state_dirname] , identifier[C] . identifier[TRAINING_STATE_NAME] ))
keyword[if] keyword[not] identifier[keep_training_state] :
identifier[final_training_state_dirname] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[model] . identifier[output_dir] , identifier[C] . identifier[TRAINING_STATE_DIRNAME] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[final_training_state_dirname] ):
identifier[shutil] . identifier[rmtree] ( identifier[final_training_state_dirname] )
keyword[if] identifier[lr_decay_opt_states_reset] == identifier[C] . identifier[LR_DECAY_OPT_STATES_RESET_BEST] :
identifier[best_opt_states_fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[model] . identifier[output_dir] , identifier[C] . identifier[OPT_STATES_BEST] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[best_opt_states_fname] ):
identifier[os] . identifier[remove] ( identifier[best_opt_states_fname] )
keyword[if] identifier[lr_decay_opt_states_reset] == identifier[C] . identifier[LR_DECAY_OPT_STATES_RESET_INITIAL] :
identifier[initial_opt_states_fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[model] . identifier[output_dir] , identifier[C] . identifier[OPT_STATES_INITIAL] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[initial_opt_states_fname] ):
identifier[os] . identifier[remove] ( identifier[initial_opt_states_fname] ) | def _cleanup(self, lr_decay_opt_states_reset: str, process_manager: Optional['DecoderProcessManager']=None, keep_training_state=False):
"""
Cleans parameter files, training state directory and waits for remaining decoding processes.
"""
utils.cleanup_params_files(self.model.output_dir, self.max_params_files_to_keep, self.state.checkpoint, self.state.best_checkpoint, self.keep_initializations)
if process_manager is not None:
result = process_manager.collect_results()
if result is not None:
(decoded_checkpoint, decoder_metrics) = result
self.state.metrics[decoded_checkpoint - 1].update(decoder_metrics)
self.tflogger.log_metrics(decoder_metrics, decoded_checkpoint)
utils.write_metrics_file(self.state.metrics, self.metrics_fname)
self.state.save(os.path.join(self.training_state_dirname, C.TRAINING_STATE_NAME)) # depends on [control=['if'], data=['result']] # depends on [control=['if'], data=['process_manager']]
if not keep_training_state:
final_training_state_dirname = os.path.join(self.model.output_dir, C.TRAINING_STATE_DIRNAME)
if os.path.exists(final_training_state_dirname):
shutil.rmtree(final_training_state_dirname) # depends on [control=['if'], data=[]]
if lr_decay_opt_states_reset == C.LR_DECAY_OPT_STATES_RESET_BEST:
best_opt_states_fname = os.path.join(self.model.output_dir, C.OPT_STATES_BEST)
if os.path.exists(best_opt_states_fname):
os.remove(best_opt_states_fname) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if lr_decay_opt_states_reset == C.LR_DECAY_OPT_STATES_RESET_INITIAL:
initial_opt_states_fname = os.path.join(self.model.output_dir, C.OPT_STATES_INITIAL)
if os.path.exists(initial_opt_states_fname):
os.remove(initial_opt_states_fname) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def detection(reference_tempi, reference_weight, estimated_tempi, tol=0.08):
"""Compute the tempo detection accuracy metric.
Parameters
----------
reference_tempi : np.ndarray, shape=(2,)
Two non-negative reference tempi
reference_weight : float > 0
The relative strength of ``reference_tempi[0]`` vs
``reference_tempi[1]``.
estimated_tempi : np.ndarray, shape=(2,)
Two non-negative estimated tempi.
tol : float in [0, 1]:
The maximum allowable deviation from a reference tempo to
count as a hit.
``|est_t - ref_t| <= tol * ref_t``
(Default value = 0.08)
Returns
-------
p_score : float in [0, 1]
Weighted average of recalls:
``reference_weight * hits[0] + (1 - reference_weight) * hits[1]``
one_correct : bool
True if at least one reference tempo was correctly estimated
both_correct : bool
True if both reference tempi were correctly estimated
Raises
------
ValueError
If the input tempi are ill-formed
If the reference weight is not in the range [0, 1]
If ``tol < 0`` or ``tol > 1``.
"""
validate(reference_tempi, reference_weight, estimated_tempi)
if tol < 0 or tol > 1:
raise ValueError('invalid tolerance {}: must lie in the range '
'[0, 1]'.format(tol))
if tol == 0.:
warnings.warn('A tolerance of 0.0 may not '
'lead to the results you expect.')
hits = [False, False]
for i, ref_t in enumerate(reference_tempi):
if ref_t > 0:
# Compute the relative error for this reference tempo
f_ref_t = float(ref_t)
relative_error = np.min(np.abs(ref_t - estimated_tempi) / f_ref_t)
# Count the hits
hits[i] = relative_error <= tol
p_score = reference_weight * hits[0] + (1.0-reference_weight) * hits[1]
one_correct = bool(np.max(hits))
both_correct = bool(np.min(hits))
return p_score, one_correct, both_correct | def function[detection, parameter[reference_tempi, reference_weight, estimated_tempi, tol]]:
constant[Compute the tempo detection accuracy metric.
Parameters
----------
reference_tempi : np.ndarray, shape=(2,)
Two non-negative reference tempi
reference_weight : float > 0
The relative strength of ``reference_tempi[0]`` vs
``reference_tempi[1]``.
estimated_tempi : np.ndarray, shape=(2,)
Two non-negative estimated tempi.
tol : float in [0, 1]:
The maximum allowable deviation from a reference tempo to
count as a hit.
``|est_t - ref_t| <= tol * ref_t``
(Default value = 0.08)
Returns
-------
p_score : float in [0, 1]
Weighted average of recalls:
``reference_weight * hits[0] + (1 - reference_weight) * hits[1]``
one_correct : bool
True if at least one reference tempo was correctly estimated
both_correct : bool
True if both reference tempi were correctly estimated
Raises
------
ValueError
If the input tempi are ill-formed
If the reference weight is not in the range [0, 1]
If ``tol < 0`` or ``tol > 1``.
]
call[name[validate], parameter[name[reference_tempi], name[reference_weight], name[estimated_tempi]]]
if <ast.BoolOp object at 0x7da1b0ff3a00> begin[:]
<ast.Raise object at 0x7da1b0ff1cc0>
if compare[name[tol] equal[==] constant[0.0]] begin[:]
call[name[warnings].warn, parameter[constant[A tolerance of 0.0 may not lead to the results you expect.]]]
variable[hits] assign[=] list[[<ast.Constant object at 0x7da1b0ff38e0>, <ast.Constant object at 0x7da1b0ff22c0>]]
for taget[tuple[[<ast.Name object at 0x7da1b0ff1d80>, <ast.Name object at 0x7da1b0ff1db0>]]] in starred[call[name[enumerate], parameter[name[reference_tempi]]]] begin[:]
if compare[name[ref_t] greater[>] constant[0]] begin[:]
variable[f_ref_t] assign[=] call[name[float], parameter[name[ref_t]]]
variable[relative_error] assign[=] call[name[np].min, parameter[binary_operation[call[name[np].abs, parameter[binary_operation[name[ref_t] - name[estimated_tempi]]]] / name[f_ref_t]]]]
call[name[hits]][name[i]] assign[=] compare[name[relative_error] less_or_equal[<=] name[tol]]
variable[p_score] assign[=] binary_operation[binary_operation[name[reference_weight] * call[name[hits]][constant[0]]] + binary_operation[binary_operation[constant[1.0] - name[reference_weight]] * call[name[hits]][constant[1]]]]
variable[one_correct] assign[=] call[name[bool], parameter[call[name[np].max, parameter[name[hits]]]]]
variable[both_correct] assign[=] call[name[bool], parameter[call[name[np].min, parameter[name[hits]]]]]
return[tuple[[<ast.Name object at 0x7da1b0ff24d0>, <ast.Name object at 0x7da1b0ff1810>, <ast.Name object at 0x7da1b0ff0760>]]] | keyword[def] identifier[detection] ( identifier[reference_tempi] , identifier[reference_weight] , identifier[estimated_tempi] , identifier[tol] = literal[int] ):
literal[string]
identifier[validate] ( identifier[reference_tempi] , identifier[reference_weight] , identifier[estimated_tempi] )
keyword[if] identifier[tol] < literal[int] keyword[or] identifier[tol] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[tol] ))
keyword[if] identifier[tol] == literal[int] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] )
identifier[hits] =[ keyword[False] , keyword[False] ]
keyword[for] identifier[i] , identifier[ref_t] keyword[in] identifier[enumerate] ( identifier[reference_tempi] ):
keyword[if] identifier[ref_t] > literal[int] :
identifier[f_ref_t] = identifier[float] ( identifier[ref_t] )
identifier[relative_error] = identifier[np] . identifier[min] ( identifier[np] . identifier[abs] ( identifier[ref_t] - identifier[estimated_tempi] )/ identifier[f_ref_t] )
identifier[hits] [ identifier[i] ]= identifier[relative_error] <= identifier[tol]
identifier[p_score] = identifier[reference_weight] * identifier[hits] [ literal[int] ]+( literal[int] - identifier[reference_weight] )* identifier[hits] [ literal[int] ]
identifier[one_correct] = identifier[bool] ( identifier[np] . identifier[max] ( identifier[hits] ))
identifier[both_correct] = identifier[bool] ( identifier[np] . identifier[min] ( identifier[hits] ))
keyword[return] identifier[p_score] , identifier[one_correct] , identifier[both_correct] | def detection(reference_tempi, reference_weight, estimated_tempi, tol=0.08):
"""Compute the tempo detection accuracy metric.
Parameters
----------
reference_tempi : np.ndarray, shape=(2,)
Two non-negative reference tempi
reference_weight : float > 0
The relative strength of ``reference_tempi[0]`` vs
``reference_tempi[1]``.
estimated_tempi : np.ndarray, shape=(2,)
Two non-negative estimated tempi.
tol : float in [0, 1]:
The maximum allowable deviation from a reference tempo to
count as a hit.
``|est_t - ref_t| <= tol * ref_t``
(Default value = 0.08)
Returns
-------
p_score : float in [0, 1]
Weighted average of recalls:
``reference_weight * hits[0] + (1 - reference_weight) * hits[1]``
one_correct : bool
True if at least one reference tempo was correctly estimated
both_correct : bool
True if both reference tempi were correctly estimated
Raises
------
ValueError
If the input tempi are ill-formed
If the reference weight is not in the range [0, 1]
If ``tol < 0`` or ``tol > 1``.
"""
validate(reference_tempi, reference_weight, estimated_tempi)
if tol < 0 or tol > 1:
raise ValueError('invalid tolerance {}: must lie in the range [0, 1]'.format(tol)) # depends on [control=['if'], data=[]]
if tol == 0.0:
warnings.warn('A tolerance of 0.0 may not lead to the results you expect.') # depends on [control=['if'], data=[]]
hits = [False, False]
for (i, ref_t) in enumerate(reference_tempi):
if ref_t > 0:
# Compute the relative error for this reference tempo
f_ref_t = float(ref_t)
relative_error = np.min(np.abs(ref_t - estimated_tempi) / f_ref_t)
# Count the hits
hits[i] = relative_error <= tol # depends on [control=['if'], data=['ref_t']] # depends on [control=['for'], data=[]]
p_score = reference_weight * hits[0] + (1.0 - reference_weight) * hits[1]
one_correct = bool(np.max(hits))
both_correct = bool(np.min(hits))
return (p_score, one_correct, both_correct) |
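A worked example of detection at the default 8% tolerance; only the first reference tempo is hit (assuming validate accepts these inputs):

import numpy as np

ref = np.array([120.0, 60.0])   # reference tempi
est = np.array([119.0, 90.0])   # estimated tempi

# ref[0]: min(|120-119|, |120-90|)/120 ~= 0.0083 <= 0.08 -> hit
# ref[1]: min(|60-119|,  |60-90|)/60   = 0.5     >  0.08 -> miss
p_score, one_correct, both_correct = detection(ref, 0.7, est)
print(p_score, one_correct, both_correct)  # 0.7 True False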
def match_files(files, pattern: Pattern):
"""Yields file name if matches a regular expression pattern."""
for name in files:
if re.match(pattern, name):
yield name | def function[match_files, parameter[files, pattern]]:
constant[Yields file name if matches a regular expression pattern.]
for taget[name[name]] in starred[name[files]] begin[:]
if call[name[re].match, parameter[name[pattern], name[name]]] begin[:]
<ast.Yield object at 0x7da20c7cb4f0> | keyword[def] identifier[match_files] ( identifier[files] , identifier[pattern] : identifier[Pattern] ):
literal[string]
keyword[for] identifier[name] keyword[in] identifier[files] :
keyword[if] identifier[re] . identifier[match] ( identifier[pattern] , identifier[name] ):
keyword[yield] identifier[name] | def match_files(files, pattern: Pattern):
"""Yields file name if matches a regular expression pattern."""
for name in files:
if re.match(pattern, name):
yield name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] |
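Usage sketch for match_files; note that re.match anchors at the start of each name:

import re

def match_files(files, pattern):
    """Yields file name if it matches a regular expression pattern."""
    for name in files:
        if re.match(pattern, name):
            yield name

files = ["train.py", "README.md", "test_utils.py", "notes.txt"]
print(list(match_files(files, re.compile(r".*\.py$"))))
# ['train.py', 'test_utils.py']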
def isUpdated(self):
"""
        Determines whether the file has been modified since the stored
        error timestamp, i.e. whether a previously erroring file may
        have been fixed.
"""
modified_time = self.getmtime()
valid = modified_time > self.__stamp
return valid | def function[isUpdated, parameter[self]]:
constant[
    Determines whether the file has been modified since the stored
    error timestamp, i.e. whether a previously erroring file may
    have been fixed.
]
variable[modified_time] assign[=] call[name[self].getmtime, parameter[]]
variable[valid] assign[=] compare[name[modified_time] greater[>] name[self].__stamp]
return[name[valid]] | keyword[def] identifier[isUpdated] ( identifier[self] ):
literal[string]
identifier[modified_time] = identifier[self] . identifier[getmtime] ()
identifier[valid] = identifier[modified_time] > identifier[self] . identifier[__stamp]
keyword[return] identifier[valid] | def isUpdated(self):
"""
        Determines whether the file has been modified since the stored
        error timestamp, i.e. whether a previously erroring file may
        have been fixed.
"""
modified_time = self.getmtime()
valid = modified_time > self.__stamp
return valid |
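A minimal, hypothetical host class showing the timestamp pattern behind isUpdated:

import os
import time

class ErrorRecord:
    """Hypothetical holder: remembers when a file last errored."""
    def __init__(self, path):
        self.path = path
        self.__stamp = time.time()  # moment the error was recorded

    def getmtime(self):
        return os.path.getmtime(self.path)

    def isUpdated(self):
        # True once the file was modified after the error was recorded
        return self.getmtime() > self.__stamp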
def numbafy(fn, args, compiler="jit", **nbkws):
"""
Compile a string, sympy expression or symengine expression using numba.
Not all functions are supported by Python's numerical package (numpy). For
difficult cases, valid Python code (as string) may be more suitable than
symbolic expressions coming from sympy, symengine, etc. When compiling
vectorized functions, include valid signatures (see `numba`_ documentation).
Args:
fn: Symbolic expression as sympy/symengine expression or string
args (iterable): Symbolic arguments
compiler: String name or callable numba compiler
nbkws: Compiler keyword arguments (if none provided, smart defaults are used)
Returns:
func: Compiled function
Warning:
For vectorized functions, valid signatures are (almost always) required.
"""
kwargs = {} # Numba kwargs to be updated by user
if not isinstance(args, (tuple, list)):
args = (args, )
# Parameterize compiler
if isinstance(compiler, six.string_types):
compiler_ = getattr(nb, compiler, None)
        if compiler_ is None:  # check the getattr result, not the name string
raise AttributeError("No numba function with name {}.".format(compiler))
compiler = compiler_
if compiler in (nb.jit, nb.njit, nb.autojit):
kwargs.update(jitkwargs)
sig = nbkws.pop("signature", None)
else:
kwargs.update(veckwargs)
sig = nbkws.pop("signatures", None)
if sig is None:
warn("Vectorization without 'signatures' can lead to wrong results!")
kwargs.update(nbkws)
# Expand sympy expressions and create string for eval
if isinstance(fn, sy.Expr):
fn = sy.expand_func(fn)
func = sy.lambdify(args, fn, modules='numpy')
# Machine code compilation
if sig is None:
try:
func = compiler(**kwargs)(func)
except RuntimeError:
kwargs['cache'] = False
func = compiler(**kwargs)(func)
else:
try:
func = compiler(sig, **kwargs)(func)
except RuntimeError:
kwargs['cache'] = False
func = compiler(sig, **kwargs)(func)
return func | def function[numbafy, parameter[fn, args, compiler]]:
constant[
Compile a string, sympy expression or symengine expression using numba.
Not all functions are supported by Python's numerical package (numpy). For
difficult cases, valid Python code (as string) may be more suitable than
symbolic expressions coming from sympy, symengine, etc. When compiling
vectorized functions, include valid signatures (see `numba`_ documentation).
Args:
fn: Symbolic expression as sympy/symengine expression or string
args (iterable): Symbolic arguments
compiler: String name or callable numba compiler
nbkws: Compiler keyword arguments (if none provided, smart defaults are used)
Returns:
func: Compiled function
Warning:
For vectorized functions, valid signatures are (almost always) required.
]
variable[kwargs] assign[=] dictionary[[], []]
if <ast.UnaryOp object at 0x7da2044c0c40> begin[:]
variable[args] assign[=] tuple[[<ast.Name object at 0x7da2044c3ac0>]]
if call[name[isinstance], parameter[name[compiler], name[six].string_types]] begin[:]
variable[compiler_] assign[=] call[name[getattr], parameter[name[nb], name[compiler], constant[None]]]
if compare[name[compiler] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6c7f70>
variable[compiler] assign[=] name[compiler_]
if compare[name[compiler] in tuple[[<ast.Attribute object at 0x7da20c6c5a80>, <ast.Attribute object at 0x7da20c6c4640>, <ast.Attribute object at 0x7da20c6c7190>]]] begin[:]
call[name[kwargs].update, parameter[name[jitkwargs]]]
variable[sig] assign[=] call[name[nbkws].pop, parameter[constant[signature], constant[None]]]
call[name[kwargs].update, parameter[name[nbkws]]]
if call[name[isinstance], parameter[name[fn], name[sy].Expr]] begin[:]
variable[fn] assign[=] call[name[sy].expand_func, parameter[name[fn]]]
variable[func] assign[=] call[name[sy].lambdify, parameter[name[args], name[fn]]]
if compare[name[sig] is constant[None]] begin[:]
<ast.Try object at 0x7da20c6c7610>
return[name[func]] | keyword[def] identifier[numbafy] ( identifier[fn] , identifier[args] , identifier[compiler] = literal[string] ,** identifier[nbkws] ):
literal[string]
identifier[kwargs] ={}
keyword[if] keyword[not] identifier[isinstance] ( identifier[args] ,( identifier[tuple] , identifier[list] )):
identifier[args] =( identifier[args] ,)
keyword[if] identifier[isinstance] ( identifier[compiler] , identifier[six] . identifier[string_types] ):
identifier[compiler_] = identifier[getattr] ( identifier[nb] , identifier[compiler] , keyword[None] )
keyword[if] identifier[compiler] keyword[is] keyword[None] :
keyword[raise] identifier[AttributeError] ( literal[string] . identifier[format] ( identifier[compiler] ))
identifier[compiler] = identifier[compiler_]
keyword[if] identifier[compiler] keyword[in] ( identifier[nb] . identifier[jit] , identifier[nb] . identifier[njit] , identifier[nb] . identifier[autojit] ):
identifier[kwargs] . identifier[update] ( identifier[jitkwargs] )
identifier[sig] = identifier[nbkws] . identifier[pop] ( literal[string] , keyword[None] )
keyword[else] :
identifier[kwargs] . identifier[update] ( identifier[veckwargs] )
identifier[sig] = identifier[nbkws] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[sig] keyword[is] keyword[None] :
identifier[warn] ( literal[string] )
identifier[kwargs] . identifier[update] ( identifier[nbkws] )
keyword[if] identifier[isinstance] ( identifier[fn] , identifier[sy] . identifier[Expr] ):
identifier[fn] = identifier[sy] . identifier[expand_func] ( identifier[fn] )
identifier[func] = identifier[sy] . identifier[lambdify] ( identifier[args] , identifier[fn] , identifier[modules] = literal[string] )
keyword[if] identifier[sig] keyword[is] keyword[None] :
keyword[try] :
identifier[func] = identifier[compiler] (** identifier[kwargs] )( identifier[func] )
keyword[except] identifier[RuntimeError] :
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[func] = identifier[compiler] (** identifier[kwargs] )( identifier[func] )
keyword[else] :
keyword[try] :
identifier[func] = identifier[compiler] ( identifier[sig] ,** identifier[kwargs] )( identifier[func] )
keyword[except] identifier[RuntimeError] :
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[func] = identifier[compiler] ( identifier[sig] ,** identifier[kwargs] )( identifier[func] )
keyword[return] identifier[func] | def numbafy(fn, args, compiler='jit', **nbkws):
"""
Compile a string, sympy expression or symengine expression using numba.
Not all functions are supported by Python's numerical package (numpy). For
difficult cases, valid Python code (as string) may be more suitable than
symbolic expressions coming from sympy, symengine, etc. When compiling
vectorized functions, include valid signatures (see `numba`_ documentation).
Args:
fn: Symbolic expression as sympy/symengine expression or string
args (iterable): Symbolic arguments
compiler: String name or callable numba compiler
nbkws: Compiler keyword arguments (if none provided, smart defaults are used)
Returns:
func: Compiled function
Warning:
For vectorized functions, valid signatures are (almost always) required.
"""
kwargs = {} # Numba kwargs to be updated by user
if not isinstance(args, (tuple, list)):
args = (args,) # depends on [control=['if'], data=[]]
# Parameterize compiler
if isinstance(compiler, six.string_types):
compiler_ = getattr(nb, compiler, None)
if compiler is None:
raise AttributeError('No numba function with name {}.'.format(compiler)) # depends on [control=['if'], data=['compiler']]
compiler = compiler_ # depends on [control=['if'], data=[]]
if compiler in (nb.jit, nb.njit, nb.autojit):
kwargs.update(jitkwargs)
sig = nbkws.pop('signature', None) # depends on [control=['if'], data=[]]
else:
kwargs.update(veckwargs)
sig = nbkws.pop('signatures', None)
if sig is None:
warn("Vectorization without 'signatures' can lead to wrong results!") # depends on [control=['if'], data=[]]
kwargs.update(nbkws)
# Expand sympy expressions and create string for eval
if isinstance(fn, sy.Expr):
fn = sy.expand_func(fn) # depends on [control=['if'], data=[]]
func = sy.lambdify(args, fn, modules='numpy')
# Machine code compilation
if sig is None:
try:
func = compiler(**kwargs)(func) # depends on [control=['try'], data=[]]
except RuntimeError:
kwargs['cache'] = False
func = compiler(**kwargs)(func) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
try:
func = compiler(sig, **kwargs)(func) # depends on [control=['try'], data=[]]
except RuntimeError:
kwargs['cache'] = False
func = compiler(sig, **kwargs)(func) # depends on [control=['except'], data=[]]
return func |
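A usage sketch for the jit path of numbafy (this assumes numba and sympy are installed and that the module-level jitkwargs dict referenced above is defined elsewhere):

import sympy as sy

x = sy.Symbol('x')
expr = x**2 + 1

# jit path: no signature required; numbafy lambdifies the sympy
# expression into a NumPy function and compiles it with numba.
f = numbafy(expr, x, compiler="jit")
print(f(3.0))  # 10.0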
def model_metrics(self, timeoutSecs=60, **kwargs):
'''
ModelMetrics list.
'''
result = self.do_json_request('/3/ModelMetrics.json', cmd='get', timeout=timeoutSecs)
h2o_sandbox.check_sandbox_for_errors()
return result | def function[model_metrics, parameter[self, timeoutSecs]]:
constant[
ModelMetrics list.
]
variable[result] assign[=] call[name[self].do_json_request, parameter[constant[/3/ModelMetrics.json]]]
call[name[h2o_sandbox].check_sandbox_for_errors, parameter[]]
return[name[result]] | keyword[def] identifier[model_metrics] ( identifier[self] , identifier[timeoutSecs] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[result] = identifier[self] . identifier[do_json_request] ( literal[string] , identifier[cmd] = literal[string] , identifier[timeout] = identifier[timeoutSecs] )
identifier[h2o_sandbox] . identifier[check_sandbox_for_errors] ()
keyword[return] identifier[result] | def model_metrics(self, timeoutSecs=60, **kwargs):
"""
ModelMetrics list.
"""
result = self.do_json_request('/3/ModelMetrics.json', cmd='get', timeout=timeoutSecs)
h2o_sandbox.check_sandbox_for_errors()
return result |
def demean_forward_returns(factor_data, grouper=None):
"""
Convert forward returns to returns relative to mean
period wise all-universe or group returns.
group-wise normalization incorporates the assumption of a
    group neutral portfolio constraint and thus allows the
factor to be evaluated across groups.
For example, if AAPL 5 period return is 0.1% and mean 5 period
return for the Technology stocks in our universe was 0.5% in the
same period, the group adjusted 5 period return for AAPL in this
period is -0.4%.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
Forward returns indexed by date and asset.
Separate column for each forward return window.
    grouper : list, optional
        Grouper passed to ``groupby``; if None, groups by date so that
        returns are demeaned across the whole universe in each period.
Returns
-------
adjusted_forward_returns : pd.DataFrame - MultiIndex
DataFrame of the same format as the input, but with each
security's returns normalized by group.
"""
factor_data = factor_data.copy()
if not grouper:
grouper = factor_data.index.get_level_values('date')
cols = get_forward_returns_columns(factor_data.columns)
factor_data[cols] = factor_data.groupby(grouper)[cols] \
.transform(lambda x: x - x.mean())
return factor_data | def function[demean_forward_returns, parameter[factor_data, grouper]]:
constant[
Convert forward returns to returns relative to mean
period wise all-universe or group returns.
group-wise normalization incorporates the assumption of a
    group neutral portfolio constraint and thus allows the
factor to be evaluated across groups.
For example, if AAPL 5 period return is 0.1% and mean 5 period
return for the Technology stocks in our universe was 0.5% in the
same period, the group adjusted 5 period return for AAPL in this
period is -0.4%.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
Forward returns indexed by date and asset.
Separate column for each forward return window.
    grouper : list, optional
        Grouper passed to ``groupby``; if None, groups by date so that
        returns are demeaned across the whole universe in each period.
Returns
-------
adjusted_forward_returns : pd.DataFrame - MultiIndex
DataFrame of the same format as the input, but with each
security's returns normalized by group.
]
variable[factor_data] assign[=] call[name[factor_data].copy, parameter[]]
if <ast.UnaryOp object at 0x7da20c76d000> begin[:]
variable[grouper] assign[=] call[name[factor_data].index.get_level_values, parameter[constant[date]]]
variable[cols] assign[=] call[name[get_forward_returns_columns], parameter[name[factor_data].columns]]
call[name[factor_data]][name[cols]] assign[=] call[call[call[name[factor_data].groupby, parameter[name[grouper]]]][name[cols]].transform, parameter[<ast.Lambda object at 0x7da2041dbe80>]]
return[name[factor_data]] | keyword[def] identifier[demean_forward_returns] ( identifier[factor_data] , identifier[grouper] = keyword[None] ):
literal[string]
identifier[factor_data] = identifier[factor_data] . identifier[copy] ()
keyword[if] keyword[not] identifier[grouper] :
identifier[grouper] = identifier[factor_data] . identifier[index] . identifier[get_level_values] ( literal[string] )
identifier[cols] = identifier[get_forward_returns_columns] ( identifier[factor_data] . identifier[columns] )
identifier[factor_data] [ identifier[cols] ]= identifier[factor_data] . identifier[groupby] ( identifier[grouper] )[ identifier[cols] ]. identifier[transform] ( keyword[lambda] identifier[x] : identifier[x] - identifier[x] . identifier[mean] ())
keyword[return] identifier[factor_data] | def demean_forward_returns(factor_data, grouper=None):
"""
Convert forward returns to returns relative to mean
period wise all-universe or group returns.
group-wise normalization incorporates the assumption of a
    group neutral portfolio constraint and thus allows the
factor to be evaluated across groups.
For example, if AAPL 5 period return is 0.1% and mean 5 period
return for the Technology stocks in our universe was 0.5% in the
same period, the group adjusted 5 period return for AAPL in this
period is -0.4%.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
Forward returns indexed by date and asset.
Separate column for each forward return window.
    grouper : list, optional
        Grouper passed to ``groupby``; if None, groups by date so that
        returns are demeaned across the whole universe in each period.
Returns
-------
adjusted_forward_returns : pd.DataFrame - MultiIndex
DataFrame of the same format as the input, but with each
security's returns normalized by group.
"""
factor_data = factor_data.copy()
if not grouper:
grouper = factor_data.index.get_level_values('date') # depends on [control=['if'], data=[]]
cols = get_forward_returns_columns(factor_data.columns)
factor_data[cols] = factor_data.groupby(grouper)[cols].transform(lambda x: x - x.mean())
return factor_data |
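A small worked example of date-wise demeaning (this assumes the alphalens-style get_forward_returns_columns helper recognizes the '1D' column):

import pandas as pd

idx = pd.MultiIndex.from_product(
    [pd.to_datetime(['2020-01-02']), ['AAPL', 'MSFT']],
    names=['date', 'asset'])
factor_data = pd.DataFrame({'1D': [0.01, 0.03]}, index=idx)

# Subtracts the cross-sectional mean (0.02) within the date:
# AAPL -> -0.01, MSFT -> +0.01
print(demean_forward_returns(factor_data))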
def parse_uci(self, uci: str) -> Move:
"""
Parses the given move in UCI notation.
Supports both Chess960 and standard UCI notation.
The returned move is guaranteed to be either legal or a null move.
:raises: :exc:`ValueError` if the move is invalid or illegal in the
current position (but not a null move).
"""
move = Move.from_uci(uci)
if not move:
return move
move = self._to_chess960(move)
move = self._from_chess960(self.chess960, move.from_square, move.to_square, move.promotion, move.drop)
if not self.is_legal(move):
raise ValueError("illegal uci: {!r} in {}".format(uci, self.fen()))
return move | def function[parse_uci, parameter[self, uci]]:
constant[
Parses the given move in UCI notation.
Supports both Chess960 and standard UCI notation.
The returned move is guaranteed to be either legal or a null move.
:raises: :exc:`ValueError` if the move is invalid or illegal in the
current position (but not a null move).
]
variable[move] assign[=] call[name[Move].from_uci, parameter[name[uci]]]
if <ast.UnaryOp object at 0x7da1b17d62c0> begin[:]
return[name[move]]
variable[move] assign[=] call[name[self]._to_chess960, parameter[name[move]]]
variable[move] assign[=] call[name[self]._from_chess960, parameter[name[self].chess960, name[move].from_square, name[move].to_square, name[move].promotion, name[move].drop]]
if <ast.UnaryOp object at 0x7da1b17d69b0> begin[:]
<ast.Raise object at 0x7da1b17d87c0>
return[name[move]] | keyword[def] identifier[parse_uci] ( identifier[self] , identifier[uci] : identifier[str] )-> identifier[Move] :
literal[string]
identifier[move] = identifier[Move] . identifier[from_uci] ( identifier[uci] )
keyword[if] keyword[not] identifier[move] :
keyword[return] identifier[move]
identifier[move] = identifier[self] . identifier[_to_chess960] ( identifier[move] )
identifier[move] = identifier[self] . identifier[_from_chess960] ( identifier[self] . identifier[chess960] , identifier[move] . identifier[from_square] , identifier[move] . identifier[to_square] , identifier[move] . identifier[promotion] , identifier[move] . identifier[drop] )
keyword[if] keyword[not] identifier[self] . identifier[is_legal] ( identifier[move] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[uci] , identifier[self] . identifier[fen] ()))
keyword[return] identifier[move] | def parse_uci(self, uci: str) -> Move:
"""
Parses the given move in UCI notation.
Supports both Chess960 and standard UCI notation.
The returned move is guaranteed to be either legal or a null move.
:raises: :exc:`ValueError` if the move is invalid or illegal in the
current position (but not a null move).
"""
move = Move.from_uci(uci)
if not move:
return move # depends on [control=['if'], data=[]]
move = self._to_chess960(move)
move = self._from_chess960(self.chess960, move.from_square, move.to_square, move.promotion, move.drop)
if not self.is_legal(move):
raise ValueError('illegal uci: {!r} in {}'.format(uci, self.fen())) # depends on [control=['if'], data=[]]
return move |
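This method matches python-chess's Board.parse_uci; assuming that library, a round trip might look like:

import chess  # assumption: the method above is python-chess's Board.parse_uci

board = chess.Board()
move = board.parse_uci("e2e4")   # legal -> returns a chess.Move
board.push(move)
print(board.parse_uci("0000"))   # the null move parses without raising
try:
    board.parse_uci("e2e4")      # now illegal from the new position
except ValueError as err:
    print(err)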
def signUserCsr(self, xcsr, signas, outp=None):
'''
Signs a user CSR with a CA keypair.
Args:
xcsr (OpenSSL.crypto.X509Req): The certificate signing request.
signas (str): The CA keypair name to sign the CSR with.
outp (synapse.lib.output.Output): The output buffer.
Examples:
cdir.signUserCsr(mycsr, 'myca')
Returns:
((OpenSSL.crypto.PKey, OpenSSL.crypto.X509)): Tuple containing the public key and certificate objects.
'''
pkey = xcsr.get_pubkey()
name = xcsr.get_subject().CN
return self.genUserCert(name, csr=pkey, signas=signas, outp=outp) | def function[signUserCsr, parameter[self, xcsr, signas, outp]]:
constant[
Signs a user CSR with a CA keypair.
Args:
xcsr (OpenSSL.crypto.X509Req): The certificate signing request.
signas (str): The CA keypair name to sign the CSR with.
outp (synapse.lib.output.Output): The output buffer.
Examples:
cdir.signUserCsr(mycsr, 'myca')
Returns:
((OpenSSL.crypto.PKey, OpenSSL.crypto.X509)): Tuple containing the public key and certificate objects.
]
variable[pkey] assign[=] call[name[xcsr].get_pubkey, parameter[]]
variable[name] assign[=] call[name[xcsr].get_subject, parameter[]].CN
return[call[name[self].genUserCert, parameter[name[name]]]] | keyword[def] identifier[signUserCsr] ( identifier[self] , identifier[xcsr] , identifier[signas] , identifier[outp] = keyword[None] ):
literal[string]
identifier[pkey] = identifier[xcsr] . identifier[get_pubkey] ()
identifier[name] = identifier[xcsr] . identifier[get_subject] (). identifier[CN]
keyword[return] identifier[self] . identifier[genUserCert] ( identifier[name] , identifier[csr] = identifier[pkey] , identifier[signas] = identifier[signas] , identifier[outp] = identifier[outp] ) | def signUserCsr(self, xcsr, signas, outp=None):
"""
Signs a user CSR with a CA keypair.
Args:
xcsr (OpenSSL.crypto.X509Req): The certificate signing request.
signas (str): The CA keypair name to sign the CSR with.
outp (synapse.lib.output.Output): The output buffer.
Examples:
cdir.signUserCsr(mycsr, 'myca')
Returns:
((OpenSSL.crypto.PKey, OpenSSL.crypto.X509)): Tuple containing the public key and certificate objects.
"""
pkey = xcsr.get_pubkey()
name = xcsr.get_subject().CN
return self.genUserCert(name, csr=pkey, signas=signas, outp=outp) |
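A hedged sketch of feeding this method a CSR built with pyOpenSSL; 'cdir' is assumed to be an initialised cert-dir object whose CA keypair 'myca' already exists:

from OpenSSL import crypto

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
xcsr = crypto.X509Req()
xcsr.get_subject().CN = 'visi@example.com'
xcsr.set_pubkey(key)
xcsr.sign(key, 'sha256')

pkey, cert = cdir.signUserCsr(xcsr, 'myca')  # cdir: assumed cert-dir instance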
def get_assessment_part_form_for_create_for_assessment_part(self, assessment_part_id, assessment_part_record_types):
"""Gets the assessment part form for creating new assessment parts under another assessment part.
A new form should be requested for each create transaction.
arg: assessment_part_id (osid.id.Id): an assessment part
``Id``
arg: assessment_part_record_types (osid.type.Type[]): array
of assessment part record types to be included in the
create operation or an empty list if none
return: (osid.assessment.authoring.AssessmentPartForm) - the
assessment part form
raise: NotFound - ``assessment_part_id`` is not found
raise: NullArgument - ``assessment_part_id`` or
``assessment_part_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
if not isinstance(assessment_part_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
for arg in assessment_part_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if assessment_part_record_types == []:
assessment_part_record_types = None
mgr = self._get_provider_manager('ASSESSMENT_AUTHORING', local=True)
lookup_session = mgr.get_assessment_part_lookup_session_for_bank(self._catalog_id, proxy=self._proxy)
child_parts = lookup_session.get_assessment_parts_for_assessment_part(assessment_part_id)
mdata = {}
# Check whether any underlying Parts exist (and whether they are Sections) and set appropriate mdata overrides:
if child_parts.available() == 0:
pass
else:
mdata['sequestered'] = {}
mdata['sequestered']['is_read_only'] = True
mdata['sequestered']['is_required'] = True
if child_parts.available() > 0 and child_parts.next().is_section():
mdata['sequestered']['default_boolean_values'] = [False]
else:
mdata['sequestered']['default_boolean_values'] = [True]
# WHY are we passing bank_id = self._catalog_id below, seems redundant:
obj_form = objects.AssessmentPartForm(
bank_id=self._catalog_id,
record_types=assessment_part_record_types,
assessment_part_id=assessment_part_id,
catalog_id=self._catalog_id,
runtime=self._runtime,
mdata=mdata)
obj_form._for_update = False
self._forms[obj_form.get_id().get_identifier()] = not CREATED
return obj_form | def function[get_assessment_part_form_for_create_for_assessment_part, parameter[self, assessment_part_id, assessment_part_record_types]]:
constant[Gets the assessment part form for creating new assessment parts under another assessment part.
A new form should be requested for each create transaction.
arg: assessment_part_id (osid.id.Id): an assessment part
``Id``
arg: assessment_part_record_types (osid.type.Type[]): array
of assessment part record types to be included in the
create operation or an empty list if none
return: (osid.assessment.authoring.AssessmentPartForm) - the
assessment part form
raise: NotFound - ``assessment_part_id`` is not found
raise: NullArgument - ``assessment_part_id`` or
``assessment_part_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
]
if <ast.UnaryOp object at 0x7da18dc05db0> begin[:]
<ast.Raise object at 0x7da18dc05c90>
for taget[name[arg]] in starred[name[assessment_part_record_types]] begin[:]
if <ast.UnaryOp object at 0x7da18dc066e0> begin[:]
<ast.Raise object at 0x7da18dc04eb0>
if compare[name[assessment_part_record_types] equal[==] list[[]]] begin[:]
variable[assessment_part_record_types] assign[=] constant[None]
variable[mgr] assign[=] call[name[self]._get_provider_manager, parameter[constant[ASSESSMENT_AUTHORING]]]
variable[lookup_session] assign[=] call[name[mgr].get_assessment_part_lookup_session_for_bank, parameter[name[self]._catalog_id]]
variable[child_parts] assign[=] call[name[lookup_session].get_assessment_parts_for_assessment_part, parameter[name[assessment_part_id]]]
variable[mdata] assign[=] dictionary[[], []]
if compare[call[name[child_parts].available, parameter[]] equal[==] constant[0]] begin[:]
pass
variable[obj_form] assign[=] call[name[objects].AssessmentPartForm, parameter[]]
name[obj_form]._for_update assign[=] constant[False]
call[name[self]._forms][call[call[name[obj_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] <ast.UnaryOp object at 0x7da1b092ecb0>
return[name[obj_form]] | keyword[def] identifier[get_assessment_part_form_for_create_for_assessment_part] ( identifier[self] , identifier[assessment_part_id] , identifier[assessment_part_record_types] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[assessment_part_id] , identifier[ABCId] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[for] identifier[arg] keyword[in] identifier[assessment_part_record_types] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[arg] , identifier[ABCType] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[assessment_part_record_types] ==[]:
identifier[assessment_part_record_types] = keyword[None]
identifier[mgr] = identifier[self] . identifier[_get_provider_manager] ( literal[string] , identifier[local] = keyword[True] )
identifier[lookup_session] = identifier[mgr] . identifier[get_assessment_part_lookup_session_for_bank] ( identifier[self] . identifier[_catalog_id] , identifier[proxy] = identifier[self] . identifier[_proxy] )
identifier[child_parts] = identifier[lookup_session] . identifier[get_assessment_parts_for_assessment_part] ( identifier[assessment_part_id] )
identifier[mdata] ={}
keyword[if] identifier[child_parts] . identifier[available] ()== literal[int] :
keyword[pass]
keyword[else] :
identifier[mdata] [ literal[string] ]={}
identifier[mdata] [ literal[string] ][ literal[string] ]= keyword[True]
identifier[mdata] [ literal[string] ][ literal[string] ]= keyword[True]
keyword[if] identifier[child_parts] . identifier[available] ()> literal[int] keyword[and] identifier[child_parts] . identifier[next] (). identifier[is_section] ():
identifier[mdata] [ literal[string] ][ literal[string] ]=[ keyword[False] ]
keyword[else] :
identifier[mdata] [ literal[string] ][ literal[string] ]=[ keyword[True] ]
identifier[obj_form] = identifier[objects] . identifier[AssessmentPartForm] (
identifier[bank_id] = identifier[self] . identifier[_catalog_id] ,
identifier[record_types] = identifier[assessment_part_record_types] ,
identifier[assessment_part_id] = identifier[assessment_part_id] ,
identifier[catalog_id] = identifier[self] . identifier[_catalog_id] ,
identifier[runtime] = identifier[self] . identifier[_runtime] ,
identifier[mdata] = identifier[mdata] )
identifier[obj_form] . identifier[_for_update] = keyword[False]
identifier[self] . identifier[_forms] [ identifier[obj_form] . identifier[get_id] (). identifier[get_identifier] ()]= keyword[not] identifier[CREATED]
keyword[return] identifier[obj_form] | def get_assessment_part_form_for_create_for_assessment_part(self, assessment_part_id, assessment_part_record_types):
"""Gets the assessment part form for creating new assessment parts under another assessment part.
A new form should be requested for each create transaction.
arg: assessment_part_id (osid.id.Id): an assessment part
``Id``
arg: assessment_part_record_types (osid.type.Type[]): array
of assessment part record types to be included in the
create operation or an empty list if none
return: (osid.assessment.authoring.AssessmentPartForm) - the
assessment part form
raise: NotFound - ``assessment_part_id`` is not found
raise: NullArgument - ``assessment_part_id`` or
``assessment_part_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
if not isinstance(assessment_part_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id') # depends on [control=['if'], data=[]]
for arg in assessment_part_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
if assessment_part_record_types == []:
assessment_part_record_types = None # depends on [control=['if'], data=['assessment_part_record_types']]
mgr = self._get_provider_manager('ASSESSMENT_AUTHORING', local=True)
lookup_session = mgr.get_assessment_part_lookup_session_for_bank(self._catalog_id, proxy=self._proxy)
child_parts = lookup_session.get_assessment_parts_for_assessment_part(assessment_part_id)
mdata = {}
# Check whether any underlying Parts exist (and whether they are Sections) and set appropriate mdata overrides:
if child_parts.available() == 0:
pass # depends on [control=['if'], data=[]]
else:
mdata['sequestered'] = {}
mdata['sequestered']['is_read_only'] = True
mdata['sequestered']['is_required'] = True
if child_parts.available() > 0 and child_parts.next().is_section():
mdata['sequestered']['default_boolean_values'] = [False] # depends on [control=['if'], data=[]]
else:
mdata['sequestered']['default_boolean_values'] = [True]
# WHY are we passing bank_id = self._catalog_id below, seems redundant:
obj_form = objects.AssessmentPartForm(bank_id=self._catalog_id, record_types=assessment_part_record_types, assessment_part_id=assessment_part_id, catalog_id=self._catalog_id, runtime=self._runtime, mdata=mdata)
obj_form._for_update = False
self._forms[obj_form.get_id().get_identifier()] = not CREATED
return obj_form |
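A hypothetical call pattern for the session method above; the session variable, the Id value, the form field, and the create call are assumptions based on the usual OSID admin-session flow:

# 'session' and 'part_id' are assumed to exist; [] requests no extra record types.
form = session.get_assessment_part_form_for_create_for_assessment_part(part_id, [])
form.display_name = 'Part 1'   # sketch: available form fields vary by implementation
new_part = session.create_assessment_part_for_assessment_part(form)  # assumed method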
def example_lab_to_rgb():
"""
Conversions to RGB are a little more complex mathematically. There are also
several kinds of RGB color spaces. When converting from a device-independent
color space to RGB, sRGB is assumed unless otherwise specified with the
target_rgb keyword arg.
"""
print("=== RGB Example: Lab->RGB ===")
# Instantiate an Lab color object with the given values.
lab = LabColor(0.903, 16.296, -2.217)
# Show a string representation.
print(lab)
# Convert to sRGB.
rgb = convert_color(lab, sRGBColor)
print(rgb)
print("=== End Example ===\n") | def function[example_lab_to_rgb, parameter[]]:
constant[
Conversions to RGB are a little more complex mathematically. There are also
several kinds of RGB color spaces. When converting from a device-independent
color space to RGB, sRGB is assumed unless otherwise specified with the
target_rgb keyword arg.
]
call[name[print], parameter[constant[=== RGB Example: Lab->RGB ===]]]
variable[lab] assign[=] call[name[LabColor], parameter[constant[0.903], constant[16.296], <ast.UnaryOp object at 0x7da2045650c0>]]
call[name[print], parameter[name[lab]]]
variable[rgb] assign[=] call[name[convert_color], parameter[name[lab], name[sRGBColor]]]
call[name[print], parameter[name[rgb]]]
call[name[print], parameter[constant[=== End Example ===
]]] | keyword[def] identifier[example_lab_to_rgb] ():
literal[string]
identifier[print] ( literal[string] )
identifier[lab] = identifier[LabColor] ( literal[int] , literal[int] ,- literal[int] )
identifier[print] ( identifier[lab] )
identifier[rgb] = identifier[convert_color] ( identifier[lab] , identifier[sRGBColor] )
identifier[print] ( identifier[rgb] )
identifier[print] ( literal[string] ) | def example_lab_to_rgb():
"""
Conversions to RGB are a little more complex mathematically. There are also
several kinds of RGB color spaces. When converting from a device-independent
color space to RGB, sRGB is assumed unless otherwise specified with the
target_rgb keyword arg.
"""
print('=== RGB Example: Lab->RGB ===')
# Instantiate an Lab color object with the given values.
lab = LabColor(0.903, 16.296, -2.217)
# Show a string representation.
print(lab)
# Convert to sRGB.
rgb = convert_color(lab, sRGBColor)
print(rgb)
print('=== End Example ===\n') |
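The same conversion can target a different RGB space by changing the target class; a small variant assuming the colormath package this example appears to be written for:

from colormath.color_objects import LabColor, AdobeRGBColor
from colormath.color_conversions import convert_color

lab = LabColor(0.903, 16.296, -2.217)
print(convert_color(lab, AdobeRGBColor))  # Adobe RGB instead of the sRGB default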
def packed_workflow(self, packed): # type: (Text) -> None
"""Pack CWL description to generate re-runnable CWL object in RO."""
self.self_check()
rel_path = posixpath.join(_posix_path(WORKFLOW), "packed.cwl")
# Write as binary
with self.write_bag_file(rel_path, encoding=None) as write_pack:
# YAML is always UTF8, but json.dumps gives us str in py2
write_pack.write(packed.encode(ENCODING))
_logger.debug(u"[provenance] Added packed workflow: %s", rel_path) | def function[packed_workflow, parameter[self, packed]]:
constant[Pack CWL description to generate re-runnable CWL object in RO.]
call[name[self].self_check, parameter[]]
variable[rel_path] assign[=] call[name[posixpath].join, parameter[call[name[_posix_path], parameter[name[WORKFLOW]]], constant[packed.cwl]]]
with call[name[self].write_bag_file, parameter[name[rel_path]]] begin[:]
call[name[write_pack].write, parameter[call[name[packed].encode, parameter[name[ENCODING]]]]]
call[name[_logger].debug, parameter[constant[[provenance] Added packed workflow: %s], name[rel_path]]] | keyword[def] identifier[packed_workflow] ( identifier[self] , identifier[packed] ):
literal[string]
identifier[self] . identifier[self_check] ()
identifier[rel_path] = identifier[posixpath] . identifier[join] ( identifier[_posix_path] ( identifier[WORKFLOW] ), literal[string] )
keyword[with] identifier[self] . identifier[write_bag_file] ( identifier[rel_path] , identifier[encoding] = keyword[None] ) keyword[as] identifier[write_pack] :
identifier[write_pack] . identifier[write] ( identifier[packed] . identifier[encode] ( identifier[ENCODING] ))
identifier[_logger] . identifier[debug] ( literal[string] , identifier[rel_path] ) | def packed_workflow(self, packed): # type: (Text) -> None
'Pack CWL description to generate re-runnable CWL object in RO.'
self.self_check()
rel_path = posixpath.join(_posix_path(WORKFLOW), 'packed.cwl')
# Write as binary
with self.write_bag_file(rel_path, encoding=None) as write_pack:
# YAML is always UTF8, but json.dumps gives us str in py2
write_pack.write(packed.encode(ENCODING)) # depends on [control=['with'], data=['write_pack']]
_logger.debug(u'[provenance] Added packed workflow: %s', rel_path) |
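A hedged sketch of driving this method; 'ro' is assumed to be an initialised research-object writer exposing it, and the packed document below is a stub rather than a real workflow:

import json

packed = json.dumps({"cwlVersion": "v1.0", "$graph": []}, indent=2)
ro.packed_workflow(packed)  # writes workflow/packed.cwl into the bag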
def hash_algo(self):
"""
:return:
A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
"sha384", "sha512", "sha512_224", "sha512_256"
"""
algorithm = self['algorithm'].native
algo_map = {
'md2_rsa': 'md2',
'md5_rsa': 'md5',
'sha1_rsa': 'sha1',
'sha224_rsa': 'sha224',
'sha256_rsa': 'sha256',
'sha384_rsa': 'sha384',
'sha512_rsa': 'sha512',
'sha1_dsa': 'sha1',
'sha224_dsa': 'sha224',
'sha256_dsa': 'sha256',
'sha1_ecdsa': 'sha1',
'sha224_ecdsa': 'sha224',
'sha256_ecdsa': 'sha256',
'sha384_ecdsa': 'sha384',
'sha512_ecdsa': 'sha512',
}
if algorithm in algo_map:
return algo_map[algorithm]
if algorithm == 'rsassa_pss':
return self['parameters']['hash_algorithm']['algorithm'].native
raise ValueError(unwrap(
'''
Hash algorithm not known for %s
''',
algorithm
)) | def function[hash_algo, parameter[self]]:
constant[
:return:
A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
"sha384", "sha512", "sha512_224", "sha512_256"
]
variable[algorithm] assign[=] call[name[self]][constant[algorithm]].native
variable[algo_map] assign[=] dictionary[[<ast.Constant object at 0x7da18bc73fd0>, <ast.Constant object at 0x7da18bc71c60>, <ast.Constant object at 0x7da18bc71f00>, <ast.Constant object at 0x7da18bc704f0>, <ast.Constant object at 0x7da18bc73640>, <ast.Constant object at 0x7da18bc71ab0>, <ast.Constant object at 0x7da18bc723e0>, <ast.Constant object at 0x7da18bc729e0>, <ast.Constant object at 0x7da18bc72920>, <ast.Constant object at 0x7da18bc72530>, <ast.Constant object at 0x7da18bc702b0>, <ast.Constant object at 0x7da18bc72d70>, <ast.Constant object at 0x7da18bc73400>, <ast.Constant object at 0x7da18bc722c0>, <ast.Constant object at 0x7da18bc727a0>], [<ast.Constant object at 0x7da18bc72110>, <ast.Constant object at 0x7da18bc70d90>, <ast.Constant object at 0x7da18bc708e0>, <ast.Constant object at 0x7da18bc73ca0>, <ast.Constant object at 0x7da18bc70400>, <ast.Constant object at 0x7da18bc71de0>, <ast.Constant object at 0x7da18bc71000>, <ast.Constant object at 0x7da18bc71030>, <ast.Constant object at 0x7da18bc73580>, <ast.Constant object at 0x7da18bc72200>, <ast.Constant object at 0x7da18bc71750>, <ast.Constant object at 0x7da18bc70eb0>, <ast.Constant object at 0x7da18bc73a30>, <ast.Constant object at 0x7da18bc71b10>, <ast.Constant object at 0x7da18bc71600>]]
if compare[name[algorithm] in name[algo_map]] begin[:]
return[call[name[algo_map]][name[algorithm]]]
if compare[name[algorithm] equal[==] constant[rsassa_pss]] begin[:]
return[call[call[call[name[self]][constant[parameters]]][constant[hash_algorithm]]][constant[algorithm]].native]
<ast.Raise object at 0x7da18bc72a10> | keyword[def] identifier[hash_algo] ( identifier[self] ):
literal[string]
identifier[algorithm] = identifier[self] [ literal[string] ]. identifier[native]
identifier[algo_map] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[if] identifier[algorithm] keyword[in] identifier[algo_map] :
keyword[return] identifier[algo_map] [ identifier[algorithm] ]
keyword[if] identifier[algorithm] == literal[string] :
keyword[return] identifier[self] [ literal[string] ][ literal[string] ][ literal[string] ]. identifier[native]
keyword[raise] identifier[ValueError] ( identifier[unwrap] (
literal[string] ,
identifier[algorithm]
)) | def hash_algo(self):
"""
:return:
A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
"sha384", "sha512", "sha512_224", "sha512_256"
"""
algorithm = self['algorithm'].native
algo_map = {'md2_rsa': 'md2', 'md5_rsa': 'md5', 'sha1_rsa': 'sha1', 'sha224_rsa': 'sha224', 'sha256_rsa': 'sha256', 'sha384_rsa': 'sha384', 'sha512_rsa': 'sha512', 'sha1_dsa': 'sha1', 'sha224_dsa': 'sha224', 'sha256_dsa': 'sha256', 'sha1_ecdsa': 'sha1', 'sha224_ecdsa': 'sha224', 'sha256_ecdsa': 'sha256', 'sha384_ecdsa': 'sha384', 'sha512_ecdsa': 'sha512'}
if algorithm in algo_map:
return algo_map[algorithm] # depends on [control=['if'], data=['algorithm', 'algo_map']]
if algorithm == 'rsassa_pss':
return self['parameters']['hash_algorithm']['algorithm'].native # depends on [control=['if'], data=[]]
raise ValueError(unwrap('\n Hash algorithm not known for %s\n ', algorithm)) |
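This property matches asn1crypto's SignedDigestAlgorithm; under that assumption, a minimal round trip is:

from asn1crypto.algos import SignedDigestAlgorithm

algo = SignedDigestAlgorithm({'algorithm': 'sha256_rsa'})
print(algo.hash_algo)  # -> 'sha256'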
def compute_exports(exports):
"""Compute a dictionary of exports given one of the parameters
to the Export() function or the exports argument to SConscript()."""
loc, glob = get_calling_namespaces()
retval = {}
try:
for export in exports:
if SCons.Util.is_Dict(export):
retval.update(export)
else:
try:
retval[export] = loc[export]
except KeyError:
retval[export] = glob[export]
except KeyError as x:
raise SCons.Errors.UserError("Export of non-existent variable '%s'"%x)
return retval | def function[compute_exports, parameter[exports]]:
constant[Compute a dictionary of exports given one of the parameters
to the Export() function or the exports argument to SConscript().]
<ast.Tuple object at 0x7da20c76eb30> assign[=] call[name[get_calling_namespaces], parameter[]]
variable[retval] assign[=] dictionary[[], []]
<ast.Try object at 0x7da20c76f9d0>
return[name[retval]] | keyword[def] identifier[compute_exports] ( identifier[exports] ):
literal[string]
identifier[loc] , identifier[glob] = identifier[get_calling_namespaces] ()
identifier[retval] ={}
keyword[try] :
keyword[for] identifier[export] keyword[in] identifier[exports] :
keyword[if] identifier[SCons] . identifier[Util] . identifier[is_Dict] ( identifier[export] ):
identifier[retval] . identifier[update] ( identifier[export] )
keyword[else] :
keyword[try] :
identifier[retval] [ identifier[export] ]= identifier[loc] [ identifier[export] ]
keyword[except] identifier[KeyError] :
identifier[retval] [ identifier[export] ]= identifier[glob] [ identifier[export] ]
keyword[except] identifier[KeyError] keyword[as] identifier[x] :
keyword[raise] identifier[SCons] . identifier[Errors] . identifier[UserError] ( literal[string] % identifier[x] )
keyword[return] identifier[retval] | def compute_exports(exports):
"""Compute a dictionary of exports given one of the parameters
to the Export() function or the exports argument to SConscript()."""
(loc, glob) = get_calling_namespaces()
retval = {}
try:
for export in exports:
if SCons.Util.is_Dict(export):
retval.update(export) # depends on [control=['if'], data=[]]
else:
try:
retval[export] = loc[export] # depends on [control=['try'], data=[]]
except KeyError:
retval[export] = glob[export] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['export']] # depends on [control=['try'], data=[]]
except KeyError as x:
raise SCons.Errors.UserError("Export of non-existent variable '%s'" % x) # depends on [control=['except'], data=['x']]
return retval |
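A sketch of the two argument shapes the function accepts — bare names resolved in the caller's namespaces, and dicts merged verbatim. It must run somewhere the frame inspection inside get_calling_namespaces can see the variable:

debug = 1
exports = compute_exports(['debug', {'mode': 'release'}])
# exports == {'debug': 1, 'mode': 'release'}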
def safe(self,x):
"""removes nans and infs from outputs."""
x[np.isinf(x)] = 1
x[np.isnan(x)] = 1
return x | def function[safe, parameter[self, x]]:
constant[Replace NaNs and infs in the output array with 1 (in place).]
call[name[x]][call[name[np].isinf, parameter[name[x]]]] assign[=] constant[1]
call[name[x]][call[name[np].isnan, parameter[name[x]]]] assign[=] constant[1]
return[name[x]] | keyword[def] identifier[safe] ( identifier[self] , identifier[x] ):
literal[string]
identifier[x] [ identifier[np] . identifier[isinf] ( identifier[x] )]= literal[int]
identifier[x] [ identifier[np] . identifier[isnan] ( identifier[x] )]= literal[int]
keyword[return] identifier[x] | def safe(self, x):
"""removes nans and infs from outputs."""
x[np.isinf(x)] = 1
x[np.isnan(x)] = 1
return x |
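A quick check of the behaviour, assuming 'obj' is an instance of the enclosing class; note the array is modified in place and also returned:

import numpy as np

arr = np.array([1.0, np.inf, np.nan, -np.inf])
print(obj.safe(arr))  # -> [1. 1. 1. 1.]; 'obj' is an assumed instance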
def image_predict_proba(self, X):
"""
Predicts class probabilities for the entire image.
Parameters:
-----------
X: array, shape = [n_samples, n_pixels_x, n_pixels_y, n_bands]
Array of images to predict class probabilities for
"""
self._check_image(X)
new_shape = (X.shape[0] * X.shape[1] * X.shape[2],)
if len(X.shape) == 4:
new_shape += (X.shape[3],)
pixels = X.reshape(new_shape)
probabilities = self.classifier.predict_proba(self._transform_input(pixels))
return probabilities.reshape(X.shape[0], X.shape[1], X.shape[2],
probabilities.shape[1]) | def function[image_predict_proba, parameter[self, X]]:
constant[
Predicts class probabilities for the entire image.
Parameters:
-----------
X: array, shape = [n_samples, n_pixels_x, n_pixels_y, n_bands]
Array of images to predict class probabilities for
]
call[name[self]._check_image, parameter[name[X]]]
variable[new_shape] assign[=] tuple[[<ast.BinOp object at 0x7da18f8130d0>]]
if compare[call[name[len], parameter[name[X].shape]] equal[==] constant[4]] begin[:]
<ast.AugAssign object at 0x7da18f8130a0>
variable[pixels] assign[=] call[name[X].reshape, parameter[name[new_shape]]]
variable[probabilities] assign[=] call[name[self].classifier.predict_proba, parameter[call[name[self]._transform_input, parameter[name[pixels]]]]]
return[call[name[probabilities].reshape, parameter[call[name[X].shape][constant[0]], call[name[X].shape][constant[1]], call[name[X].shape][constant[2]], call[name[probabilities].shape][constant[1]]]]] | keyword[def] identifier[image_predict_proba] ( identifier[self] , identifier[X] ):
literal[string]
identifier[self] . identifier[_check_image] ( identifier[X] )
identifier[new_shape] =( identifier[X] . identifier[shape] [ literal[int] ]* identifier[X] . identifier[shape] [ literal[int] ]* identifier[X] . identifier[shape] [ literal[int] ],)
keyword[if] identifier[len] ( identifier[X] . identifier[shape] )== literal[int] :
identifier[new_shape] +=( identifier[X] . identifier[shape] [ literal[int] ],)
identifier[pixels] = identifier[X] . identifier[reshape] ( identifier[new_shape] )
identifier[probabilities] = identifier[self] . identifier[classifier] . identifier[predict_proba] ( identifier[self] . identifier[_transform_input] ( identifier[pixels] ))
keyword[return] identifier[probabilities] . identifier[reshape] ( identifier[X] . identifier[shape] [ literal[int] ], identifier[X] . identifier[shape] [ literal[int] ], identifier[X] . identifier[shape] [ literal[int] ],
identifier[probabilities] . identifier[shape] [ literal[int] ]) | def image_predict_proba(self, X):
"""
Predicts class probabilities for the entire image.
Parameters:
-----------
X: array, shape = [n_samples, n_pixels_x, n_pixels_y, n_bands]
Array of images to predict class probabilities for
"""
self._check_image(X)
new_shape = (X.shape[0] * X.shape[1] * X.shape[2],)
if len(X.shape) == 4:
new_shape += (X.shape[3],) # depends on [control=['if'], data=[]]
pixels = X.reshape(new_shape)
probabilities = self.classifier.predict_proba(self._transform_input(pixels))
return probabilities.reshape(X.shape[0], X.shape[1], X.shape[2], probabilities.shape[1]) |
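A shape-level sketch, assuming 'model' wraps a fitted scikit-learn-style classifier and an identity _transform_input:

import numpy as np

X = np.random.rand(2, 4, 4, 3)        # 2 images, 4x4 pixels, 3 bands
proba = model.image_predict_proba(X)  # 'model' is an assumed fitted wrapper
print(proba.shape)                    # (2, 4, 4, n_classes)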
def fetch_object(self, doc_id):
"""Fetch the document by its PK."""
try:
return self.object_class.objects.get(pk=doc_id)
except self.object_class.DoesNotExist:
raise ReferenceNotFoundError | def function[fetch_object, parameter[self, doc_id]]:
constant[Fetch the document by its PK.]
<ast.Try object at 0x7da1b0ca78e0> | keyword[def] identifier[fetch_object] ( identifier[self] , identifier[doc_id] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[object_class] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[doc_id] )
keyword[except] identifier[self] . identifier[object_class] . identifier[DoesNotExist] :
keyword[raise] identifier[ReferenceNotFoundError] | def fetch_object(self, doc_id):
"""Fetch the document by its PK."""
try:
return self.object_class.objects.get(pk=doc_id) # depends on [control=['try'], data=[]]
except self.object_class.DoesNotExist:
raise ReferenceNotFoundError # depends on [control=['except'], data=[]] |
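A hedged usage sketch; 'ref' is assumed to be an instance whose object_class is a Django model, and ReferenceNotFoundError comes from the same module as the method:

try:
    obj = ref.fetch_object(doc_id=42)
except ReferenceNotFoundError:
    obj = None  # the referenced row no longer exists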
def reset(self, iface=None, client_mac=None, xid=None, scriptfile=None):
"""Reset object attributes when state is INIT."""
logger.debug('Resetting attributes.')
if iface is None:
iface = conf.iface
if client_mac is None:
# scapy for python 3 returns bytes, not a tuple
tempmac = get_if_raw_hwaddr(iface)
if isinstance(tempmac, tuple) and len(tempmac) == 2:
mac = tempmac[1]
else:
mac = tempmac
client_mac = str2mac(mac)
self.client = DHCPCAP(iface=iface, client_mac=client_mac, xid=xid)
if scriptfile is not None:
self.script = ClientScript(scriptfile)
else:
self.script = None
self.time_sent_request = None
self.discover_attempts = 0
self.request_attempts = 0
self.current_state = STATE_PREINIT
self.offers = list() | def function[reset, parameter[self, iface, client_mac, xid, scriptfile]]:
constant[Reset object attributes when state is INIT.]
call[name[logger].debug, parameter[constant[Resetting attributes.]]]
if compare[name[iface] is constant[None]] begin[:]
variable[iface] assign[=] name[conf].iface
if compare[name[client_mac] is constant[None]] begin[:]
variable[tempmac] assign[=] call[name[get_if_raw_hwaddr], parameter[name[iface]]]
if <ast.BoolOp object at 0x7da1b03b8730> begin[:]
variable[mac] assign[=] call[name[tempmac]][constant[1]]
variable[client_mac] assign[=] call[name[str2mac], parameter[name[mac]]]
name[self].client assign[=] call[name[DHCPCAP], parameter[]]
if compare[name[scriptfile] is_not constant[None]] begin[:]
name[self].script assign[=] call[name[ClientScript], parameter[name[scriptfile]]]
name[self].time_sent_request assign[=] constant[None]
name[self].discover_attempts assign[=] constant[0]
name[self].request_attempts assign[=] constant[0]
name[self].current_state assign[=] name[STATE_PREINIT]
name[self].offers assign[=] call[name[list], parameter[]] | keyword[def] identifier[reset] ( identifier[self] , identifier[iface] = keyword[None] , identifier[client_mac] = keyword[None] , identifier[xid] = keyword[None] , identifier[scriptfile] = keyword[None] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[iface] keyword[is] keyword[None] :
identifier[iface] = identifier[conf] . identifier[iface]
keyword[if] identifier[client_mac] keyword[is] keyword[None] :
identifier[tempmac] = identifier[get_if_raw_hwaddr] ( identifier[iface] )
keyword[if] identifier[isinstance] ( identifier[tempmac] , identifier[tuple] ) keyword[and] identifier[len] ( identifier[tempmac] )== literal[int] :
identifier[mac] = identifier[tempmac] [ literal[int] ]
keyword[else] :
identifier[mac] = identifier[tempmac]
identifier[client_mac] = identifier[str2mac] ( identifier[mac] )
identifier[self] . identifier[client] = identifier[DHCPCAP] ( identifier[iface] = identifier[iface] , identifier[client_mac] = identifier[client_mac] , identifier[xid] = identifier[xid] )
keyword[if] identifier[scriptfile] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[script] = identifier[ClientScript] ( identifier[scriptfile] )
keyword[else] :
identifier[self] . identifier[script] = keyword[None]
identifier[self] . identifier[time_sent_request] = keyword[None]
identifier[self] . identifier[discover_attempts] = literal[int]
identifier[self] . identifier[request_attempts] = literal[int]
identifier[self] . identifier[current_state] = identifier[STATE_PREINIT]
identifier[self] . identifier[offers] = identifier[list] () | def reset(self, iface=None, client_mac=None, xid=None, scriptfile=None):
"""Reset object attributes when state is INIT."""
logger.debug('Resetting attributes.')
if iface is None:
iface = conf.iface # depends on [control=['if'], data=['iface']]
if client_mac is None:
# scapy for python 3 returns bytes, not a tuple
tempmac = get_if_raw_hwaddr(iface)
if isinstance(tempmac, tuple) and len(tempmac) == 2:
mac = tempmac[1] # depends on [control=['if'], data=[]]
else:
mac = tempmac
client_mac = str2mac(mac) # depends on [control=['if'], data=['client_mac']]
self.client = DHCPCAP(iface=iface, client_mac=client_mac, xid=xid)
if scriptfile is not None:
self.script = ClientScript(scriptfile) # depends on [control=['if'], data=['scriptfile']]
else:
self.script = None
self.time_sent_request = None
self.discover_attempts = 0
self.request_attempts = 0
self.current_state = STATE_PREINIT
self.offers = list() |
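A hedged sketch of re-arming the client state machine; the interface name, xid, and script path are placeholders:

fsm.reset(iface='eth0', xid=0x12345678, scriptfile='/sbin/dhclient-script')
assert fsm.current_state == STATE_PREINIT
assert fsm.offers == []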
def create_app():
""" Flask application factory """
# Setup Flask app and app.config
app = Flask(__name__)
app.config.from_object(__name__+'.ConfigClass')
# Initialize Flask extensions
db = SQLAlchemy(app) # Initialize Flask-SQLAlchemy
# Define the User data-model. Make sure to add flask_user UserMixin !!!
class User(db.Model, UserMixin):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
# User authentication information
username = db.Column(db.String(50, collation='NOCASE'), nullable=False, unique=True)
password = db.Column(db.String(255), nullable=False, server_default='')
# User information
active = db.Column('is_active', db.Boolean(), nullable=False, server_default='0')
first_name = db.Column(db.String(100, collation='NOCASE'), nullable=False, server_default='')
last_name = db.Column(db.String(100, collation='NOCASE'), nullable=False, server_default='')
# Relationship
user_emails = db.relationship('UserEmail')
# Define the UserEmail data-model.
class UserEmail(db.Model):
__tablename__ = 'user_emails'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
# User email information
email = db.Column(db.String(255, collation='NOCASE'), nullable=False, unique=True)
email_confirmed_at = db.Column(db.DateTime())
is_primary = db.Column(db.Boolean(), nullable=False, default=False)
# Relationship
user = db.relationship('User', uselist=False)
# Create all database tables
db.create_all()
# Setup Flask-User
db_adapter = SQLAlchemyAdapter(db, User, UserEmailClass=UserEmail) # Register the User data-model
user_manager = UserManager(db_adapter, app) # Initialize Flask-User
# The Home page is accessible to anyone
@app.route('/')
def home_page():
return render_template_string("""
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>Home page</h2>
<p>This page can be accessed by anyone.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('member_page') }}>Members page</a> (login required)</p>
{% endblock %}
""")
# The Members page is only accessible to authenticated users
@app.route('/members')
@login_required # Use of @login_required decorator
def member_page():
return render_template_string("""
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>Members page</h2>
<p>This page can only be accessed by authenticated users.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('member_page') }}>Members page</a> (login required)</p>
{% endblock %}
""")
return app | def function[create_app, parameter[]]:
constant[ Flask application factory ]
variable[app] assign[=] call[name[Flask], parameter[name[__name__]]]
call[name[app].config.from_object, parameter[binary_operation[name[__name__] + constant[.ConfigClass]]]]
variable[db] assign[=] call[name[SQLAlchemy], parameter[name[app]]]
class class[User, parameter[]] begin[:]
variable[__tablename__] assign[=] constant[users]
variable[id] assign[=] call[name[db].Column, parameter[name[db].Integer]]
variable[username] assign[=] call[name[db].Column, parameter[call[name[db].String, parameter[constant[50]]]]]
variable[password] assign[=] call[name[db].Column, parameter[call[name[db].String, parameter[constant[255]]]]]
variable[active] assign[=] call[name[db].Column, parameter[constant[is_active], call[name[db].Boolean, parameter[]]]]
variable[first_name] assign[=] call[name[db].Column, parameter[call[name[db].String, parameter[constant[100]]]]]
variable[last_name] assign[=] call[name[db].Column, parameter[call[name[db].String, parameter[constant[100]]]]]
variable[user_emails] assign[=] call[name[db].relationship, parameter[constant[UserEmail]]]
class class[UserEmail, parameter[]] begin[:]
variable[__tablename__] assign[=] constant[user_emails]
variable[id] assign[=] call[name[db].Column, parameter[name[db].Integer]]
variable[user_id] assign[=] call[name[db].Column, parameter[name[db].Integer, call[name[db].ForeignKey, parameter[constant[users.id]]]]]
variable[email] assign[=] call[name[db].Column, parameter[call[name[db].String, parameter[constant[255]]]]]
variable[email_confirmed_at] assign[=] call[name[db].Column, parameter[call[name[db].DateTime, parameter[]]]]
variable[is_primary] assign[=] call[name[db].Column, parameter[call[name[db].Boolean, parameter[]]]]
variable[user] assign[=] call[name[db].relationship, parameter[constant[User]]]
call[name[db].create_all, parameter[]]
variable[db_adapter] assign[=] call[name[SQLAlchemyAdapter], parameter[name[db], name[User]]]
variable[user_manager] assign[=] call[name[UserManager], parameter[name[db_adapter], name[app]]]
def function[home_page, parameter[]]:
return[call[name[render_template_string], parameter[constant[
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>Home page</h2>
<p>This page can be accessed by anyone.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('member_page') }}>Members page</a> (login required)</p>
{% endblock %}
]]]]
def function[member_page, parameter[]]:
return[call[name[render_template_string], parameter[constant[
{% extends "flask_user_layout.html" %}
{% block content %}
<h2>Members page</h2>
<p>This page can only be accessed by authenticated users.</p><br/>
<p><a href={{ url_for('home_page') }}>Home page</a> (anyone)</p>
<p><a href={{ url_for('member_page') }}>Members page</a> (login required)</p>
{% endblock %}
]]]]
return[name[app]] | keyword[def] identifier[create_app] ():
literal[string]
identifier[app] = identifier[Flask] ( identifier[__name__] )
identifier[app] . identifier[config] . identifier[from_object] ( identifier[__name__] + literal[string] )
identifier[db] = identifier[SQLAlchemy] ( identifier[app] )
keyword[class] identifier[User] ( identifier[db] . identifier[Model] , identifier[UserMixin] ):
identifier[__tablename__] = literal[string]
identifier[id] = identifier[db] . identifier[Column] ( identifier[db] . identifier[Integer] , identifier[primary_key] = keyword[True] )
identifier[username] = identifier[db] . identifier[Column] ( identifier[db] . identifier[String] ( literal[int] , identifier[collation] = literal[string] ), identifier[nullable] = keyword[False] , identifier[unique] = keyword[True] )
identifier[password] = identifier[db] . identifier[Column] ( identifier[db] . identifier[String] ( literal[int] ), identifier[nullable] = keyword[False] , identifier[server_default] = literal[string] )
identifier[active] = identifier[db] . identifier[Column] ( literal[string] , identifier[db] . identifier[Boolean] (), identifier[nullable] = keyword[False] , identifier[server_default] = literal[string] )
identifier[first_name] = identifier[db] . identifier[Column] ( identifier[db] . identifier[String] ( literal[int] , identifier[collation] = literal[string] ), identifier[nullable] = keyword[False] , identifier[server_default] = literal[string] )
identifier[last_name] = identifier[db] . identifier[Column] ( identifier[db] . identifier[String] ( literal[int] , identifier[collation] = literal[string] ), identifier[nullable] = keyword[False] , identifier[server_default] = literal[string] )
identifier[user_emails] = identifier[db] . identifier[relationship] ( literal[string] )
keyword[class] identifier[UserEmail] ( identifier[db] . identifier[Model] ):
identifier[__tablename__] = literal[string]
identifier[id] = identifier[db] . identifier[Column] ( identifier[db] . identifier[Integer] , identifier[primary_key] = keyword[True] )
identifier[user_id] = identifier[db] . identifier[Column] ( identifier[db] . identifier[Integer] , identifier[db] . identifier[ForeignKey] ( literal[string] ))
identifier[email] = identifier[db] . identifier[Column] ( identifier[db] . identifier[String] ( literal[int] , identifier[collation] = literal[string] ), identifier[nullable] = keyword[False] , identifier[unique] = keyword[True] )
identifier[email_confirmed_at] = identifier[db] . identifier[Column] ( identifier[db] . identifier[DateTime] ())
identifier[is_primary] = identifier[db] . identifier[Column] ( identifier[db] . identifier[Boolean] (), identifier[nullable] = keyword[False] , identifier[default] = keyword[False] )
identifier[user] = identifier[db] . identifier[relationship] ( literal[string] , identifier[uselist] = keyword[False] )
identifier[db] . identifier[create_all] ()
identifier[db_adapter] = identifier[SQLAlchemyAdapter] ( identifier[db] , identifier[User] , identifier[UserEmailClass] = identifier[UserEmail] )
identifier[user_manager] = identifier[UserManager] ( identifier[db_adapter] , identifier[app] )
@ identifier[app] . identifier[route] ( literal[string] )
keyword[def] identifier[home_page] ():
keyword[return] identifier[render_template_string] ( literal[string] )
@ identifier[app] . identifier[route] ( literal[string] )
@ identifier[login_required]
keyword[def] identifier[member_page] ():
keyword[return] identifier[render_template_string] ( literal[string] )
keyword[return] identifier[app] | def create_app():
""" Flask application factory """
# Setup Flask app and app.config
app = Flask(__name__)
app.config.from_object(__name__ + '.ConfigClass')
# Initialize Flask extensions
db = SQLAlchemy(app) # Initialize Flask-SQLAlchemy
# Define the User data-model. Make sure to add flask_user UserMixin !!!
class User(db.Model, UserMixin):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
# User authentication information
username = db.Column(db.String(50, collation='NOCASE'), nullable=False, unique=True)
password = db.Column(db.String(255), nullable=False, server_default='')
# User information
active = db.Column('is_active', db.Boolean(), nullable=False, server_default='0')
first_name = db.Column(db.String(100, collation='NOCASE'), nullable=False, server_default='')
last_name = db.Column(db.String(100, collation='NOCASE'), nullable=False, server_default='')
# Relationship
user_emails = db.relationship('UserEmail')
# Define the UserEmail data-model.
class UserEmail(db.Model):
__tablename__ = 'user_emails'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
# User email information
email = db.Column(db.String(255, collation='NOCASE'), nullable=False, unique=True)
email_confirmed_at = db.Column(db.DateTime())
is_primary = db.Column(db.Boolean(), nullable=False, default=False)
# Relationship
user = db.relationship('User', uselist=False)
# Create all database tables
db.create_all()
# Setup Flask-User
db_adapter = SQLAlchemyAdapter(db, User, UserEmailClass=UserEmail) # Register the User data-model
user_manager = UserManager(db_adapter, app) # Initialize Flask-User
# The Home page is accessible to anyone
@app.route('/')
def home_page():
return render_template_string('\n {% extends "flask_user_layout.html" %}\n {% block content %}\n <h2>Home page</h2>\n <p>This page can be accessed by anyone.</p><br/>\n <p><a href={{ url_for(\'home_page\') }}>Home page</a> (anyone)</p>\n <p><a href={{ url_for(\'member_page\') }}>Members page</a> (login required)</p>\n {% endblock %}\n ')
# The Members page is only accessible to authenticated users
# Use of @login_required decorator
@app.route('/members')
@login_required
def member_page():
return render_template_string('\n {% extends "flask_user_layout.html" %}\n {% block content %}\n <h2>Members page</h2>\n <p>This page can only be accessed by authenticated users.</p><br/>\n <p><a href={{ url_for(\'home_page\') }}>Home page</a> (anyone)</p>\n <p><a href={{ url_for(\'member_page\') }}>Members page</a> (login required)</p>\n {% endblock %}\n ')
return app |
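Standard Flask factory usage: running the module directly serves the app locally (host and port are illustrative):

if __name__ == '__main__':
    app = create_app()
    app.run(host='0.0.0.0', port=5000, debug=True)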
def _sync_folder_to_container(self, folder_path, container, prefix, delete,
include_hidden, ignore, ignore_timestamps, object_prefix, verbose):
"""
This is the internal method that is called recursively to handle
nested folder structures.
"""
fnames = os.listdir(folder_path)
ignore = utils.coerce_to_list(ignore)
log = logging.getLogger("pyrax")
if not include_hidden:
ignore.append(".*")
for fname in fnames:
if utils.match_pattern(fname, ignore):
self._sync_summary["ignored"] += 1
continue
pth = os.path.join(folder_path, fname)
if os.path.isdir(pth):
subprefix = fname
if prefix:
subprefix = os.path.join(prefix, subprefix)
self._sync_folder_to_container(pth, container, prefix=subprefix,
delete=delete, include_hidden=include_hidden,
ignore=ignore, ignore_timestamps=ignore_timestamps,
object_prefix=object_prefix, verbose=verbose)
continue
self._local_files.append(os.path.join(object_prefix, prefix,
fname))
local_etag = utils.get_checksum(pth)
if object_prefix:
prefix = os.path.join(object_prefix, prefix)
object_prefix = ""
fullname_with_prefix = os.path.join(prefix, fname)
try:
obj = self._remote_files[fullname_with_prefix]
obj_etag = obj.etag
except KeyError:
obj = None
obj_etag = None
if local_etag != obj_etag:
if not ignore_timestamps:
if obj:
obj_time_str = obj.last_modified[:19]
else:
obj_time_str = EARLY_DATE_STR
local_mod = datetime.datetime.utcfromtimestamp(
os.stat(pth).st_mtime)
local_mod_str = local_mod.isoformat()
if obj_time_str >= local_mod_str:
# Remote object is newer
self._sync_summary["older"] += 1
if verbose:
log.info("%s NOT UPLOADED because remote object is "
"newer", fullname_with_prefix)
log.info(" Local: %s Remote: %s" % (
local_mod_str, obj_time_str))
continue
try:
container.upload_file(pth, obj_name=fullname_with_prefix,
etag=local_etag, return_none=True)
self._sync_summary["uploaded"] += 1
if verbose:
log.info("%s UPLOADED", fullname_with_prefix)
except Exception as e:
# Record the failure, and move on
self._sync_summary["failed"] += 1
self._sync_summary["failure_reasons"].append("%s" % e)
if verbose:
log.error("%s UPLOAD FAILED. Exception: %s" %
(fullname_with_prefix, e))
else:
self._sync_summary["duplicate"] += 1
if verbose:
log.info("%s NOT UPLOADED because it already exists",
fullname_with_prefix)
if delete and not prefix:
self._delete_objects_not_in_list(container, object_prefix) | def function[_sync_folder_to_container, parameter[self, folder_path, container, prefix, delete, include_hidden, ignore, ignore_timestamps, object_prefix, verbose]]:
constant[
This is the internal method that is called recursively to handle
nested folder structures.
]
variable[fnames] assign[=] call[name[os].listdir, parameter[name[folder_path]]]
variable[ignore] assign[=] call[name[utils].coerce_to_list, parameter[name[ignore]]]
variable[log] assign[=] call[name[logging].getLogger, parameter[constant[pyrax]]]
if <ast.UnaryOp object at 0x7da1b056e290> begin[:]
call[name[ignore].append, parameter[constant[.*]]]
for taget[name[fname]] in starred[name[fnames]] begin[:]
if call[name[utils].match_pattern, parameter[name[fname], name[ignore]]] begin[:]
<ast.AugAssign object at 0x7da1b056d780>
continue
variable[pth] assign[=] call[name[os].path.join, parameter[name[folder_path], name[fname]]]
if call[name[os].path.isdir, parameter[name[pth]]] begin[:]
variable[subprefix] assign[=] name[fname]
if name[prefix] begin[:]
variable[subprefix] assign[=] call[name[os].path.join, parameter[name[prefix], name[subprefix]]]
call[name[self]._sync_folder_to_container, parameter[name[pth], name[container]]]
continue
call[name[self]._local_files.append, parameter[call[name[os].path.join, parameter[name[object_prefix], name[prefix], name[fname]]]]]
variable[local_etag] assign[=] call[name[utils].get_checksum, parameter[name[pth]]]
if name[object_prefix] begin[:]
variable[prefix] assign[=] call[name[os].path.join, parameter[name[object_prefix], name[prefix]]]
variable[object_prefix] assign[=] constant[]
variable[fullname_with_prefix] assign[=] call[name[os].path.join, parameter[name[prefix], name[fname]]]
<ast.Try object at 0x7da1b056d030>
if compare[name[local_etag] not_equal[!=] name[obj_etag]] begin[:]
if <ast.UnaryOp object at 0x7da1b056c5b0> begin[:]
if name[obj] begin[:]
variable[obj_time_str] assign[=] call[name[obj].last_modified][<ast.Slice object at 0x7da1b056c3d0>]
variable[local_mod] assign[=] call[name[datetime].datetime.utcfromtimestamp, parameter[call[name[os].stat, parameter[name[pth]]].st_mtime]]
variable[local_mod_str] assign[=] call[name[local_mod].isoformat, parameter[]]
if compare[name[obj_time_str] greater_or_equal[>=] name[local_mod_str]] begin[:]
<ast.AugAssign object at 0x7da1b056cfa0>
if name[verbose] begin[:]
call[name[log].info, parameter[constant[%s NOT UPLOADED because remote object is newer], name[fullname_with_prefix]]]
call[name[log].info, parameter[binary_operation[constant[ Local: %s Remote: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bccb070>, <ast.Name object at 0x7da18bccafe0>]]]]]
continue
<ast.Try object at 0x7da18bcc8d90>
if <ast.BoolOp object at 0x7da18bcc88e0> begin[:]
call[name[self]._delete_objects_not_in_list, parameter[name[container], name[object_prefix]]] | keyword[def] identifier[_sync_folder_to_container] ( identifier[self] , identifier[folder_path] , identifier[container] , identifier[prefix] , identifier[delete] ,
identifier[include_hidden] , identifier[ignore] , identifier[ignore_timestamps] , identifier[object_prefix] , identifier[verbose] ):
literal[string]
identifier[fnames] = identifier[os] . identifier[listdir] ( identifier[folder_path] )
identifier[ignore] = identifier[utils] . identifier[coerce_to_list] ( identifier[ignore] )
identifier[log] = identifier[logging] . identifier[getLogger] ( literal[string] )
keyword[if] keyword[not] identifier[include_hidden] :
identifier[ignore] . identifier[append] ( literal[string] )
keyword[for] identifier[fname] keyword[in] identifier[fnames] :
keyword[if] identifier[utils] . identifier[match_pattern] ( identifier[fname] , identifier[ignore] ):
identifier[self] . identifier[_sync_summary] [ literal[string] ]+= literal[int]
keyword[continue]
identifier[pth] = identifier[os] . identifier[path] . identifier[join] ( identifier[folder_path] , identifier[fname] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[pth] ):
identifier[subprefix] = identifier[fname]
keyword[if] identifier[prefix] :
identifier[subprefix] = identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] , identifier[subprefix] )
identifier[self] . identifier[_sync_folder_to_container] ( identifier[pth] , identifier[container] , identifier[prefix] = identifier[subprefix] ,
identifier[delete] = identifier[delete] , identifier[include_hidden] = identifier[include_hidden] ,
identifier[ignore] = identifier[ignore] , identifier[ignore_timestamps] = identifier[ignore_timestamps] ,
identifier[object_prefix] = identifier[object_prefix] , identifier[verbose] = identifier[verbose] )
keyword[continue]
identifier[self] . identifier[_local_files] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[object_prefix] , identifier[prefix] ,
identifier[fname] ))
identifier[local_etag] = identifier[utils] . identifier[get_checksum] ( identifier[pth] )
keyword[if] identifier[object_prefix] :
identifier[prefix] = identifier[os] . identifier[path] . identifier[join] ( identifier[object_prefix] , identifier[prefix] )
identifier[object_prefix] = literal[string]
identifier[fullname_with_prefix] = identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] , identifier[fname] )
keyword[try] :
identifier[obj] = identifier[self] . identifier[_remote_files] [ identifier[fullname_with_prefix] ]
identifier[obj_etag] = identifier[obj] . identifier[etag]
keyword[except] identifier[KeyError] :
identifier[obj] = keyword[None]
identifier[obj_etag] = keyword[None]
keyword[if] identifier[local_etag] != identifier[obj_etag] :
keyword[if] keyword[not] identifier[ignore_timestamps] :
keyword[if] identifier[obj] :
identifier[obj_time_str] = identifier[obj] . identifier[last_modified] [: literal[int] ]
keyword[else] :
identifier[obj_time_str] = identifier[EARLY_DATE_STR]
identifier[local_mod] = identifier[datetime] . identifier[datetime] . identifier[utcfromtimestamp] (
identifier[os] . identifier[stat] ( identifier[pth] ). identifier[st_mtime] )
identifier[local_mod_str] = identifier[local_mod] . identifier[isoformat] ()
keyword[if] identifier[obj_time_str] >= identifier[local_mod_str] :
identifier[self] . identifier[_sync_summary] [ literal[string] ]+= literal[int]
keyword[if] identifier[verbose] :
identifier[log] . identifier[info] ( literal[string]
literal[string] , identifier[fullname_with_prefix] )
identifier[log] . identifier[info] ( literal[string] %(
identifier[local_mod_str] , identifier[obj_time_str] ))
keyword[continue]
keyword[try] :
identifier[container] . identifier[upload_file] ( identifier[pth] , identifier[obj_name] = identifier[fullname_with_prefix] ,
identifier[etag] = identifier[local_etag] , identifier[return_none] = keyword[True] )
identifier[self] . identifier[_sync_summary] [ literal[string] ]+= literal[int]
keyword[if] identifier[verbose] :
identifier[log] . identifier[info] ( literal[string] , identifier[fullname_with_prefix] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[_sync_summary] [ literal[string] ]+= literal[int]
identifier[self] . identifier[_sync_summary] [ literal[string] ]. identifier[append] ( literal[string] % identifier[e] )
keyword[if] identifier[verbose] :
identifier[log] . identifier[error] ( literal[string] %
( identifier[fullname_with_prefix] , identifier[e] ))
keyword[else] :
identifier[self] . identifier[_sync_summary] [ literal[string] ]+= literal[int]
keyword[if] identifier[verbose] :
identifier[log] . identifier[info] ( literal[string] ,
identifier[fullname_with_prefix] )
keyword[if] identifier[delete] keyword[and] keyword[not] identifier[prefix] :
identifier[self] . identifier[_delete_objects_not_in_list] ( identifier[container] , identifier[object_prefix] ) | def _sync_folder_to_container(self, folder_path, container, prefix, delete, include_hidden, ignore, ignore_timestamps, object_prefix, verbose):
"""
This is the internal method that is called recursively to handle
nested folder structures.
"""
fnames = os.listdir(folder_path)
ignore = utils.coerce_to_list(ignore)
log = logging.getLogger('pyrax')
if not include_hidden:
ignore.append('.*') # depends on [control=['if'], data=[]]
for fname in fnames:
if utils.match_pattern(fname, ignore):
self._sync_summary['ignored'] += 1
continue # depends on [control=['if'], data=[]]
pth = os.path.join(folder_path, fname)
if os.path.isdir(pth):
subprefix = fname
if prefix:
subprefix = os.path.join(prefix, subprefix) # depends on [control=['if'], data=[]]
self._sync_folder_to_container(pth, container, prefix=subprefix, delete=delete, include_hidden=include_hidden, ignore=ignore, ignore_timestamps=ignore_timestamps, object_prefix=object_prefix, verbose=verbose)
continue # depends on [control=['if'], data=[]]
self._local_files.append(os.path.join(object_prefix, prefix, fname))
local_etag = utils.get_checksum(pth)
if object_prefix:
prefix = os.path.join(object_prefix, prefix)
object_prefix = '' # depends on [control=['if'], data=[]]
fullname_with_prefix = os.path.join(prefix, fname)
try:
obj = self._remote_files[fullname_with_prefix]
obj_etag = obj.etag # depends on [control=['try'], data=[]]
except KeyError:
obj = None
obj_etag = None # depends on [control=['except'], data=[]]
if local_etag != obj_etag:
if not ignore_timestamps:
if obj:
obj_time_str = obj.last_modified[:19] # depends on [control=['if'], data=[]]
else:
obj_time_str = EARLY_DATE_STR
local_mod = datetime.datetime.utcfromtimestamp(os.stat(pth).st_mtime)
local_mod_str = local_mod.isoformat()
if obj_time_str >= local_mod_str:
# Remote object is newer
self._sync_summary['older'] += 1
if verbose:
log.info('%s NOT UPLOADED because remote object is newer', fullname_with_prefix)
log.info(' Local: %s Remote: %s' % (local_mod_str, obj_time_str)) # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=['obj_time_str', 'local_mod_str']] # depends on [control=['if'], data=[]]
try:
container.upload_file(pth, obj_name=fullname_with_prefix, etag=local_etag, return_none=True)
self._sync_summary['uploaded'] += 1
if verbose:
log.info('%s UPLOADED', fullname_with_prefix) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
# Record the failure, and move on
self._sync_summary['failed'] += 1
self._sync_summary['failure_reasons'].append('%s' % e)
if verbose:
log.error('%s UPLOAD FAILED. Exception: %s' % (fullname_with_prefix, e)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=['local_etag']]
else:
self._sync_summary['duplicate'] += 1
if verbose:
log.info('%s NOT UPLOADED because it already exists', fullname_with_prefix) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fname']]
if delete and (not prefix):
self._delete_objects_not_in_list(container, object_prefix) # depends on [control=['if'], data=[]] |
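
A minimal standalone sketch of the per-file upload decision used above, for readers who want the rule in isolation: an object is uploaded only when its checksum differs and, unless timestamps are ignored, the local copy is strictly newer. EARLY_DATE_STR is a stand-in assumption here and may differ from the real pyrax constant.

import datetime
import os

EARLY_DATE_STR = "1900-01-01"  # assumed placeholder for the pyrax constant

def should_upload(local_etag, obj_etag, local_path,
                  remote_mod_str=None, ignore_timestamps=False):
    if local_etag == obj_etag:
        return False  # checksums match: duplicate, nothing to do
    if not ignore_timestamps:
        obj_time_str = remote_mod_str or EARLY_DATE_STR
        local_mod = datetime.datetime.utcfromtimestamp(
            os.stat(local_path).st_mtime)
        # ISO-8601 timestamps compare correctly as plain strings
        if obj_time_str >= local_mod.isoformat():
            return False  # remote object is newer (or the same age)
    return True
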
def all_table_names_in_database(self, cache=False,
cache_timeout=None, force=False):
"""Parameters need to be passed as keyword arguments."""
if not self.allow_multi_schema_metadata_fetch:
return []
return self.db_engine_spec.fetch_result_sets(self, 'table') | def function[all_table_names_in_database, parameter[self, cache, cache_timeout, force]]:
constant[Return all table names in the database. Parameters need to be passed as keyword arguments.]
if <ast.UnaryOp object at 0x7da1b2030b20> begin[:]
return[list[[]]]
return[call[name[self].db_engine_spec.fetch_result_sets, parameter[name[self], constant[table]]]] | keyword[def] identifier[all_table_names_in_database] ( identifier[self] , identifier[cache] = keyword[False] ,
identifier[cache_timeout] = keyword[None] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[allow_multi_schema_metadata_fetch] :
keyword[return] []
keyword[return] identifier[self] . identifier[db_engine_spec] . identifier[fetch_result_sets] ( identifier[self] , literal[string] ) | def all_table_names_in_database(self, cache=False, cache_timeout=None, force=False):
"""Parameters need to be passed as keyword arguments."""
if not self.allow_multi_schema_metadata_fetch:
return [] # depends on [control=['if'], data=[]]
return self.db_engine_spec.fetch_result_sets(self, 'table') |
def main_executable_region_limbos_contain(self, addr):
"""
    Sometimes there exists a pointer that points to a few bytes before the beginning of a section, or a few bytes
    after the end of the section. We take care of that here.
:param int addr: The address to check.
:return: A 2-tuple of (bool, the closest base address)
:rtype: tuple
"""
TOLERANCE = 64
closest_region = None
least_limbo = None
for start, end in self.main_executable_regions:
if start - TOLERANCE <= addr < start:
if least_limbo is None or start - addr < least_limbo:
closest_region = (True, start)
least_limbo = start - addr
if end <= addr < end + TOLERANCE:
if least_limbo is None or addr - end < least_limbo:
closest_region = (True, end)
least_limbo = addr - end
if closest_region is not None:
return closest_region
return (False, None) | def function[main_executable_region_limbos_contain, parameter[self, addr]]:
constant[
Sometimes there exists a pointer that points to a few bytes before the beginning of a section, or a few bytes
after the end of the section. We take care of that here.
:param int addr: The address to check.
:return: A 2-tuple of (bool, the closest base address)
:rtype: tuple
]
variable[TOLERANCE] assign[=] constant[64]
variable[closest_region] assign[=] constant[None]
variable[least_limbo] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da20c992020>, <ast.Name object at 0x7da20c993010>]]] in starred[name[self].main_executable_regions] begin[:]
if compare[binary_operation[name[start] - name[TOLERANCE]] less_or_equal[<=] name[addr]] begin[:]
if <ast.BoolOp object at 0x7da20c991c30> begin[:]
variable[closest_region] assign[=] tuple[[<ast.Constant object at 0x7da20c990370>, <ast.Name object at 0x7da20c993400>]]
variable[least_limbo] assign[=] binary_operation[name[start] - name[addr]]
if compare[name[end] less_or_equal[<=] name[addr]] begin[:]
if <ast.BoolOp object at 0x7da20c990b20> begin[:]
variable[closest_region] assign[=] tuple[[<ast.Constant object at 0x7da20c993580>, <ast.Name object at 0x7da20c990be0>]]
variable[least_limbo] assign[=] binary_operation[name[addr] - name[end]]
if compare[name[closest_region] is_not constant[None]] begin[:]
return[name[closest_region]]
return[tuple[[<ast.Constant object at 0x7da20c990b80>, <ast.Constant object at 0x7da20c992170>]]] | keyword[def] identifier[main_executable_region_limbos_contain] ( identifier[self] , identifier[addr] ):
literal[string]
identifier[TOLERANCE] = literal[int]
identifier[closest_region] = keyword[None]
identifier[least_limbo] = keyword[None]
keyword[for] identifier[start] , identifier[end] keyword[in] identifier[self] . identifier[main_executable_regions] :
keyword[if] identifier[start] - identifier[TOLERANCE] <= identifier[addr] < identifier[start] :
keyword[if] identifier[least_limbo] keyword[is] keyword[None] keyword[or] identifier[start] - identifier[addr] < identifier[least_limbo] :
identifier[closest_region] =( keyword[True] , identifier[start] )
identifier[least_limbo] = identifier[start] - identifier[addr]
keyword[if] identifier[end] <= identifier[addr] < identifier[end] + identifier[TOLERANCE] :
keyword[if] identifier[least_limbo] keyword[is] keyword[None] keyword[or] identifier[addr] - identifier[end] < identifier[least_limbo] :
identifier[closest_region] =( keyword[True] , identifier[end] )
identifier[least_limbo] = identifier[addr] - identifier[end]
keyword[if] identifier[closest_region] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[closest_region]
keyword[return] ( keyword[False] , keyword[None] ) | def main_executable_region_limbos_contain(self, addr):
"""
Sometimes there exists a pointer that points to a few bytes before the beginning of a section, or a few bytes
    after the end of the section. We take care of that here.
:param int addr: The address to check.
:return: A 2-tuple of (bool, the closest base address)
:rtype: tuple
"""
TOLERANCE = 64
closest_region = None
least_limbo = None
for (start, end) in self.main_executable_regions:
if start - TOLERANCE <= addr < start:
if least_limbo is None or start - addr < least_limbo:
closest_region = (True, start)
least_limbo = start - addr # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['addr']]
if end <= addr < end + TOLERANCE:
if least_limbo is None or addr - end < least_limbo:
closest_region = (True, end)
least_limbo = addr - end # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['end', 'addr']] # depends on [control=['for'], data=[]]
if closest_region is not None:
return closest_region # depends on [control=['if'], data=['closest_region']]
return (False, None) |
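
The 64-byte tolerance window above is easiest to see with concrete numbers; a doctest-style walk-through, assuming a single region (0x1000, 0x2000):

>>> TOLERANCE = 64
>>> addr = 0x1000 - 10   # 10 bytes before the region start
>>> 0x1000 - TOLERANCE <= addr < 0x1000   # -> (True, 0x1000), limbo of 10
True
>>> addr = 0x2000 + 100  # beyond the 64-byte window past the end
>>> 0x2000 <= addr < 0x2000 + TOLERANCE   # -> (False, None)
False
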
def cli(ctx, data, verbose, color, format, editor):
"""Query a meetup database.
"""
ctx.obj['verbose'] = verbose
if verbose:
logging.basicConfig(level=logging.INFO)
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
ctx.obj['datadir'] = os.path.abspath(data)
if 'db' not in ctx.obj:
ctx.obj['db'] = get_db(data)
if color is None:
ctx.obj['term'] = blessings.Terminal()
elif color is True:
ctx.obj['term'] = blessings.Terminal(force_styling=True)
elif color is False:
ctx.obj['term'] = blessings.Terminal(force_styling=None)
if 'PYVO_TEST_NOW' in os.environ:
# Fake the current date for testing
ctx.obj['now'] = datetime.datetime.strptime(
os.environ['PYVO_TEST_NOW'], '%Y-%m-%d %H:%M:%S')
else:
ctx.obj['now'] = datetime.datetime.now()
ctx.obj['format'] = format
ctx.obj['editor'] = shlex.split(editor) | def function[cli, parameter[ctx, data, verbose, color, format, editor]]:
constant[Query a meetup database.
]
call[name[ctx].obj][constant[verbose]] assign[=] name[verbose]
if name[verbose] begin[:]
call[name[logging].basicConfig, parameter[]]
call[call[name[logging].getLogger, parameter[constant[sqlalchemy.engine]]].setLevel, parameter[name[logging].INFO]]
call[name[ctx].obj][constant[datadir]] assign[=] call[name[os].path.abspath, parameter[name[data]]]
if compare[constant[db] <ast.NotIn object at 0x7da2590d7190> name[ctx].obj] begin[:]
call[name[ctx].obj][constant[db]] assign[=] call[name[get_db], parameter[name[data]]]
if compare[name[color] is constant[None]] begin[:]
call[name[ctx].obj][constant[term]] assign[=] call[name[blessings].Terminal, parameter[]]
if compare[constant[PYVO_TEST_NOW] in name[os].environ] begin[:]
call[name[ctx].obj][constant[now]] assign[=] call[name[datetime].datetime.strptime, parameter[call[name[os].environ][constant[PYVO_TEST_NOW]], constant[%Y-%m-%d %H:%M:%S]]]
call[name[ctx].obj][constant[format]] assign[=] name[format]
call[name[ctx].obj][constant[editor]] assign[=] call[name[shlex].split, parameter[name[editor]]] | keyword[def] identifier[cli] ( identifier[ctx] , identifier[data] , identifier[verbose] , identifier[color] , identifier[format] , identifier[editor] ):
literal[string]
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[verbose]
keyword[if] identifier[verbose] :
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[INFO] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[os] . identifier[path] . identifier[abspath] ( identifier[data] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[ctx] . identifier[obj] :
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[get_db] ( identifier[data] )
keyword[if] identifier[color] keyword[is] keyword[None] :
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[blessings] . identifier[Terminal] ()
keyword[elif] identifier[color] keyword[is] keyword[True] :
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[blessings] . identifier[Terminal] ( identifier[force_styling] = keyword[True] )
keyword[elif] identifier[color] keyword[is] keyword[False] :
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[blessings] . identifier[Terminal] ( identifier[force_styling] = keyword[None] )
keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] :
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[datetime] . identifier[datetime] . identifier[strptime] (
identifier[os] . identifier[environ] [ literal[string] ], literal[string] )
keyword[else] :
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[format]
identifier[ctx] . identifier[obj] [ literal[string] ]= identifier[shlex] . identifier[split] ( identifier[editor] ) | def cli(ctx, data, verbose, color, format, editor):
"""Query a meetup database.
"""
ctx.obj['verbose'] = verbose
if verbose:
logging.basicConfig(level=logging.INFO)
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) # depends on [control=['if'], data=[]]
ctx.obj['datadir'] = os.path.abspath(data)
if 'db' not in ctx.obj:
ctx.obj['db'] = get_db(data) # depends on [control=['if'], data=[]]
if color is None:
ctx.obj['term'] = blessings.Terminal() # depends on [control=['if'], data=[]]
elif color is True:
ctx.obj['term'] = blessings.Terminal(force_styling=True) # depends on [control=['if'], data=[]]
elif color is False:
ctx.obj['term'] = blessings.Terminal(force_styling=None) # depends on [control=['if'], data=[]]
if 'PYVO_TEST_NOW' in os.environ:
# Fake the current date for testing
ctx.obj['now'] = datetime.datetime.strptime(os.environ['PYVO_TEST_NOW'], '%Y-%m-%d %H:%M:%S') # depends on [control=['if'], data=[]]
else:
ctx.obj['now'] = datetime.datetime.now()
ctx.obj['format'] = format
ctx.obj['editor'] = shlex.split(editor) |
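
The PYVO_TEST_NOW hook above makes the CLI deterministic under test. A small sketch of how a test could pin the clock (the date value is arbitrary):

import datetime
import os

os.environ["PYVO_TEST_NOW"] = "2016-01-01 12:00:00"
# The CLI now parses this string instead of calling datetime.datetime.now()
fake_now = datetime.datetime.strptime(
    os.environ["PYVO_TEST_NOW"], "%Y-%m-%d %H:%M:%S")
assert fake_now == datetime.datetime(2016, 1, 1, 12, 0, 0)
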
def run(self):
'''Reads from the channel (pipe) that is the output pipe for a
called Popen. As we are reading from the pipe, the output is added
    to a deque. After the size of the deque exceeds the sizelimit,
earlier (older) entries are removed.
This means the returned output is chunksize-sensitive, but is not
really byte-sensitive.
'''
try:
while True:
line = self.chan.read(self.chunksize)
if not line:
# Pipe can remain open after output has completed
break
self.deque.append(line)
except (ValueError, IOError):
# pipe has closed, meaning command output is done
pass
self.running = False | def function[run, parameter[self]]:
constant[Reads from the channel (pipe) that is the output pipe for a
called Popen. As we are reading from the pipe, the output is added
to a deque. After the size of the deque exceeds the sizelimit,
earlier (older) entries are removed.
This means the returned output is chunksize-sensitive, but is not
really byte-sensitive.
]
<ast.Try object at 0x7da18c4cdb40>
name[self].running assign[=] constant[False] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[while] keyword[True] :
identifier[line] = identifier[self] . identifier[chan] . identifier[read] ( identifier[self] . identifier[chunksize] )
keyword[if] keyword[not] identifier[line] :
keyword[break]
identifier[self] . identifier[deque] . identifier[append] ( identifier[line] )
keyword[except] ( identifier[ValueError] , identifier[IOError] ):
keyword[pass]
identifier[self] . identifier[running] = keyword[False] | def run(self):
"""Reads from the channel (pipe) that is the output pipe for a
called Popen. As we are reading from the pipe, the output is added
    to a deque. After the size of the deque exceeds the sizelimit,
earlier (older) entries are removed.
This means the returned output is chunksize-sensitive, but is not
really byte-sensitive.
"""
try:
while True:
line = self.chan.read(self.chunksize)
if not line:
# Pipe can remain open after output has completed
break # depends on [control=['if'], data=[]]
self.deque.append(line) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except (ValueError, IOError):
# pipe has closed, meaning command output is done
pass # depends on [control=['except'], data=[]]
self.running = False |
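
The reader above relies on its deque being bounded elsewhere; with collections.deque(maxlen=...) the drop-oldest behaviour comes for free. A self-contained sketch with a fake pipe (the chunk size and data are illustrative):

import collections
import io

chunksize = 4
deque = collections.deque(maxlen=3)     # keep only the last 3 chunks
chan = io.BytesIO(b"0123456789abcdef")  # stands in for the Popen pipe

while True:
    line = chan.read(chunksize)
    if not line:
        break
    deque.append(line)  # once full, the oldest chunk is evicted

print(b"".join(deque))  # b'456789abcdef': only the tail survives
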
def set_zero_config(self):
"""Set config such that radiative forcing and temperature output will be zero
    This method is intended as a convenience only; it does not handle everything in
    an obvious way. Adjusting the parameter settings still requires great care, and
    the model may behave unexpectedly.
"""
# zero_emissions is imported from scenarios module
zero_emissions.write(join(self.run_dir, self._scen_file_name), self.version)
time = zero_emissions.filter(variable="Emissions|CH4", region="World")[
"time"
].values
no_timesteps = len(time)
# value doesn't actually matter as calculations are done from difference but
    # choose a sensible value nonetheless
ch4_conc_pi = 722
ch4_conc = ch4_conc_pi * np.ones(no_timesteps)
ch4_conc_df = pd.DataFrame(
{
"time": time,
"scenario": "idealised",
"model": "unspecified",
"climate_model": "unspecified",
"variable": "Atmospheric Concentrations|CH4",
"unit": "ppb",
"todo": "SET",
"region": "World",
"value": ch4_conc,
}
)
ch4_conc_writer = MAGICCData(ch4_conc_df)
ch4_conc_filename = "HIST_CONSTANT_CH4_CONC.IN"
ch4_conc_writer.metadata = {
"header": "Constant pre-industrial CH4 concentrations"
}
ch4_conc_writer.write(join(self.run_dir, ch4_conc_filename), self.version)
fgas_conc_pi = 0
fgas_conc = fgas_conc_pi * np.ones(no_timesteps)
# MAGICC6 doesn't read this so not a problem, for MAGICC7 we might have to
# write each file separately
varname = "FGAS_CONC"
fgas_conc_df = pd.DataFrame(
{
"time": time,
"scenario": "idealised",
"model": "unspecified",
"climate_model": "unspecified",
"variable": varname,
"unit": "ppt",
"todo": "SET",
"region": "World",
"value": fgas_conc,
}
)
fgas_conc_writer = MAGICCData(fgas_conc_df)
fgas_conc_filename = "HIST_ZERO_{}.IN".format(varname)
fgas_conc_writer.metadata = {"header": "Zero concentrations"}
fgas_conc_writer.write(join(self.run_dir, fgas_conc_filename), self.version)
emis_config = self._fix_any_backwards_emissions_scen_key_in_config(
{"file_emissionscenario": self._scen_file_name}
)
self.set_config(
**emis_config,
rf_initialization_method="ZEROSTARTSHIFT",
rf_total_constantafteryr=10000,
file_co2i_emis="",
file_co2b_emis="",
co2_switchfromconc2emis_year=1750,
file_ch4i_emis="",
file_ch4b_emis="",
file_ch4n_emis="",
file_ch4_conc=ch4_conc_filename,
ch4_switchfromconc2emis_year=10000,
file_n2oi_emis="",
file_n2ob_emis="",
file_n2on_emis="",
file_n2o_conc="",
n2o_switchfromconc2emis_year=1750,
file_noxi_emis="",
file_noxb_emis="",
file_noxi_ot="",
file_noxb_ot="",
file_noxt_rf="",
file_soxnb_ot="",
file_soxi_ot="",
file_soxt_rf="",
file_soxi_emis="",
file_soxb_emis="",
file_soxn_emis="",
file_oci_emis="",
file_ocb_emis="",
file_oci_ot="",
file_ocb_ot="",
file_oci_rf="",
file_ocb_rf="",
file_bci_emis="",
file_bcb_emis="",
file_bci_ot="",
file_bcb_ot="",
file_bci_rf="",
file_bcb_rf="",
bcoc_switchfromrf2emis_year=1750,
file_nh3i_emis="",
file_nh3b_emis="",
file_nmvoci_emis="",
file_nmvocb_emis="",
file_coi_emis="",
file_cob_emis="",
file_mineraldust_rf="",
file_landuse_rf="",
file_bcsnow_rf="",
# rf_fgassum_scale=0, # this appears to do nothing, hence the next two lines
file_fgas_conc=[fgas_conc_filename] * 12,
fgas_switchfromconc2emis_year=10000,
rf_mhalosum_scale=0,
mhalo_switch_conc2emis_yr=1750,
stratoz_o3scale=0,
rf_volcanic_scale=0,
rf_solar_scale=0,
) | def function[set_zero_config, parameter[self]]:
constant[Set config such that radiative forcing and temperature output will be zero
This method is intended as a convenience only; it does not handle everything in
an obvious way. Adjusting the parameter settings still requires great care, and
the model may behave unexpectedly.
]
call[name[zero_emissions].write, parameter[call[name[join], parameter[name[self].run_dir, name[self]._scen_file_name]], name[self].version]]
variable[time] assign[=] call[call[name[zero_emissions].filter, parameter[]]][constant[time]].values
variable[no_timesteps] assign[=] call[name[len], parameter[name[time]]]
variable[ch4_conc_pi] assign[=] constant[722]
variable[ch4_conc] assign[=] binary_operation[name[ch4_conc_pi] * call[name[np].ones, parameter[name[no_timesteps]]]]
variable[ch4_conc_df] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18f09c160>, <ast.Constant object at 0x7da18f09d030>, <ast.Constant object at 0x7da18f09f7f0>, <ast.Constant object at 0x7da18f09f2e0>, <ast.Constant object at 0x7da18f09dcf0>, <ast.Constant object at 0x7da18f09e050>, <ast.Constant object at 0x7da18f09fd90>, <ast.Constant object at 0x7da18f09d930>, <ast.Constant object at 0x7da18f09da50>], [<ast.Name object at 0x7da18f09e8c0>, <ast.Constant object at 0x7da18f09d6c0>, <ast.Constant object at 0x7da18f09e650>, <ast.Constant object at 0x7da18f09d8a0>, <ast.Constant object at 0x7da18f09ef20>, <ast.Constant object at 0x7da18f09f1f0>, <ast.Constant object at 0x7da18f09ea40>, <ast.Constant object at 0x7da18f09f520>, <ast.Name object at 0x7da18f09ee90>]]]]
variable[ch4_conc_writer] assign[=] call[name[MAGICCData], parameter[name[ch4_conc_df]]]
variable[ch4_conc_filename] assign[=] constant[HIST_CONSTANT_CH4_CONC.IN]
name[ch4_conc_writer].metadata assign[=] dictionary[[<ast.Constant object at 0x7da18f09f190>], [<ast.Constant object at 0x7da18f09d180>]]
call[name[ch4_conc_writer].write, parameter[call[name[join], parameter[name[self].run_dir, name[ch4_conc_filename]]], name[self].version]]
variable[fgas_conc_pi] assign[=] constant[0]
variable[fgas_conc] assign[=] binary_operation[name[fgas_conc_pi] * call[name[np].ones, parameter[name[no_timesteps]]]]
variable[varname] assign[=] constant[FGAS_CONC]
variable[fgas_conc_df] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da2054a5930>, <ast.Constant object at 0x7da2054a7310>, <ast.Constant object at 0x7da2054a5c90>, <ast.Constant object at 0x7da2054a69b0>, <ast.Constant object at 0x7da2054a7a30>, <ast.Constant object at 0x7da2054a76a0>, <ast.Constant object at 0x7da2054a78b0>, <ast.Constant object at 0x7da2054a6650>, <ast.Constant object at 0x7da2054a4820>], [<ast.Name object at 0x7da2054a6260>, <ast.Constant object at 0x7da2054a7910>, <ast.Constant object at 0x7da2054a4d90>, <ast.Constant object at 0x7da2054a7460>, <ast.Name object at 0x7da2054a4e80>, <ast.Constant object at 0x7da2054a76d0>, <ast.Constant object at 0x7da2054a4280>, <ast.Constant object at 0x7da2054a5ab0>, <ast.Name object at 0x7da2054a4370>]]]]
variable[fgas_conc_writer] assign[=] call[name[MAGICCData], parameter[name[fgas_conc_df]]]
variable[fgas_conc_filename] assign[=] call[constant[HIST_ZERO_{}.IN].format, parameter[name[varname]]]
name[fgas_conc_writer].metadata assign[=] dictionary[[<ast.Constant object at 0x7da2054a6380>], [<ast.Constant object at 0x7da2054a7610>]]
call[name[fgas_conc_writer].write, parameter[call[name[join], parameter[name[self].run_dir, name[fgas_conc_filename]]], name[self].version]]
variable[emis_config] assign[=] call[name[self]._fix_any_backwards_emissions_scen_key_in_config, parameter[dictionary[[<ast.Constant object at 0x7da2054a71c0>], [<ast.Attribute object at 0x7da2054a5db0>]]]]
call[name[self].set_config, parameter[]] | keyword[def] identifier[set_zero_config] ( identifier[self] ):
literal[string]
identifier[zero_emissions] . identifier[write] ( identifier[join] ( identifier[self] . identifier[run_dir] , identifier[self] . identifier[_scen_file_name] ), identifier[self] . identifier[version] )
identifier[time] = identifier[zero_emissions] . identifier[filter] ( identifier[variable] = literal[string] , identifier[region] = literal[string] )[
literal[string]
]. identifier[values]
identifier[no_timesteps] = identifier[len] ( identifier[time] )
identifier[ch4_conc_pi] = literal[int]
identifier[ch4_conc] = identifier[ch4_conc_pi] * identifier[np] . identifier[ones] ( identifier[no_timesteps] )
identifier[ch4_conc_df] = identifier[pd] . identifier[DataFrame] (
{
literal[string] : identifier[time] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[ch4_conc] ,
}
)
identifier[ch4_conc_writer] = identifier[MAGICCData] ( identifier[ch4_conc_df] )
identifier[ch4_conc_filename] = literal[string]
identifier[ch4_conc_writer] . identifier[metadata] ={
literal[string] : literal[string]
}
identifier[ch4_conc_writer] . identifier[write] ( identifier[join] ( identifier[self] . identifier[run_dir] , identifier[ch4_conc_filename] ), identifier[self] . identifier[version] )
identifier[fgas_conc_pi] = literal[int]
identifier[fgas_conc] = identifier[fgas_conc_pi] * identifier[np] . identifier[ones] ( identifier[no_timesteps] )
identifier[varname] = literal[string]
identifier[fgas_conc_df] = identifier[pd] . identifier[DataFrame] (
{
literal[string] : identifier[time] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[varname] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[fgas_conc] ,
}
)
identifier[fgas_conc_writer] = identifier[MAGICCData] ( identifier[fgas_conc_df] )
identifier[fgas_conc_filename] = literal[string] . identifier[format] ( identifier[varname] )
identifier[fgas_conc_writer] . identifier[metadata] ={ literal[string] : literal[string] }
identifier[fgas_conc_writer] . identifier[write] ( identifier[join] ( identifier[self] . identifier[run_dir] , identifier[fgas_conc_filename] ), identifier[self] . identifier[version] )
identifier[emis_config] = identifier[self] . identifier[_fix_any_backwards_emissions_scen_key_in_config] (
{ literal[string] : identifier[self] . identifier[_scen_file_name] }
)
identifier[self] . identifier[set_config] (
** identifier[emis_config] ,
identifier[rf_initialization_method] = literal[string] ,
identifier[rf_total_constantafteryr] = literal[int] ,
identifier[file_co2i_emis] = literal[string] ,
identifier[file_co2b_emis] = literal[string] ,
identifier[co2_switchfromconc2emis_year] = literal[int] ,
identifier[file_ch4i_emis] = literal[string] ,
identifier[file_ch4b_emis] = literal[string] ,
identifier[file_ch4n_emis] = literal[string] ,
identifier[file_ch4_conc] = identifier[ch4_conc_filename] ,
identifier[ch4_switchfromconc2emis_year] = literal[int] ,
identifier[file_n2oi_emis] = literal[string] ,
identifier[file_n2ob_emis] = literal[string] ,
identifier[file_n2on_emis] = literal[string] ,
identifier[file_n2o_conc] = literal[string] ,
identifier[n2o_switchfromconc2emis_year] = literal[int] ,
identifier[file_noxi_emis] = literal[string] ,
identifier[file_noxb_emis] = literal[string] ,
identifier[file_noxi_ot] = literal[string] ,
identifier[file_noxb_ot] = literal[string] ,
identifier[file_noxt_rf] = literal[string] ,
identifier[file_soxnb_ot] = literal[string] ,
identifier[file_soxi_ot] = literal[string] ,
identifier[file_soxt_rf] = literal[string] ,
identifier[file_soxi_emis] = literal[string] ,
identifier[file_soxb_emis] = literal[string] ,
identifier[file_soxn_emis] = literal[string] ,
identifier[file_oci_emis] = literal[string] ,
identifier[file_ocb_emis] = literal[string] ,
identifier[file_oci_ot] = literal[string] ,
identifier[file_ocb_ot] = literal[string] ,
identifier[file_oci_rf] = literal[string] ,
identifier[file_ocb_rf] = literal[string] ,
identifier[file_bci_emis] = literal[string] ,
identifier[file_bcb_emis] = literal[string] ,
identifier[file_bci_ot] = literal[string] ,
identifier[file_bcb_ot] = literal[string] ,
identifier[file_bci_rf] = literal[string] ,
identifier[file_bcb_rf] = literal[string] ,
identifier[bcoc_switchfromrf2emis_year] = literal[int] ,
identifier[file_nh3i_emis] = literal[string] ,
identifier[file_nh3b_emis] = literal[string] ,
identifier[file_nmvoci_emis] = literal[string] ,
identifier[file_nmvocb_emis] = literal[string] ,
identifier[file_coi_emis] = literal[string] ,
identifier[file_cob_emis] = literal[string] ,
identifier[file_mineraldust_rf] = literal[string] ,
identifier[file_landuse_rf] = literal[string] ,
identifier[file_bcsnow_rf] = literal[string] ,
identifier[file_fgas_conc] =[ identifier[fgas_conc_filename] ]* literal[int] ,
identifier[fgas_switchfromconc2emis_year] = literal[int] ,
identifier[rf_mhalosum_scale] = literal[int] ,
identifier[mhalo_switch_conc2emis_yr] = literal[int] ,
identifier[stratoz_o3scale] = literal[int] ,
identifier[rf_volcanic_scale] = literal[int] ,
identifier[rf_solar_scale] = literal[int] ,
) | def set_zero_config(self):
"""Set config such that radiative forcing and temperature output will be zero
    This method is intended as a convenience only; it does not handle everything in
    an obvious way. Adjusting the parameter settings still requires great care, and
    the model may behave unexpectedly.
"""
# zero_emissions is imported from scenarios module
zero_emissions.write(join(self.run_dir, self._scen_file_name), self.version)
time = zero_emissions.filter(variable='Emissions|CH4', region='World')['time'].values
no_timesteps = len(time)
# value doesn't actually matter as calculations are done from difference but
    # choose a sensible value nonetheless
ch4_conc_pi = 722
ch4_conc = ch4_conc_pi * np.ones(no_timesteps)
ch4_conc_df = pd.DataFrame({'time': time, 'scenario': 'idealised', 'model': 'unspecified', 'climate_model': 'unspecified', 'variable': 'Atmospheric Concentrations|CH4', 'unit': 'ppb', 'todo': 'SET', 'region': 'World', 'value': ch4_conc})
ch4_conc_writer = MAGICCData(ch4_conc_df)
ch4_conc_filename = 'HIST_CONSTANT_CH4_CONC.IN'
ch4_conc_writer.metadata = {'header': 'Constant pre-industrial CH4 concentrations'}
ch4_conc_writer.write(join(self.run_dir, ch4_conc_filename), self.version)
fgas_conc_pi = 0
fgas_conc = fgas_conc_pi * np.ones(no_timesteps)
# MAGICC6 doesn't read this so not a problem, for MAGICC7 we might have to
# write each file separately
varname = 'FGAS_CONC'
fgas_conc_df = pd.DataFrame({'time': time, 'scenario': 'idealised', 'model': 'unspecified', 'climate_model': 'unspecified', 'variable': varname, 'unit': 'ppt', 'todo': 'SET', 'region': 'World', 'value': fgas_conc})
fgas_conc_writer = MAGICCData(fgas_conc_df)
fgas_conc_filename = 'HIST_ZERO_{}.IN'.format(varname)
fgas_conc_writer.metadata = {'header': 'Zero concentrations'}
fgas_conc_writer.write(join(self.run_dir, fgas_conc_filename), self.version)
emis_config = self._fix_any_backwards_emissions_scen_key_in_config({'file_emissionscenario': self._scen_file_name})
# rf_fgassum_scale=0, # this appears to do nothing, hence the next two lines
self.set_config(**emis_config, rf_initialization_method='ZEROSTARTSHIFT', rf_total_constantafteryr=10000, file_co2i_emis='', file_co2b_emis='', co2_switchfromconc2emis_year=1750, file_ch4i_emis='', file_ch4b_emis='', file_ch4n_emis='', file_ch4_conc=ch4_conc_filename, ch4_switchfromconc2emis_year=10000, file_n2oi_emis='', file_n2ob_emis='', file_n2on_emis='', file_n2o_conc='', n2o_switchfromconc2emis_year=1750, file_noxi_emis='', file_noxb_emis='', file_noxi_ot='', file_noxb_ot='', file_noxt_rf='', file_soxnb_ot='', file_soxi_ot='', file_soxt_rf='', file_soxi_emis='', file_soxb_emis='', file_soxn_emis='', file_oci_emis='', file_ocb_emis='', file_oci_ot='', file_ocb_ot='', file_oci_rf='', file_ocb_rf='', file_bci_emis='', file_bcb_emis='', file_bci_ot='', file_bcb_ot='', file_bci_rf='', file_bcb_rf='', bcoc_switchfromrf2emis_year=1750, file_nh3i_emis='', file_nh3b_emis='', file_nmvoci_emis='', file_nmvocb_emis='', file_coi_emis='', file_cob_emis='', file_mineraldust_rf='', file_landuse_rf='', file_bcsnow_rf='', file_fgas_conc=[fgas_conc_filename] * 12, fgas_switchfromconc2emis_year=10000, rf_mhalosum_scale=0, mhalo_switch_conc2emis_yr=1750, stratoz_o3scale=0, rf_volcanic_scale=0, rf_solar_scale=0) |
def onCall(self, n): #pylint: disable=invalid-name
"""
Adds a condition for when the stub is called. When the condition is met, a special
return value can be returned. Adds the specified call number into the condition
list.
For example, when the stub function is called the second time, it will return "#":
stub.onCall(1).returns("#")
Without returns/throws at the end of the chain of functions, nothing will happen.
For example, in this case, although 2 is in the condition list, nothing will happen:
stub.onCall(2)
Args:
n: integer, the call # for which we want a special return value.
The first call has an index of 0.
Return:
a SinonStub object (able to be chained)
"""
cond_oncall = n + 1
return _SinonStubCondition(copy=self._copy, oncall=cond_oncall, cond_args=self._cond_args, cond_kwargs=self._cond_kwargs) | def function[onCall, parameter[self, n]]:
constant[
Adds a condition for when the stub is called. When the condition is met, a special
return value can be returned. Adds the specified call number into the condition
list.
For example, when the stub function is called the second time, it will return "#":
stub.onCall(1).returns("#")
Without returns/throws at the end of the chain of functions, nothing will happen.
For example, in this case, although 2 is in the condition list, nothing will happen:
stub.onCall(2)
Args:
n: integer, the call # for which we want a special return value.
The first call has an index of 0.
Return:
a SinonStub object (able to be chained)
]
variable[cond_oncall] assign[=] binary_operation[name[n] + constant[1]]
return[call[name[_SinonStubCondition], parameter[]]] | keyword[def] identifier[onCall] ( identifier[self] , identifier[n] ):
literal[string]
identifier[cond_oncall] = identifier[n] + literal[int]
keyword[return] identifier[_SinonStubCondition] ( identifier[copy] = identifier[self] . identifier[_copy] , identifier[oncall] = identifier[cond_oncall] , identifier[cond_args] = identifier[self] . identifier[_cond_args] , identifier[cond_kwargs] = identifier[self] . identifier[_cond_kwargs] ) | def onCall(self, n): #pylint: disable=invalid-name
'\n Adds a condition for when the stub is called. When the condition is met, a special\n return value can be returned. Adds the specified call number into the condition\n list.\n\n For example, when the stub function is called the second time, it will return "#":\n stub.onCall(1).returns("#")\n\n Without returns/throws at the end of the chain of functions, nothing will happen.\n For example, in this case, although 2 is in the condition list, nothing will happen:\n stub.onCall(2)\n\n Args:\n n: integer, the call # for which we want a special return value.\n The first call has an index of 0.\n\n Return:\n a SinonStub object (able to be chained)\n '
cond_oncall = n + 1
return _SinonStubCondition(copy=self._copy, oncall=cond_oncall, cond_args=self._cond_args, cond_kwargs=self._cond_kwargs) |
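
A minimal sketch of the 1-based bookkeeping behind onCall (cond_oncall = n + 1 above); the real sinon.py condition chaining is richer, and the class below is purely illustrative:

class TinyStub:
    def __init__(self):
        self.call_count = 0
        self.returns_by_call = {}  # 1-based call number -> return value

    def on_call(self, n):
        self._pending = n + 1      # callers use 0-based indices
        return self

    def returns(self, value):
        self.returns_by_call[self._pending] = value
        return self

    def __call__(self, *args, **kwargs):
        self.call_count += 1
        return self.returns_by_call.get(self.call_count)

stub = TinyStub()
stub.on_call(1).returns("#")
print(stub(), stub())  # None # : the special value appears on the 2nd call
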
def public_dsn(dsn):
'''Transform a standard Sentry DSN into a public one'''
m = RE_DSN.match(dsn)
    if not m:
        log.error('Unable to parse Sentry DSN')
        # Bail out here: calling m.groupdict() on a failed match would
        # raise AttributeError instead of reporting the parse failure.
        return None
public = '{scheme}://{client_id}@{domain}/{site_id}'.format(
**m.groupdict())
return public | def function[public_dsn, parameter[dsn]]:
constant[Transform a standard Sentry DSN into a public one]
variable[m] assign[=] call[name[RE_DSN].match, parameter[name[dsn]]]
if <ast.UnaryOp object at 0x7da18f09d7e0> begin[:]
call[name[log].error, parameter[constant[Unable to parse Sentry DSN]]]
variable[public] assign[=] call[constant[{scheme}://{client_id}@{domain}/{site_id}].format, parameter[]]
return[name[public]] | keyword[def] identifier[public_dsn] ( identifier[dsn] ):
literal[string]
identifier[m] = identifier[RE_DSN] . identifier[match] ( identifier[dsn] )
keyword[if] keyword[not] identifier[m] :
identifier[log] . identifier[error] ( literal[string] )
identifier[public] = literal[string] . identifier[format] (
** identifier[m] . identifier[groupdict] ())
keyword[return] identifier[public] | def public_dsn(dsn):
"""Transform a standard Sentry DSN into a public one"""
m = RE_DSN.match(dsn)
    if not m:
        log.error('Unable to parse Sentry DSN') # depends on [control=['if'], data=[]]
        return None  # bail out: m.groupdict() on a failed match would raise AttributeError
public = '{scheme}://{client_id}@{domain}/{site_id}'.format(**m.groupdict())
return public |
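
RE_DSN itself is defined elsewhere; assuming it captures scheme, client_id, a secret, domain and site_id from a standard Sentry DSN, the public form simply drops the secret. A sketch with a hypothetical pattern (the real regex may differ):

import re

# Hypothetical pattern, for illustration only.
RE_DSN = re.compile(
    r'(?P<scheme>https?)://(?P<client_id>\w+):(?P<secret>\w+)'
    r'@(?P<domain>[\w.:-]+)/(?P<site_id>\d+)')

m = RE_DSN.match('https://abc123:s3cr3t@sentry.example.com/42')
print('{scheme}://{client_id}@{domain}/{site_id}'.format(**m.groupdict()))
# -> https://abc123@sentry.example.com/42
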
def download(self, field):
"""Download a file.
:param field: file field to download
:type field: string
:rtype: a file handle
"""
if not field.startswith('output'):
raise ValueError("Only processor results (output.* fields) can be downloaded")
if field not in self.annotation:
raise ValueError("Download field {} does not exist".format(field))
ann = self.annotation[field]
if ann['type'] != 'basic:file:':
raise ValueError("Only basic:file: field can be downloaded")
return next(self.gencloud.download([self.id], field)) | def function[download, parameter[self, field]]:
constant[Download a file.
:param field: file field to download
:type field: string
:rtype: a file handle
]
if <ast.UnaryOp object at 0x7da1b26ac550> begin[:]
<ast.Raise object at 0x7da1b26ad0c0>
if compare[name[field] <ast.NotIn object at 0x7da2590d7190> name[self].annotation] begin[:]
<ast.Raise object at 0x7da1b26ae170>
variable[ann] assign[=] call[name[self].annotation][name[field]]
if compare[call[name[ann]][constant[type]] not_equal[!=] constant[basic:file:]] begin[:]
<ast.Raise object at 0x7da1b26ac6d0>
return[call[name[next], parameter[call[name[self].gencloud.download, parameter[list[[<ast.Attribute object at 0x7da1b26ad420>]], name[field]]]]]] | keyword[def] identifier[download] ( identifier[self] , identifier[field] ):
literal[string]
keyword[if] keyword[not] identifier[field] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[field] keyword[not] keyword[in] identifier[self] . identifier[annotation] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[field] ))
identifier[ann] = identifier[self] . identifier[annotation] [ identifier[field] ]
keyword[if] identifier[ann] [ literal[string] ]!= literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[next] ( identifier[self] . identifier[gencloud] . identifier[download] ([ identifier[self] . identifier[id] ], identifier[field] )) | def download(self, field):
"""Download a file.
:param field: file field to download
:type field: string
:rtype: a file handle
"""
if not field.startswith('output'):
raise ValueError('Only processor results (output.* fields) can be downloaded') # depends on [control=['if'], data=[]]
if field not in self.annotation:
raise ValueError('Download field {} does not exist'.format(field)) # depends on [control=['if'], data=['field']]
ann = self.annotation[field]
if ann['type'] != 'basic:file:':
raise ValueError('Only basic:file: field can be downloaded') # depends on [control=['if'], data=[]]
return next(self.gencloud.download([self.id], field)) |
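
Usage follows the guard clauses above; a doctest-style illustration with a hypothetical field name (the field must exist in the object's annotation and have type basic:file:):

>>> handle = data.download('output.alignment')  # hypothetical field
>>> data.download('input.reads')
Traceback (most recent call last):
    ...
ValueError: Only processor results (output.* fields) can be downloaded
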
def _refresh_controller_id(self):
"""Determine the Kafka cluster controller."""
version = self._matching_api_version(MetadataRequest)
if 1 <= version <= 6:
request = MetadataRequest[version]()
response = self._send_request_to_node(self._client.least_loaded_node(), request)
controller_id = response.controller_id
# verify the controller is new enough to support our requests
controller_version = self._client.check_version(controller_id)
if controller_version < (0, 10, 0):
raise IncompatibleBrokerVersion(
"The controller appears to be running Kafka {}. KafkaAdminClient requires brokers >= 0.10.0.0."
.format(controller_version))
self._controller_id = controller_id
else:
raise UnrecognizedBrokerVersion(
"Kafka Admin interface cannot determine the controller using MetadataRequest_v{}."
.format(version)) | def function[_refresh_controller_id, parameter[self]]:
constant[Determine the Kafka cluster controller.]
variable[version] assign[=] call[name[self]._matching_api_version, parameter[name[MetadataRequest]]]
if compare[constant[1] less_or_equal[<=] name[version]] begin[:]
variable[request] assign[=] call[call[name[MetadataRequest]][name[version]], parameter[]]
variable[response] assign[=] call[name[self]._send_request_to_node, parameter[call[name[self]._client.least_loaded_node, parameter[]], name[request]]]
variable[controller_id] assign[=] name[response].controller_id
variable[controller_version] assign[=] call[name[self]._client.check_version, parameter[name[controller_id]]]
if compare[name[controller_version] less[<] tuple[[<ast.Constant object at 0x7da1b1f3aef0>, <ast.Constant object at 0x7da1b1f3b7f0>, <ast.Constant object at 0x7da1b1f3bc40>]]] begin[:]
<ast.Raise object at 0x7da1b1f3b010>
name[self]._controller_id assign[=] name[controller_id] | keyword[def] identifier[_refresh_controller_id] ( identifier[self] ):
literal[string]
identifier[version] = identifier[self] . identifier[_matching_api_version] ( identifier[MetadataRequest] )
keyword[if] literal[int] <= identifier[version] <= literal[int] :
identifier[request] = identifier[MetadataRequest] [ identifier[version] ]()
identifier[response] = identifier[self] . identifier[_send_request_to_node] ( identifier[self] . identifier[_client] . identifier[least_loaded_node] (), identifier[request] )
identifier[controller_id] = identifier[response] . identifier[controller_id]
identifier[controller_version] = identifier[self] . identifier[_client] . identifier[check_version] ( identifier[controller_id] )
keyword[if] identifier[controller_version] <( literal[int] , literal[int] , literal[int] ):
keyword[raise] identifier[IncompatibleBrokerVersion] (
literal[string]
. identifier[format] ( identifier[controller_version] ))
identifier[self] . identifier[_controller_id] = identifier[controller_id]
keyword[else] :
keyword[raise] identifier[UnrecognizedBrokerVersion] (
literal[string]
. identifier[format] ( identifier[version] )) | def _refresh_controller_id(self):
"""Determine the Kafka cluster controller."""
version = self._matching_api_version(MetadataRequest)
if 1 <= version <= 6:
request = MetadataRequest[version]()
response = self._send_request_to_node(self._client.least_loaded_node(), request)
controller_id = response.controller_id
# verify the controller is new enough to support our requests
controller_version = self._client.check_version(controller_id)
if controller_version < (0, 10, 0):
raise IncompatibleBrokerVersion('The controller appears to be running Kafka {}. KafkaAdminClient requires brokers >= 0.10.0.0.'.format(controller_version)) # depends on [control=['if'], data=['controller_version']]
self._controller_id = controller_id # depends on [control=['if'], data=['version']]
else:
raise UnrecognizedBrokerVersion('Kafka Admin interface cannot determine the controller using MetadataRequest_v{}.'.format(version)) |
def auth_finish(self, _unused):
"""Handle success of the legacy authentication."""
self.lock.acquire()
try:
self.__logger.debug("Authenticated")
            self.authenticated = True
            self.state_change("authorized", self.my_jid)
self._post_auth()
finally:
self.lock.release() | def function[auth_finish, parameter[self, _unused]]:
constant[Handle success of the legacy authentication.]
call[name[self].lock.acquire, parameter[]]
<ast.Try object at 0x7da2046226e0> | keyword[def] identifier[auth_finish] ( identifier[self] , identifier[_unused] ):
literal[string]
identifier[self] . identifier[lock] . identifier[acquire] ()
keyword[try] :
identifier[self] . identifier[__logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[authenticated] = keyword[True]
identifier[self] . identifier[state_change] ( literal[string] , identifier[self] . identifier[my_jid] )
identifier[self] . identifier[_post_auth] ()
keyword[finally] :
identifier[self] . identifier[lock] . identifier[release] () | def auth_finish(self, _unused):
"""Handle success of the legacy authentication."""
self.lock.acquire()
try:
self.__logger.debug('Authenticated')
self.authenticated = True
self.state_change('authorized', self.my_jid)
self._post_auth() # depends on [control=['try'], data=[]]
finally:
self.lock.release() |
def validate_client_id(self, client_id, request, *args, **kwargs):
"""Ensure client_id belong to a valid and active client."""
log.debug('Validate client %r', client_id)
client = request.client or self._clientgetter(client_id)
if client:
# attach client to request object
request.client = client
return True
return False | def function[validate_client_id, parameter[self, client_id, request]]:
constant[Ensure client_id belongs to a valid and active client.]
call[name[log].debug, parameter[constant[Validate client %r], name[client_id]]]
variable[client] assign[=] <ast.BoolOp object at 0x7da1b020dc30>
if name[client] begin[:]
name[request].client assign[=] name[client]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[validate_client_id] ( identifier[self] , identifier[client_id] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[log] . identifier[debug] ( literal[string] , identifier[client_id] )
identifier[client] = identifier[request] . identifier[client] keyword[or] identifier[self] . identifier[_clientgetter] ( identifier[client_id] )
keyword[if] identifier[client] :
identifier[request] . identifier[client] = identifier[client]
keyword[return] keyword[True]
keyword[return] keyword[False] | def validate_client_id(self, client_id, request, *args, **kwargs):
"""Ensure client_id belong to a valid and active client."""
log.debug('Validate client %r', client_id)
client = request.client or self._clientgetter(client_id)
if client:
# attach client to request object
request.client = client
return True # depends on [control=['if'], data=[]]
return False |
def _queue_response_channel(self, obj):
"""Generate the feedback channel name from the object's id.
:param obj: The Channels message object.
"""
return '{}.{}'.format(state.MANAGER_EXECUTOR_CHANNELS.queue_response, obj[ExecutorProtocol.DATA_ID]) | def function[_queue_response_channel, parameter[self, obj]]:
constant[Generate the feedback channel name from the object's id.
:param obj: The Channels message object.
]
return[call[constant[{}.{}].format, parameter[name[state].MANAGER_EXECUTOR_CHANNELS.queue_response, call[name[obj]][name[ExecutorProtocol].DATA_ID]]]] | keyword[def] identifier[_queue_response_channel] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[state] . identifier[MANAGER_EXECUTOR_CHANNELS] . identifier[queue_response] , identifier[obj] [ identifier[ExecutorProtocol] . identifier[DATA_ID] ]) | def _queue_response_channel(self, obj):
"""Generate the feedback channel name from the object's id.
:param obj: The Channels message object.
"""
return '{}.{}'.format(state.MANAGER_EXECUTOR_CHANNELS.queue_response, obj[ExecutorProtocol.DATA_ID]) |
def qualified_name(self):
"""Retrieves the fully qualified command name.
This is the full parent name with the command name as well.
For example, in ``?one two three`` the qualified name would be
``one two three``.
"""
parent = self.full_parent_name
if parent:
return parent + ' ' + self.name
else:
return self.name | def function[qualified_name, parameter[self]]:
constant[Retrieves the fully qualified command name.
This is the full parent name with the command name as well.
For example, in ``?one two three`` the qualified name would be
``one two three``.
]
variable[parent] assign[=] name[self].full_parent_name
if name[parent] begin[:]
return[binary_operation[binary_operation[name[parent] + constant[ ]] + name[self].name]] | keyword[def] identifier[qualified_name] ( identifier[self] ):
literal[string]
identifier[parent] = identifier[self] . identifier[full_parent_name]
keyword[if] identifier[parent] :
keyword[return] identifier[parent] + literal[string] + identifier[self] . identifier[name]
keyword[else] :
keyword[return] identifier[self] . identifier[name] | def qualified_name(self):
"""Retrieves the fully qualified command name.
This is the full parent name with the command name as well.
For example, in ``?one two three`` the qualified name would be
``one two three``.
"""
parent = self.full_parent_name
if parent:
return parent + ' ' + self.name # depends on [control=['if'], data=[]]
else:
return self.name |
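
A doctest-style illustration of the concatenation, assuming a hypothetical command object named 'three' nested under parents 'one two', with the attribute exposed as a property:

>>> cmd.full_parent_name
'one two'
>>> cmd.name
'three'
>>> cmd.qualified_name
'one two three'
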
def rmse(self, relative_to='AME2003'):
"""Calculate root mean squared error
Parameters
----------
relative_to : string,
a valid mass table name.
Example:
----------
>>> template = '{0:10}|{1:^6.2f}|{2:^6.2f}|{3:^6.2f}'
>>> print 'Model ', 'AME95 ', 'AME03 ', 'AME12 ' # Table header
... for name in Table.names:
... print template.format(name, Table(name).rmse(relative_to='AME1995'),
... Table(name).rmse(relative_to='AME2003'),
... Table(name).rmse(relative_to='AME2012'))
Model AME95 AME03 AME12
AME2003 | 0.13 | 0.00 | 0.13
AME2003all| 0.42 | 0.40 | 0.71
AME2012 | 0.16 | 0.13 | 0.00
AME2012all| 0.43 | 0.43 | 0.69
AME1995 | 0.00 | 0.13 | 0.16
AME1995all| 0.00 | 0.17 | 0.21
DUZU | 0.52 | 0.52 | 0.76
FRDM95 | 0.79 | 0.78 | 0.95
KTUY05 | 0.78 | 0.77 | 1.03
ETFSI12 | 0.84 | 0.84 | 1.04
HFB14 | 0.84 | 0.83 | 1.02
"""
error = self.error(relative_to=relative_to)
return math.sqrt((error.df ** 2).mean()) | def function[rmse, parameter[self, relative_to]]:
constant[Calculate root mean squared error
Parameters
----------
relative_to : string,
a valid mass table name.
Example:
----------
>>> template = '{0:10}|{1:^6.2f}|{2:^6.2f}|{3:^6.2f}'
>>> print 'Model ', 'AME95 ', 'AME03 ', 'AME12 ' # Table header
... for name in Table.names:
... print template.format(name, Table(name).rmse(relative_to='AME1995'),
... Table(name).rmse(relative_to='AME2003'),
... Table(name).rmse(relative_to='AME2012'))
Model AME95 AME03 AME12
AME2003 | 0.13 | 0.00 | 0.13
AME2003all| 0.42 | 0.40 | 0.71
AME2012 | 0.16 | 0.13 | 0.00
AME2012all| 0.43 | 0.43 | 0.69
AME1995 | 0.00 | 0.13 | 0.16
AME1995all| 0.00 | 0.17 | 0.21
DUZU | 0.52 | 0.52 | 0.76
FRDM95 | 0.79 | 0.78 | 0.95
KTUY05 | 0.78 | 0.77 | 1.03
ETFSI12 | 0.84 | 0.84 | 1.04
HFB14 | 0.84 | 0.83 | 1.02
]
variable[error] assign[=] call[name[self].error, parameter[]]
return[call[name[math].sqrt, parameter[call[binary_operation[name[error].df ** constant[2]].mean, parameter[]]]]] | keyword[def] identifier[rmse] ( identifier[self] , identifier[relative_to] = literal[string] ):
literal[string]
identifier[error] = identifier[self] . identifier[error] ( identifier[relative_to] = identifier[relative_to] )
keyword[return] identifier[math] . identifier[sqrt] (( identifier[error] . identifier[df] ** literal[int] ). identifier[mean] ()) | def rmse(self, relative_to='AME2003'):
"""Calculate root mean squared error
Parameters
----------
relative_to : string,
a valid mass table name.
Example:
----------
>>> template = '{0:10}|{1:^6.2f}|{2:^6.2f}|{3:^6.2f}'
>>> print 'Model ', 'AME95 ', 'AME03 ', 'AME12 ' # Table header
... for name in Table.names:
... print template.format(name, Table(name).rmse(relative_to='AME1995'),
... Table(name).rmse(relative_to='AME2003'),
... Table(name).rmse(relative_to='AME2012'))
Model AME95 AME03 AME12
AME2003 | 0.13 | 0.00 | 0.13
AME2003all| 0.42 | 0.40 | 0.71
AME2012 | 0.16 | 0.13 | 0.00
AME2012all| 0.43 | 0.43 | 0.69
AME1995 | 0.00 | 0.13 | 0.16
AME1995all| 0.00 | 0.17 | 0.21
DUZU | 0.52 | 0.52 | 0.76
FRDM95 | 0.79 | 0.78 | 0.95
KTUY05 | 0.78 | 0.77 | 1.03
ETFSI12 | 0.84 | 0.84 | 1.04
HFB14 | 0.84 | 0.83 | 1.02
"""
error = self.error(relative_to=relative_to)
return math.sqrt((error.df ** 2).mean()) |
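
The statistic is simply the square root of the mean squared per-nuclide error; a self-contained check of the arithmetic, independent of any mass table:

import math

errors = [0.1, -0.2, 0.3]  # model minus reference, in MeV (made-up values)
rmse = math.sqrt(sum(e ** 2 for e in errors) / len(errors))
print(round(rmse, 3))      # 0.216, i.e. sqrt(0.14 / 3)
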
def match(self, s=''):
"""return all options that match, in the name or the description,
    with string `s`; case is disregarded.
Example: ``cma.CMAOptions().match('verb')`` returns the verbosity
options.
"""
match = s.lower()
res = {}
for k in sorted(self):
s = str(k) + '=\'' + str(self[k]) + '\''
if match in s.lower():
res[k] = self[k]
return CMAOptions(res, unchecked=True) | def function[match, parameter[self, s]]:
constant[return all options that match, in the name or the description,
with string `s`; case is disregarded.
Example: ``cma.CMAOptions().match('verb')`` returns the verbosity
options.
]
variable[match] assign[=] call[name[s].lower, parameter[]]
variable[res] assign[=] dictionary[[], []]
for taget[name[k]] in starred[call[name[sorted], parameter[name[self]]]] begin[:]
variable[s] assign[=] binary_operation[binary_operation[binary_operation[call[name[str], parameter[name[k]]] + constant[=']] + call[name[str], parameter[call[name[self]][name[k]]]]] + constant[']]
if compare[name[match] in call[name[s].lower, parameter[]]] begin[:]
call[name[res]][name[k]] assign[=] call[name[self]][name[k]]
return[call[name[CMAOptions], parameter[name[res]]]] | keyword[def] identifier[match] ( identifier[self] , identifier[s] = literal[string] ):
literal[string]
identifier[match] = identifier[s] . identifier[lower] ()
identifier[res] ={}
keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[self] ):
identifier[s] = identifier[str] ( identifier[k] )+ literal[string] + identifier[str] ( identifier[self] [ identifier[k] ])+ literal[string]
keyword[if] identifier[match] keyword[in] identifier[s] . identifier[lower] ():
identifier[res] [ identifier[k] ]= identifier[self] [ identifier[k] ]
keyword[return] identifier[CMAOptions] ( identifier[res] , identifier[unchecked] = keyword[True] ) | def match(self, s=''):
"""return all options that match, in the name or the description,
    with string `s`; case is disregarded.
Example: ``cma.CMAOptions().match('verb')`` returns the verbosity
options.
"""
match = s.lower()
res = {}
for k in sorted(self):
s = str(k) + "='" + str(self[k]) + "'"
if match in s.lower():
res[k] = self[k] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
return CMAOptions(res, unchecked=True) |
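
A standalone sketch of the case-insensitive substring filter that match applies over name='value' strings; CMAOptions is replaced by a plain dict for illustration:

def match_options(options, s=''):
    needle = s.lower()
    return {k: v for k, v in sorted(options.items())
            if needle in ("%s='%s'" % (k, v)).lower()}

opts = {'verb_disp': 100, 'verbose': 1, 'popsize': 10}
print(match_options(opts, 'verb'))  # {'verb_disp': 100, 'verbose': 1}
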
def log_entries(self, time_zone='UTC', is_overview=False,
include=None, fetch_all=True):
"""Query for log entries on an incident instance."""
endpoint = '/'.join((self.endpoint, self.id, 'log_entries'))
query_params = {
'time_zone': time_zone,
'is_overview': json.dumps(is_overview),
}
if include:
query_params['include'] = include
result = self.logEntryFactory.find(
endpoint=endpoint,
api_key=self.api_key,
fetch_all=fetch_all,
**query_params
)
return result | def function[log_entries, parameter[self, time_zone, is_overview, include, fetch_all]]:
constant[Query for log entries on an incident instance.]
variable[endpoint] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da1b06fefe0>, <ast.Attribute object at 0x7da1b06ffa00>, <ast.Constant object at 0x7da1b06ff610>]]]]
variable[query_params] assign[=] dictionary[[<ast.Constant object at 0x7da1b06fc850>, <ast.Constant object at 0x7da1b06ff250>], [<ast.Name object at 0x7da1b06ffd30>, <ast.Call object at 0x7da1b06fcdf0>]]
if name[include] begin[:]
call[name[query_params]][constant[include]] assign[=] name[include]
variable[result] assign[=] call[name[self].logEntryFactory.find, parameter[]]
return[name[result]] | keyword[def] identifier[log_entries] ( identifier[self] , identifier[time_zone] = literal[string] , identifier[is_overview] = keyword[False] ,
identifier[include] = keyword[None] , identifier[fetch_all] = keyword[True] ):
literal[string]
identifier[endpoint] = literal[string] . identifier[join] (( identifier[self] . identifier[endpoint] , identifier[self] . identifier[id] , literal[string] ))
identifier[query_params] ={
literal[string] : identifier[time_zone] ,
literal[string] : identifier[json] . identifier[dumps] ( identifier[is_overview] ),
}
keyword[if] identifier[include] :
identifier[query_params] [ literal[string] ]= identifier[include]
identifier[result] = identifier[self] . identifier[logEntryFactory] . identifier[find] (
identifier[endpoint] = identifier[endpoint] ,
identifier[api_key] = identifier[self] . identifier[api_key] ,
identifier[fetch_all] = identifier[fetch_all] ,
** identifier[query_params]
)
keyword[return] identifier[result] | def log_entries(self, time_zone='UTC', is_overview=False, include=None, fetch_all=True):
"""Query for log entries on an incident instance."""
endpoint = '/'.join((self.endpoint, self.id, 'log_entries'))
query_params = {'time_zone': time_zone, 'is_overview': json.dumps(is_overview)}
if include:
query_params['include'] = include # depends on [control=['if'], data=[]]
result = self.logEntryFactory.find(endpoint=endpoint, api_key=self.api_key, fetch_all=fetch_all, **query_params)
return result |
def on_batch_begin(self, train, **kwargs:Any)->None:
"Record learning rate and momentum at beginning of batch."
if train:
self.lrs.append(self.opt.lr)
self.moms.append(self.opt.mom) | def function[on_batch_begin, parameter[self, train]]:
constant[Record learning rate and momentum at beginning of batch.]
if name[train] begin[:]
call[name[self].lrs.append, parameter[name[self].opt.lr]]
call[name[self].moms.append, parameter[name[self].opt.mom]] | keyword[def] identifier[on_batch_begin] ( identifier[self] , identifier[train] ,** identifier[kwargs] : identifier[Any] )-> keyword[None] :
literal[string]
keyword[if] identifier[train] :
identifier[self] . identifier[lrs] . identifier[append] ( identifier[self] . identifier[opt] . identifier[lr] )
identifier[self] . identifier[moms] . identifier[append] ( identifier[self] . identifier[opt] . identifier[mom] ) | def on_batch_begin(self, train, **kwargs: Any) -> None:
"""Record learning rate and momentum at beginning of batch."""
if train:
self.lrs.append(self.opt.lr)
self.moms.append(self.opt.mom) # depends on [control=['if'], data=[]] |
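
# Standalone sketch of the recording behavior (stand-in optimizer; mirrors
# the callback above without the surrounding training loop):
class _Opt:
    lr, mom = 0.01, 0.9

class _Recorder:
    def __init__(self):
        self.opt, self.lrs, self.moms = _Opt(), [], []
    def on_batch_begin(self, train, **kwargs):
        if train:  # validation batches are skipped
            self.lrs.append(self.opt.lr)
            self.moms.append(self.opt.mom)

rec = _Recorder()
rec.on_batch_begin(train=True)
rec.on_batch_begin(train=False)
assert rec.lrs == [0.01] and rec.moms == [0.9]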
def remove_class(self, ioclass):
"""Remove VNXIOClass instance from policy."""
current_ioclasses = self.ioclasses
new_ioclasses = filter(lambda x: x.name != ioclass.name,
current_ioclasses)
self.modify(new_ioclasses=new_ioclasses) | def function[remove_class, parameter[self, ioclass]]:
constant[Remove VNXIOClass instance from policy.]
variable[current_ioclasses] assign[=] name[self].ioclasses
variable[new_ioclasses] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b1150400>, name[current_ioclasses]]]
call[name[self].modify, parameter[]] | keyword[def] identifier[remove_class] ( identifier[self] , identifier[ioclass] ):
literal[string]
identifier[current_ioclasses] = identifier[self] . identifier[ioclasses]
identifier[new_ioclasses] = identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] . identifier[name] != identifier[ioclass] . identifier[name] ,
identifier[current_ioclasses] )
identifier[self] . identifier[modify] ( identifier[new_ioclasses] = identifier[new_ioclasses] ) | def remove_class(self, ioclass):
"""Remove VNXIOClass instance from policy."""
current_ioclasses = self.ioclasses
new_ioclasses = filter(lambda x: x.name != ioclass.name, current_ioclasses)
self.modify(new_ioclasses=new_ioclasses) |
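
# Standalone sketch of the filtering step (namedtuple stand-ins for
# VNXIOClass; note that `filter` returns a lazy iterator on Python 3):
from collections import namedtuple

_IOClass = namedtuple('_IOClass', 'name')
current = [_IOClass('gold'), _IOClass('silver')]
target = _IOClass('gold')
remaining = filter(lambda x: x.name != target.name, current)
assert [c.name for c in remaining] == ['silver']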
def _controller(self):
"""Return the server controller."""
def server_controller(cmd_id, cmd_body, _):
"""Server controler."""
if not self.init_logginig:
# the reason put the codes here is because we cannot get
# kvstore.rank earlier
head = '%(asctime)-15s Server[' + str(
self.kvstore.rank) + '] %(message)s'
logging.basicConfig(level=logging.DEBUG, format=head)
self.init_logginig = True
if cmd_id == 0:
try:
optimizer = pickle.loads(cmd_body)
except:
raise
self.kvstore.set_optimizer(optimizer)
else:
print("server %d, unknown command (%d, %s)" % (
self.kvstore.rank, cmd_id, cmd_body))
return server_controller | def function[_controller, parameter[self]]:
constant[Return the server controller.]
def function[server_controller, parameter[cmd_id, cmd_body, _]]:
        constant[Server controller.]
if <ast.UnaryOp object at 0x7da2049603d0> begin[:]
variable[head] assign[=] binary_operation[binary_operation[constant[%(asctime)-15s Server[] + call[name[str], parameter[name[self].kvstore.rank]]] + constant[] %(message)s]]
call[name[logging].basicConfig, parameter[]]
name[self].init_logginig assign[=] constant[True]
if compare[name[cmd_id] equal[==] constant[0]] begin[:]
<ast.Try object at 0x7da1b1e064a0>
call[name[self].kvstore.set_optimizer, parameter[name[optimizer]]]
return[name[server_controller]] | keyword[def] identifier[_controller] ( identifier[self] ):
literal[string]
keyword[def] identifier[server_controller] ( identifier[cmd_id] , identifier[cmd_body] , identifier[_] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[init_logginig] :
identifier[head] = literal[string] + identifier[str] (
identifier[self] . identifier[kvstore] . identifier[rank] )+ literal[string]
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[DEBUG] , identifier[format] = identifier[head] )
identifier[self] . identifier[init_logginig] = keyword[True]
keyword[if] identifier[cmd_id] == literal[int] :
keyword[try] :
identifier[optimizer] = identifier[pickle] . identifier[loads] ( identifier[cmd_body] )
keyword[except] :
keyword[raise]
identifier[self] . identifier[kvstore] . identifier[set_optimizer] ( identifier[optimizer] )
keyword[else] :
identifier[print] ( literal[string] %(
identifier[self] . identifier[kvstore] . identifier[rank] , identifier[cmd_id] , identifier[cmd_body] ))
keyword[return] identifier[server_controller] | def _controller(self):
"""Return the server controller."""
def server_controller(cmd_id, cmd_body, _):
"""Server controler."""
if not self.init_logginig:
# the reason put the codes here is because we cannot get
# kvstore.rank earlier
head = '%(asctime)-15s Server[' + str(self.kvstore.rank) + '] %(message)s'
logging.basicConfig(level=logging.DEBUG, format=head)
self.init_logginig = True # depends on [control=['if'], data=[]]
if cmd_id == 0:
try:
optimizer = pickle.loads(cmd_body) # depends on [control=['try'], data=[]]
except:
raise # depends on [control=['except'], data=[]]
self.kvstore.set_optimizer(optimizer) # depends on [control=['if'], data=[]]
else:
print('server %d, unknown command (%d, %s)' % (self.kvstore.rank, cmd_id, cmd_body))
return server_controller |
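
# Standalone sketch of the command protocol (stand-in kvstore; in MXNet the
# closure returned above is handed to the native parameter-server loop, and
# command id 0 carries a pickled optimizer sent by the workers):
import pickle

class _KV:
    rank = 0
    installed = None
    def set_optimizer(self, opt):
        self.installed = opt

class _Server:
    def __init__(self):
        self.kvstore = _KV()
        self.init_logginig = True  # skip the logging-setup branch
    _controller = _controller  # reuse the method defined above

srv = _Server()
handler = srv._controller()
handler(0, pickle.dumps('sgd'), None)
assert srv.kvstore.installed == 'sgd'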
def last_name(anon, obj, field, val):
"""
    Returns a random last name
"""
return anon.faker.last_name(field=field) | def function[last_name, parameter[anon, obj, field, val]]:
constant[
    Returns a random last name
]
return[call[name[anon].faker.last_name, parameter[]]] | keyword[def] identifier[last_name] ( identifier[anon] , identifier[obj] , identifier[field] , identifier[val] ):
literal[string]
keyword[return] identifier[anon] . identifier[faker] . identifier[last_name] ( identifier[field] = identifier[field] ) | def last_name(anon, obj, field, val):
"""
    Returns a random last name
"""
return anon.faker.last_name(field=field) |
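
# Standalone sketch with a stand-in faker (the real `anon.faker` is supplied
# by the anonymizer framework; `obj` and `val` are unused by this helper):
class _Faker:
    def last_name(self, field=None):
        return 'Smith'

class _Anon:
    faker = _Faker()

assert last_name(_Anon(), obj=None, field='last_name', val='Doe') == 'Smith'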
def boxed_text_to_image_block(tag):
"covert boxed-text to an image block containing an inline-graphic"
tag_block = OrderedDict()
image_content = body_block_image_content(first(raw_parser.inline_graphic(tag)))
tag_block["type"] = "image"
set_if_value(tag_block, "doi", doi_uri_to_doi(object_id_doi(tag, tag.name)))
set_if_value(tag_block, "id", tag.get("id"))
set_if_value(tag_block, "image", image_content)
# render paragraphs into a caption
p_tags = raw_parser.paragraph(tag)
caption_content = []
for p_tag in p_tags:
if not raw_parser.inline_graphic(p_tag):
caption_content.append(body_block_content(p_tag))
set_if_value(tag_block, "caption", caption_content)
return tag_block | def function[boxed_text_to_image_block, parameter[tag]]:
    constant[convert boxed-text to an image block containing an inline-graphic]
variable[tag_block] assign[=] call[name[OrderedDict], parameter[]]
variable[image_content] assign[=] call[name[body_block_image_content], parameter[call[name[first], parameter[call[name[raw_parser].inline_graphic, parameter[name[tag]]]]]]]
call[name[tag_block]][constant[type]] assign[=] constant[image]
call[name[set_if_value], parameter[name[tag_block], constant[doi], call[name[doi_uri_to_doi], parameter[call[name[object_id_doi], parameter[name[tag], name[tag].name]]]]]]
call[name[set_if_value], parameter[name[tag_block], constant[id], call[name[tag].get, parameter[constant[id]]]]]
call[name[set_if_value], parameter[name[tag_block], constant[image], name[image_content]]]
variable[p_tags] assign[=] call[name[raw_parser].paragraph, parameter[name[tag]]]
variable[caption_content] assign[=] list[[]]
for taget[name[p_tag]] in starred[name[p_tags]] begin[:]
if <ast.UnaryOp object at 0x7da1b1130d60> begin[:]
call[name[caption_content].append, parameter[call[name[body_block_content], parameter[name[p_tag]]]]]
call[name[set_if_value], parameter[name[tag_block], constant[caption], name[caption_content]]]
return[name[tag_block]] | keyword[def] identifier[boxed_text_to_image_block] ( identifier[tag] ):
literal[string]
identifier[tag_block] = identifier[OrderedDict] ()
identifier[image_content] = identifier[body_block_image_content] ( identifier[first] ( identifier[raw_parser] . identifier[inline_graphic] ( identifier[tag] )))
identifier[tag_block] [ literal[string] ]= literal[string]
identifier[set_if_value] ( identifier[tag_block] , literal[string] , identifier[doi_uri_to_doi] ( identifier[object_id_doi] ( identifier[tag] , identifier[tag] . identifier[name] )))
identifier[set_if_value] ( identifier[tag_block] , literal[string] , identifier[tag] . identifier[get] ( literal[string] ))
identifier[set_if_value] ( identifier[tag_block] , literal[string] , identifier[image_content] )
identifier[p_tags] = identifier[raw_parser] . identifier[paragraph] ( identifier[tag] )
identifier[caption_content] =[]
keyword[for] identifier[p_tag] keyword[in] identifier[p_tags] :
keyword[if] keyword[not] identifier[raw_parser] . identifier[inline_graphic] ( identifier[p_tag] ):
identifier[caption_content] . identifier[append] ( identifier[body_block_content] ( identifier[p_tag] ))
identifier[set_if_value] ( identifier[tag_block] , literal[string] , identifier[caption_content] )
keyword[return] identifier[tag_block] | def boxed_text_to_image_block(tag):
"""covert boxed-text to an image block containing an inline-graphic"""
tag_block = OrderedDict()
image_content = body_block_image_content(first(raw_parser.inline_graphic(tag)))
tag_block['type'] = 'image'
set_if_value(tag_block, 'doi', doi_uri_to_doi(object_id_doi(tag, tag.name)))
set_if_value(tag_block, 'id', tag.get('id'))
set_if_value(tag_block, 'image', image_content)
# render paragraphs into a caption
p_tags = raw_parser.paragraph(tag)
caption_content = []
for p_tag in p_tags:
if not raw_parser.inline_graphic(p_tag):
caption_content.append(body_block_content(p_tag)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p_tag']]
set_if_value(tag_block, 'caption', caption_content)
return tag_block |
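
# Shape sketch of the returned block (all values hypothetical; keys are set
# only when a value exists, and `caption` collects every paragraph that
# carries no inline graphic):
from collections import OrderedDict

example_block = OrderedDict([
    ('type', 'image'),
    ('doi', '10.7554/eLife.00000.001'),
    ('id', 'box1'),
    ('image', {'uri': 'elife-00000-box1.tif'}),
    ('caption', [{'type': 'paragraph', 'text': '...'}]),
])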
def blur(self, image, geometry, options):
"""
Wrapper for ``_blur``
"""
if options.get('blur'):
return self._blur(image, int(options.get('blur')))
return image | def function[blur, parameter[self, image, geometry, options]]:
constant[
Wrapper for ``_blur``
]
if call[name[options].get, parameter[constant[blur]]] begin[:]
return[call[name[self]._blur, parameter[name[image], call[name[int], parameter[call[name[options].get, parameter[constant[blur]]]]]]]]
return[name[image]] | keyword[def] identifier[blur] ( identifier[self] , identifier[image] , identifier[geometry] , identifier[options] ):
literal[string]
keyword[if] identifier[options] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[_blur] ( identifier[image] , identifier[int] ( identifier[options] . identifier[get] ( literal[string] )))
keyword[return] identifier[image] | def blur(self, image, geometry, options):
"""
Wrapper for ``_blur``
"""
if options.get('blur'):
return self._blur(image, int(options.get('blur'))) # depends on [control=['if'], data=[]]
return image |
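
# Standalone sketch of the option handling (stand-in engine; `_blur` stands
# for the backend-specific filter the wrapper above delegates to):
class _Engine:
    def _blur(self, image, radius):
        return ('blurred', image, radius)
    blur = blur  # reuse the method defined above

eng = _Engine()
assert eng.blur('img', None, {'blur': '8'}) == ('blurred', 'img', 8)
assert eng.blur('img', None, {}) == 'img'  # no blur option: image untouched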
def _forward_kernel(self, F, inputs, states, **kwargs):
""" forward using CUDNN or CPU kenrel"""
if self._layout == 'NTC':
inputs = F.swapaxes(inputs, dim1=0, dim2=1)
if self._projection_size is None:
params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1)
for t in ['weight', 'bias']
for l in range(self._num_layers)
for d in ['l', 'r'][:self._dir]
for g in ['i2h', 'h2h'])
else:
params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1)
for t in ['weight', 'bias']
for l in range(self._num_layers)
for d in ['l', 'r'][:self._dir]
for g in ['i2h', 'h2h', 'h2r']
if g != 'h2r' or t != 'bias')
params = F._internal._rnn_param_concat(*params, dim=0)
rnn = F.RNN(inputs, params, *states, state_size=self._hidden_size,
projection_size=self._projection_size,
num_layers=self._num_layers, bidirectional=self._dir == 2,
p=self._dropout, state_outputs=True, mode=self._mode,
lstm_state_clip_min=self._lstm_state_clip_min,
lstm_state_clip_max=self._lstm_state_clip_max,
lstm_state_clip_nan=self._lstm_state_clip_nan)
if self._mode == 'lstm':
outputs, states = rnn[0], [rnn[1], rnn[2]]
else:
outputs, states = rnn[0], [rnn[1]]
if self._layout == 'NTC':
outputs = F.swapaxes(outputs, dim1=0, dim2=1)
return outputs, states | def function[_forward_kernel, parameter[self, F, inputs, states]]:
    constant[ forward using CUDNN or CPU kernel]
if compare[name[self]._layout equal[==] constant[NTC]] begin[:]
variable[inputs] assign[=] call[name[F].swapaxes, parameter[name[inputs]]]
if compare[name[self]._projection_size is constant[None]] begin[:]
variable[params] assign[=] <ast.GeneratorExp object at 0x7da1b208ae00>
variable[params] assign[=] call[name[F]._internal._rnn_param_concat, parameter[<ast.Starred object at 0x7da1b204f220>]]
variable[rnn] assign[=] call[name[F].RNN, parameter[name[inputs], name[params], <ast.Starred object at 0x7da1b204ece0>]]
if compare[name[self]._mode equal[==] constant[lstm]] begin[:]
<ast.Tuple object at 0x7da1b204f280> assign[=] tuple[[<ast.Subscript object at 0x7da1b204c880>, <ast.List object at 0x7da1b1ef17e0>]]
if compare[name[self]._layout equal[==] constant[NTC]] begin[:]
variable[outputs] assign[=] call[name[F].swapaxes, parameter[name[outputs]]]
return[tuple[[<ast.Name object at 0x7da1b204da50>, <ast.Name object at 0x7da1b204e380>]]] | keyword[def] identifier[_forward_kernel] ( identifier[self] , identifier[F] , identifier[inputs] , identifier[states] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[_layout] == literal[string] :
identifier[inputs] = identifier[F] . identifier[swapaxes] ( identifier[inputs] , identifier[dim1] = literal[int] , identifier[dim2] = literal[int] )
keyword[if] identifier[self] . identifier[_projection_size] keyword[is] keyword[None] :
identifier[params] =( identifier[kwargs] [ literal[string] . identifier[format] ( identifier[d] , identifier[l] , identifier[g] , identifier[t] )]. identifier[reshape] (- literal[int] )
keyword[for] identifier[t] keyword[in] [ literal[string] , literal[string] ]
keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[self] . identifier[_num_layers] )
keyword[for] identifier[d] keyword[in] [ literal[string] , literal[string] ][: identifier[self] . identifier[_dir] ]
keyword[for] identifier[g] keyword[in] [ literal[string] , literal[string] ])
keyword[else] :
identifier[params] =( identifier[kwargs] [ literal[string] . identifier[format] ( identifier[d] , identifier[l] , identifier[g] , identifier[t] )]. identifier[reshape] (- literal[int] )
keyword[for] identifier[t] keyword[in] [ literal[string] , literal[string] ]
keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[self] . identifier[_num_layers] )
keyword[for] identifier[d] keyword[in] [ literal[string] , literal[string] ][: identifier[self] . identifier[_dir] ]
keyword[for] identifier[g] keyword[in] [ literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[g] != literal[string] keyword[or] identifier[t] != literal[string] )
identifier[params] = identifier[F] . identifier[_internal] . identifier[_rnn_param_concat] (* identifier[params] , identifier[dim] = literal[int] )
identifier[rnn] = identifier[F] . identifier[RNN] ( identifier[inputs] , identifier[params] ,* identifier[states] , identifier[state_size] = identifier[self] . identifier[_hidden_size] ,
identifier[projection_size] = identifier[self] . identifier[_projection_size] ,
identifier[num_layers] = identifier[self] . identifier[_num_layers] , identifier[bidirectional] = identifier[self] . identifier[_dir] == literal[int] ,
identifier[p] = identifier[self] . identifier[_dropout] , identifier[state_outputs] = keyword[True] , identifier[mode] = identifier[self] . identifier[_mode] ,
identifier[lstm_state_clip_min] = identifier[self] . identifier[_lstm_state_clip_min] ,
identifier[lstm_state_clip_max] = identifier[self] . identifier[_lstm_state_clip_max] ,
identifier[lstm_state_clip_nan] = identifier[self] . identifier[_lstm_state_clip_nan] )
keyword[if] identifier[self] . identifier[_mode] == literal[string] :
identifier[outputs] , identifier[states] = identifier[rnn] [ literal[int] ],[ identifier[rnn] [ literal[int] ], identifier[rnn] [ literal[int] ]]
keyword[else] :
identifier[outputs] , identifier[states] = identifier[rnn] [ literal[int] ],[ identifier[rnn] [ literal[int] ]]
keyword[if] identifier[self] . identifier[_layout] == literal[string] :
identifier[outputs] = identifier[F] . identifier[swapaxes] ( identifier[outputs] , identifier[dim1] = literal[int] , identifier[dim2] = literal[int] )
keyword[return] identifier[outputs] , identifier[states] | def _forward_kernel(self, F, inputs, states, **kwargs):
""" forward using CUDNN or CPU kenrel"""
if self._layout == 'NTC':
inputs = F.swapaxes(inputs, dim1=0, dim2=1) # depends on [control=['if'], data=[]]
if self._projection_size is None:
params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1) for t in ['weight', 'bias'] for l in range(self._num_layers) for d in ['l', 'r'][:self._dir] for g in ['i2h', 'h2h']) # depends on [control=['if'], data=[]]
else:
params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1) for t in ['weight', 'bias'] for l in range(self._num_layers) for d in ['l', 'r'][:self._dir] for g in ['i2h', 'h2h', 'h2r'] if g != 'h2r' or t != 'bias')
params = F._internal._rnn_param_concat(*params, dim=0)
rnn = F.RNN(inputs, params, *states, state_size=self._hidden_size, projection_size=self._projection_size, num_layers=self._num_layers, bidirectional=self._dir == 2, p=self._dropout, state_outputs=True, mode=self._mode, lstm_state_clip_min=self._lstm_state_clip_min, lstm_state_clip_max=self._lstm_state_clip_max, lstm_state_clip_nan=self._lstm_state_clip_nan)
if self._mode == 'lstm':
(outputs, states) = (rnn[0], [rnn[1], rnn[2]]) # depends on [control=['if'], data=[]]
else:
(outputs, states) = (rnn[0], [rnn[1]])
if self._layout == 'NTC':
outputs = F.swapaxes(outputs, dim1=0, dim2=1) # depends on [control=['if'], data=[]]
return (outputs, states) |
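
# Layout sketch (assumes MXNet is installed): with layout 'NTC' the inputs
# are swapped to time-major 'TNC' before the fused F.RNN call, and the
# outputs are swapped back afterwards.
import mxnet as mx

x = mx.nd.zeros((2, 5, 8))                 # (N=batch, T=time, C=channels)
x_tnc = mx.nd.swapaxes(x, dim1=0, dim2=1)
assert x_tnc.shape == (5, 2, 8)            # (T, N, C) as the RNN op expects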
def vcsmode_vcs_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
vcsmode = ET.SubElement(config, "vcsmode", xmlns="urn:brocade.com:mgmt:brocade-vcs")
vcs_mode = ET.SubElement(vcsmode, "vcs-mode")
vcs_mode.text = kwargs.pop('vcs_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[vcsmode_vcs_mode, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[vcsmode] assign[=] call[name[ET].SubElement, parameter[name[config], constant[vcsmode]]]
variable[vcs_mode] assign[=] call[name[ET].SubElement, parameter[name[vcsmode], constant[vcs-mode]]]
name[vcs_mode].text assign[=] call[name[kwargs].pop, parameter[constant[vcs_mode]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[vcsmode_vcs_mode] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[vcsmode] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[vcs_mode] = identifier[ET] . identifier[SubElement] ( identifier[vcsmode] , literal[string] )
identifier[vcs_mode] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def vcsmode_vcs_mode(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
vcsmode = ET.SubElement(config, 'vcsmode', xmlns='urn:brocade.com:mgmt:brocade-vcs')
vcs_mode = ET.SubElement(vcsmode, 'vcs-mode')
vcs_mode.text = kwargs.pop('vcs_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
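
# Standalone sketch of the generated payload (ET assumed to be
# xml.etree.ElementTree, matching the calls above; the callback would
# normally ship this document to the device):
import xml.etree.ElementTree as ET

config = ET.Element('config')
vcsmode = ET.SubElement(config, 'vcsmode', xmlns='urn:brocade.com:mgmt:brocade-vcs')
ET.SubElement(vcsmode, 'vcs-mode').text = '1'
print(ET.tostring(config).decode())
# <config><vcsmode xmlns="urn:brocade.com:mgmt:brocade-vcs"><vcs-mode>1</vcs-mode></vcsmode></config>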
def extend_selection():
"""Checks is the selection is to be extended
The selection is to be extended, if a special modifier key (typically <Ctrl>) is being pressed.
:return: If to extend the selection
:rtype: True
"""
from rafcon.gui.singleton import main_window_controller
currently_pressed_keys = main_window_controller.currently_pressed_keys if main_window_controller else set()
if any(key in currently_pressed_keys for key in [constants.EXTEND_SELECTION_KEY,
constants.EXTEND_SELECTION_KEY_ALT]):
return True
return False | def function[extend_selection, parameter[]]:
    constant[Checks whether the selection is to be extended
    The selection is to be extended if a special modifier key (typically <Ctrl>) is being pressed.
    :return: Whether to extend the selection
    :rtype: bool
]
from relative_module[rafcon.gui.singleton] import module[main_window_controller]
variable[currently_pressed_keys] assign[=] <ast.IfExp object at 0x7da18ede6890>
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18ede46a0>]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[extend_selection] ():
literal[string]
keyword[from] identifier[rafcon] . identifier[gui] . identifier[singleton] keyword[import] identifier[main_window_controller]
identifier[currently_pressed_keys] = identifier[main_window_controller] . identifier[currently_pressed_keys] keyword[if] identifier[main_window_controller] keyword[else] identifier[set] ()
keyword[if] identifier[any] ( identifier[key] keyword[in] identifier[currently_pressed_keys] keyword[for] identifier[key] keyword[in] [ identifier[constants] . identifier[EXTEND_SELECTION_KEY] ,
identifier[constants] . identifier[EXTEND_SELECTION_KEY_ALT] ]):
keyword[return] keyword[True]
keyword[return] keyword[False] | def extend_selection():
"""Checks is the selection is to be extended
The selection is to be extended, if a special modifier key (typically <Ctrl>) is being pressed.
:return: If to extend the selection
:rtype: True
"""
from rafcon.gui.singleton import main_window_controller
currently_pressed_keys = main_window_controller.currently_pressed_keys if main_window_controller else set()
if any((key in currently_pressed_keys for key in [constants.EXTEND_SELECTION_KEY, constants.EXTEND_SELECTION_KEY_ALT])):
return True # depends on [control=['if'], data=[]]
return False |
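
# Standalone sketch of the modifier check (literal stand-ins for the rafcon
# constants and for the main-window controller's pressed-key set):
EXTEND_SELECTION_KEY, EXTEND_SELECTION_KEY_ALT = 'Control_L', 'Control_R'
currently_pressed_keys = {'Control_L', 'a'}
extend = any(key in currently_pressed_keys
             for key in (EXTEND_SELECTION_KEY, EXTEND_SELECTION_KEY_ALT))
assert extend is True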
def change_color(self, selections, value):
        '''Change the color of each selected atom to a certain value. *value*
        should be an RGB(A) tuple, or None to restore the default color scheme.
'''
if 'atoms' in selections:
atms = selections['atoms'].mask
if value is None:
#self.radii_state.array[atms] = [vdw_radii.get(t) * 0.3 for t in self.system.type_array[atms]]
self.atom_colors.array[atms, 0:3] = [self.color_scheme.get(t, colors.deep_pink)[0:3]
for t in self.system.type_array[atms]]
else:
self.atom_colors.array[atms, 0:3] = value[0:3]
self.atom_renderer.update_colors(self.atom_colors.array)
self.on_atom_colors_changed() | def function[change_color, parameter[self, selections, value]]:
    constant[Change the color of each selected atom to a certain value. *value*
    should be an RGB(A) tuple, or None to restore the default color scheme.
]
if compare[constant[atoms] in name[selections]] begin[:]
variable[atms] assign[=] call[name[selections]][constant[atoms]].mask
if compare[name[value] is constant[None]] begin[:]
call[name[self].atom_colors.array][tuple[[<ast.Name object at 0x7da18f8104f0>, <ast.Slice object at 0x7da18f813b80>]]] assign[=] <ast.ListComp object at 0x7da18f813d90>
call[name[self].atom_renderer.update_colors, parameter[name[self].atom_colors.array]]
call[name[self].on_atom_colors_changed, parameter[]] | keyword[def] identifier[change_color] ( identifier[self] , identifier[selections] , identifier[value] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[selections] :
identifier[atms] = identifier[selections] [ literal[string] ]. identifier[mask]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[self] . identifier[atom_colors] . identifier[array] [ identifier[atms] , literal[int] : literal[int] ]=[ identifier[self] . identifier[color_scheme] . identifier[get] ( identifier[t] , identifier[colors] . identifier[deep_pink] )[ literal[int] : literal[int] ]
keyword[for] identifier[t] keyword[in] identifier[self] . identifier[system] . identifier[type_array] [ identifier[atms] ]]
keyword[else] :
identifier[self] . identifier[atom_colors] . identifier[array] [ identifier[atms] , literal[int] : literal[int] ]= identifier[value] [ literal[int] : literal[int] ]
identifier[self] . identifier[atom_renderer] . identifier[update_colors] ( identifier[self] . identifier[atom_colors] . identifier[array] )
identifier[self] . identifier[on_atom_colors_changed] () | def change_color(self, selections, value):
"""Change the color of each atom by a certain value. *value*
should be a tuple.
"""
if 'atoms' in selections:
atms = selections['atoms'].mask
if value is None:
#self.radii_state.array[atms] = [vdw_radii.get(t) * 0.3 for t in self.system.type_array[atms]]
self.atom_colors.array[atms, 0:3] = [self.color_scheme.get(t, colors.deep_pink)[0:3] for t in self.system.type_array[atms]] # depends on [control=['if'], data=[]]
else:
self.atom_colors.array[atms, 0:3] = value[0:3] # depends on [control=['if'], data=['selections']]
self.atom_renderer.update_colors(self.atom_colors.array)
self.on_atom_colors_changed() |
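
# Standalone sketch of the masked color assignment (numpy assumed; the
# viewer keeps one RGBA row per atom and writes only the selected rows):
import numpy as np

atom_colors = np.zeros((4, 4))
mask = np.array([True, False, True, False])
atom_colors[mask, 0:3] = (1.0, 0.0, 0.0)   # recolor the selection red
assert atom_colors[0, 0] == 1.0 and atom_colors[1, 0] == 0.0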
def insert_knot(obj, param, num, **kwargs):
""" Inserts knots n-times to a spline geometry.
The following code snippet illustrates the usage of this function:
.. code-block:: python
# Insert knot u=0.5 to a curve 2 times
operations.insert_knot(curve, [0.5], [2])
# Insert knot v=0.25 to a surface 1 time
operations.insert_knot(surface, [None, 0.25], [0, 1])
# Insert knots u=0.75, v=0.25 to a surface 2 and 1 times, respectively
operations.insert_knot(surface, [0.75, 0.25], [2, 1])
# Insert knot w=0.5 to a volume 1 time
operations.insert_knot(volume, [None, None, 0.5], [0, 0, 1])
    Please note that the input spline geometry object will always be updated if the knot insertion operation is successful.
Keyword Arguments:
* ``check_num``: enables/disables operation validity checks. *Default: True*
:param obj: spline geometry
:type obj: abstract.SplineGeometry
:param param: knot(s) to be inserted in [u, v, w] format
:type param: list, tuple
:param num: number of knot insertions in [num_u, num_v, num_w] format
:type num: list, tuple
:return: updated spline geometry
"""
# Get keyword arguments
check_num = kwargs.get('check_num', True) # can be set to False when the caller checks number of insertions
if check_num:
# Check the validity of number of insertions
if not isinstance(num, (list, tuple)):
raise GeomdlException("The number of insertions must be a list or a tuple",
data=dict(num=num))
if len(num) != obj.pdimension:
raise GeomdlException("The length of the num array must be equal to the number of parametric dimensions",
data=dict(pdim=obj.pdimension, num_len=len(num)))
for idx, val in enumerate(num):
if val < 0:
                raise GeomdlException('Number of insertions must be a non-negative integer value',
data=dict(idx=idx, num=val))
# Start curve knot insertion
if isinstance(obj, abstract.Curve):
if param[0] is not None and num[0] > 0:
# Find knot multiplicity
s = helpers.find_multiplicity(param[0], obj.knotvector)
            # Check if it is possible to add that many knots
if check_num and num[0] > obj.degree - s:
raise GeomdlException("Knot " + str(param[0]) + " cannot be inserted " + str(num[0]) + " times",
data=dict(knot=param[0], num=num[0], multiplicity=s))
# Find knot span
span = helpers.find_span_linear(obj.degree, obj.knotvector, obj.ctrlpts_size, param[0])
# Compute new knot vector
kv_new = helpers.knot_insertion_kv(obj.knotvector, param[0], span, num[0])
# Compute new control points
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
cpts_tmp = helpers.knot_insertion(obj.degree, obj.knotvector, cpts, param[0],
num=num[0], s=s, span=span)
# Update curve
obj.set_ctrlpts(cpts_tmp)
obj.knotvector = kv_new
# Start surface knot insertion
if isinstance(obj, abstract.Surface):
# u-direction
if param[0] is not None and num[0] > 0:
# Find knot multiplicity
s_u = helpers.find_multiplicity(param[0], obj.knotvector_u)
            # Check if it is possible to add that many knots
if check_num and num[0] > obj.degree_u - s_u:
raise GeomdlException("Knot " + str(param[0]) + " cannot be inserted " + str(num[0]) + " times (u-dir)",
data=dict(knot=param[0], num=num[0], multiplicity=s_u))
# Find knot span
span_u = helpers.find_span_linear(obj.degree_u, obj.knotvector_u, obj.ctrlpts_size_u, param[0])
# Compute new knot vector
kv_u = helpers.knot_insertion_kv(obj.knotvector_u, param[0], span_u, num[0])
# Get curves
cpts_tmp = []
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
for v in range(obj.ctrlpts_size_v):
ccu = [cpts[v + (obj.ctrlpts_size_v * u)] for u in range(obj.ctrlpts_size_u)]
ctrlpts_tmp = helpers.knot_insertion(obj.degree_u, obj.knotvector_u, ccu, param[0],
num=num[0], s=s_u, span=span_u)
cpts_tmp += ctrlpts_tmp
# Update the surface after knot insertion
obj.set_ctrlpts(compatibility.flip_ctrlpts_u(cpts_tmp, obj.ctrlpts_size_u + num[0], obj.ctrlpts_size_v),
obj.ctrlpts_size_u + num[0], obj.ctrlpts_size_v)
obj.knotvector_u = kv_u
# v-direction
if param[1] is not None and num[1] > 0:
# Find knot multiplicity
s_v = helpers.find_multiplicity(param[1], obj.knotvector_v)
            # Check if it is possible to add that many knots
if check_num and num[1] > obj.degree_v - s_v:
raise GeomdlException("Knot " + str(param[1]) + " cannot be inserted " + str(num[1]) + " times (v-dir)",
data=dict(knot=param[1], num=num[1], multiplicity=s_v))
# Find knot span
span_v = helpers.find_span_linear(obj.degree_v, obj.knotvector_v, obj.ctrlpts_size_v, param[1])
# Compute new knot vector
kv_v = helpers.knot_insertion_kv(obj.knotvector_v, param[1], span_v, num[1])
# Get curves
cpts_tmp = []
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
for u in range(obj.ctrlpts_size_u):
ccv = [cpts[v + (obj.ctrlpts_size_v * u)] for v in range(obj.ctrlpts_size_v)]
ctrlpts_tmp = helpers.knot_insertion(obj.degree_v, obj.knotvector_v, ccv, param[1],
num=num[1], s=s_v, span=span_v)
cpts_tmp += ctrlpts_tmp
# Update the surface after knot insertion
obj.set_ctrlpts(cpts_tmp, obj.ctrlpts_size_u, obj.ctrlpts_size_v + num[1])
obj.knotvector_v = kv_v
# Start volume knot insertion
if isinstance(obj, abstract.Volume):
# u-direction
if param[0] is not None and num[0] > 0:
# Find knot multiplicity
s_u = helpers.find_multiplicity(param[0], obj.knotvector_u)
            # Check if it is possible to add that many knots
if check_num and num[0] > obj.degree_u - s_u:
raise GeomdlException("Knot " + str(param[0]) + " cannot be inserted " + str(num[0]) + " times (u-dir)",
data=dict(knot=param[0], num=num[0], multiplicity=s_u))
# Find knot span
span_u = helpers.find_span_linear(obj.degree_u, obj.knotvector_u, obj.ctrlpts_size_u, param[0])
# Compute new knot vector
kv_u = helpers.knot_insertion_kv(obj.knotvector_u, param[0], span_u, num[0])
# Use Pw if rational
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
# Construct 2-dimensional structure
cpt2d = []
for u in range(obj.ctrlpts_size_u):
temp_surf = []
for w in range(obj.ctrlpts_size_w):
for v in range(obj.ctrlpts_size_v):
temp_pt = cpts[v + (u * obj.ctrlpts_size_v) + (w * obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
temp_surf.append(temp_pt)
cpt2d.append(temp_surf)
# Compute new control points
ctrlpts_tmp = helpers.knot_insertion(obj.degree_u, obj.knotvector_u, cpt2d, param[0],
num=num[0], s=s_u, span=span_u)
# Flatten to 1-dimensional structure
ctrlpts_new = []
for w in range(obj.ctrlpts_size_w):
for u in range(obj.ctrlpts_size_u + num[0]):
for v in range(obj.ctrlpts_size_v):
temp_pt = ctrlpts_tmp[u][v + (w * obj.ctrlpts_size_v)]
ctrlpts_new.append(temp_pt)
# Update the volume after knot insertion
obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u + num[0], obj.ctrlpts_size_v, obj.ctrlpts_size_w)
obj.knotvector_u = kv_u
# v-direction
if param[1] is not None and num[1] > 0:
# Find knot multiplicity
s_v = helpers.find_multiplicity(param[1], obj.knotvector_v)
            # Check if it is possible to add that many knots
if check_num and num[1] > obj.degree_v - s_v:
raise GeomdlException("Knot " + str(param[1]) + " cannot be inserted " + str(num[1]) + " times (v-dir)",
data=dict(knot=param[1], num=num[1], multiplicity=s_v))
# Find knot span
span_v = helpers.find_span_linear(obj.degree_v, obj.knotvector_v, obj.ctrlpts_size_v, param[1])
# Compute new knot vector
kv_v = helpers.knot_insertion_kv(obj.knotvector_v, param[1], span_v, num[1])
# Use Pw if rational
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
# Construct 2-dimensional structure
cpt2d = []
for v in range(obj.ctrlpts_size_v):
temp_surf = []
for w in range(obj.ctrlpts_size_w):
for u in range(obj.ctrlpts_size_u):
temp_pt = cpts[v + (u * obj.ctrlpts_size_v) + (w * obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
temp_surf.append(temp_pt)
cpt2d.append(temp_surf)
# Compute new control points
ctrlpts_tmp = helpers.knot_insertion(obj.degree_v, obj.knotvector_v, cpt2d, param[1],
num=num[1], s=s_v, span=span_v)
# Flatten to 1-dimensional structure
ctrlpts_new = []
for w in range(obj.ctrlpts_size_w):
for u in range(obj.ctrlpts_size_u):
for v in range(obj.ctrlpts_size_v + num[1]):
temp_pt = ctrlpts_tmp[v][u + (w * obj.ctrlpts_size_u)]
ctrlpts_new.append(temp_pt)
# Update the volume after knot insertion
obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u, obj.ctrlpts_size_v + num[1], obj.ctrlpts_size_w)
obj.knotvector_v = kv_v
# w-direction
if param[2] is not None and num[2] > 0:
# Find knot multiplicity
s_w = helpers.find_multiplicity(param[2], obj.knotvector_w)
            # Check if it is possible to add that many knots
if check_num and num[2] > obj.degree_w - s_w:
raise GeomdlException("Knot " + str(param[2]) + " cannot be inserted " + str(num[2]) + " times (w-dir)",
data=dict(knot=param[2], num=num[2], multiplicity=s_w))
# Find knot span
span_w = helpers.find_span_linear(obj.degree_w, obj.knotvector_w, obj.ctrlpts_size_w, param[2])
# Compute new knot vector
kv_w = helpers.knot_insertion_kv(obj.knotvector_w, param[2], span_w, num[2])
# Use Pw if rational
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
# Construct 2-dimensional structure
cpt2d = []
for w in range(obj.ctrlpts_size_w):
temp_surf = [cpts[uv + (w * obj.ctrlpts_size_u * obj.ctrlpts_size_v)] for uv in
range(obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
cpt2d.append(temp_surf)
# Compute new control points
ctrlpts_tmp = helpers.knot_insertion(obj.degree_w, obj.knotvector_w, cpt2d, param[2],
num=num[2], s=s_w, span=span_w)
# Flatten to 1-dimensional structure
ctrlpts_new = []
for w in range(obj.ctrlpts_size_w + num[2]):
ctrlpts_new += ctrlpts_tmp[w]
# Update the volume after knot insertion
obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u, obj.ctrlpts_size_v, obj.ctrlpts_size_w + num[2])
obj.knotvector_w = kv_w
# Return updated spline geometry
return obj | def function[insert_knot, parameter[obj, param, num]]:
constant[ Inserts knots n-times to a spline geometry.
The following code snippet illustrates the usage of this function:
.. code-block:: python
# Insert knot u=0.5 to a curve 2 times
operations.insert_knot(curve, [0.5], [2])
# Insert knot v=0.25 to a surface 1 time
operations.insert_knot(surface, [None, 0.25], [0, 1])
# Insert knots u=0.75, v=0.25 to a surface 2 and 1 times, respectively
operations.insert_knot(surface, [0.75, 0.25], [2, 1])
# Insert knot w=0.5 to a volume 1 time
operations.insert_knot(volume, [None, None, 0.5], [0, 0, 1])
    Please note that the input spline geometry object will always be updated if the knot insertion operation is successful.
Keyword Arguments:
* ``check_num``: enables/disables operation validity checks. *Default: True*
:param obj: spline geometry
:type obj: abstract.SplineGeometry
:param param: knot(s) to be inserted in [u, v, w] format
:type param: list, tuple
:param num: number of knot insertions in [num_u, num_v, num_w] format
:type num: list, tuple
:return: updated spline geometry
]
variable[check_num] assign[=] call[name[kwargs].get, parameter[constant[check_num], constant[True]]]
if name[check_num] begin[:]
if <ast.UnaryOp object at 0x7da1b1517c10> begin[:]
<ast.Raise object at 0x7da1b1517ac0>
if compare[call[name[len], parameter[name[num]]] not_equal[!=] name[obj].pdimension] begin[:]
<ast.Raise object at 0x7da1b15177c0>
for taget[tuple[[<ast.Name object at 0x7da1b15174c0>, <ast.Name object at 0x7da1b1517490>]]] in starred[call[name[enumerate], parameter[name[num]]]] begin[:]
if compare[name[val] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1517310>
if call[name[isinstance], parameter[name[obj], name[abstract].Curve]] begin[:]
if <ast.BoolOp object at 0x7da1b1516fb0> begin[:]
variable[s] assign[=] call[name[helpers].find_multiplicity, parameter[call[name[param]][constant[0]], name[obj].knotvector]]
if <ast.BoolOp object at 0x7da1b1516b90> begin[:]
<ast.Raise object at 0x7da1b15169b0>
variable[span] assign[=] call[name[helpers].find_span_linear, parameter[name[obj].degree, name[obj].knotvector, name[obj].ctrlpts_size, call[name[param]][constant[0]]]]
variable[kv_new] assign[=] call[name[helpers].knot_insertion_kv, parameter[name[obj].knotvector, call[name[param]][constant[0]], name[span], call[name[num]][constant[0]]]]
variable[cpts] assign[=] <ast.IfExp object at 0x7da1b1515de0>
variable[cpts_tmp] assign[=] call[name[helpers].knot_insertion, parameter[name[obj].degree, name[obj].knotvector, name[cpts], call[name[param]][constant[0]]]]
call[name[obj].set_ctrlpts, parameter[name[cpts_tmp]]]
name[obj].knotvector assign[=] name[kv_new]
if call[name[isinstance], parameter[name[obj], name[abstract].Surface]] begin[:]
if <ast.BoolOp object at 0x7da1b1510f40> begin[:]
variable[s_u] assign[=] call[name[helpers].find_multiplicity, parameter[call[name[param]][constant[0]], name[obj].knotvector_u]]
if <ast.BoolOp object at 0x7da1b1510b20> begin[:]
<ast.Raise object at 0x7da1b1510940>
variable[span_u] assign[=] call[name[helpers].find_span_linear, parameter[name[obj].degree_u, name[obj].knotvector_u, name[obj].ctrlpts_size_u, call[name[param]][constant[0]]]]
variable[kv_u] assign[=] call[name[helpers].knot_insertion_kv, parameter[name[obj].knotvector_u, call[name[param]][constant[0]], name[span_u], call[name[num]][constant[0]]]]
variable[cpts_tmp] assign[=] list[[]]
variable[cpts] assign[=] <ast.IfExp object at 0x7da1b1646650>
for taget[name[v]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_v]]] begin[:]
variable[ccu] assign[=] <ast.ListComp object at 0x7da1b16a87f0>
variable[ctrlpts_tmp] assign[=] call[name[helpers].knot_insertion, parameter[name[obj].degree_u, name[obj].knotvector_u, name[ccu], call[name[param]][constant[0]]]]
<ast.AugAssign object at 0x7da1b16ab2e0>
call[name[obj].set_ctrlpts, parameter[call[name[compatibility].flip_ctrlpts_u, parameter[name[cpts_tmp], binary_operation[name[obj].ctrlpts_size_u + call[name[num]][constant[0]]], name[obj].ctrlpts_size_v]], binary_operation[name[obj].ctrlpts_size_u + call[name[num]][constant[0]]], name[obj].ctrlpts_size_v]]
name[obj].knotvector_u assign[=] name[kv_u]
if <ast.BoolOp object at 0x7da1b16a9180> begin[:]
variable[s_v] assign[=] call[name[helpers].find_multiplicity, parameter[call[name[param]][constant[1]], name[obj].knotvector_v]]
if <ast.BoolOp object at 0x7da1b16aa890> begin[:]
<ast.Raise object at 0x7da1b16ab580>
variable[span_v] assign[=] call[name[helpers].find_span_linear, parameter[name[obj].degree_v, name[obj].knotvector_v, name[obj].ctrlpts_size_v, call[name[param]][constant[1]]]]
variable[kv_v] assign[=] call[name[helpers].knot_insertion_kv, parameter[name[obj].knotvector_v, call[name[param]][constant[1]], name[span_v], call[name[num]][constant[1]]]]
variable[cpts_tmp] assign[=] list[[]]
variable[cpts] assign[=] <ast.IfExp object at 0x7da1b1780fa0>
for taget[name[u]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_u]]] begin[:]
variable[ccv] assign[=] <ast.ListComp object at 0x7da1b1783ac0>
variable[ctrlpts_tmp] assign[=] call[name[helpers].knot_insertion, parameter[name[obj].degree_v, name[obj].knotvector_v, name[ccv], call[name[param]][constant[1]]]]
<ast.AugAssign object at 0x7da1b1781db0>
call[name[obj].set_ctrlpts, parameter[name[cpts_tmp], name[obj].ctrlpts_size_u, binary_operation[name[obj].ctrlpts_size_v + call[name[num]][constant[1]]]]]
name[obj].knotvector_v assign[=] name[kv_v]
if call[name[isinstance], parameter[name[obj], name[abstract].Volume]] begin[:]
if <ast.BoolOp object at 0x7da1b1781e40> begin[:]
variable[s_u] assign[=] call[name[helpers].find_multiplicity, parameter[call[name[param]][constant[0]], name[obj].knotvector_u]]
if <ast.BoolOp object at 0x7da1b1783df0> begin[:]
<ast.Raise object at 0x7da1b1783b20>
variable[span_u] assign[=] call[name[helpers].find_span_linear, parameter[name[obj].degree_u, name[obj].knotvector_u, name[obj].ctrlpts_size_u, call[name[param]][constant[0]]]]
variable[kv_u] assign[=] call[name[helpers].knot_insertion_kv, parameter[name[obj].knotvector_u, call[name[param]][constant[0]], name[span_u], call[name[num]][constant[0]]]]
variable[cpts] assign[=] <ast.IfExp object at 0x7da1b1781330>
variable[cpt2d] assign[=] list[[]]
for taget[name[u]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_u]]] begin[:]
variable[temp_surf] assign[=] list[[]]
for taget[name[w]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_w]]] begin[:]
for taget[name[v]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_v]]] begin[:]
variable[temp_pt] assign[=] call[name[cpts]][binary_operation[binary_operation[name[v] + binary_operation[name[u] * name[obj].ctrlpts_size_v]] + binary_operation[binary_operation[name[w] * name[obj].ctrlpts_size_u] * name[obj].ctrlpts_size_v]]]
call[name[temp_surf].append, parameter[name[temp_pt]]]
call[name[cpt2d].append, parameter[name[temp_surf]]]
variable[ctrlpts_tmp] assign[=] call[name[helpers].knot_insertion, parameter[name[obj].degree_u, name[obj].knotvector_u, name[cpt2d], call[name[param]][constant[0]]]]
variable[ctrlpts_new] assign[=] list[[]]
for taget[name[w]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_w]]] begin[:]
for taget[name[u]] in starred[call[name[range], parameter[binary_operation[name[obj].ctrlpts_size_u + call[name[num]][constant[0]]]]]] begin[:]
for taget[name[v]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_v]]] begin[:]
variable[temp_pt] assign[=] call[call[name[ctrlpts_tmp]][name[u]]][binary_operation[name[v] + binary_operation[name[w] * name[obj].ctrlpts_size_v]]]
call[name[ctrlpts_new].append, parameter[name[temp_pt]]]
call[name[obj].set_ctrlpts, parameter[name[ctrlpts_new], binary_operation[name[obj].ctrlpts_size_u + call[name[num]][constant[0]]], name[obj].ctrlpts_size_v, name[obj].ctrlpts_size_w]]
name[obj].knotvector_u assign[=] name[kv_u]
if <ast.BoolOp object at 0x7da1b16c5870> begin[:]
variable[s_v] assign[=] call[name[helpers].find_multiplicity, parameter[call[name[param]][constant[1]], name[obj].knotvector_v]]
if <ast.BoolOp object at 0x7da1b16c6590> begin[:]
<ast.Raise object at 0x7da1b16c6500>
variable[span_v] assign[=] call[name[helpers].find_span_linear, parameter[name[obj].degree_v, name[obj].knotvector_v, name[obj].ctrlpts_size_v, call[name[param]][constant[1]]]]
variable[kv_v] assign[=] call[name[helpers].knot_insertion_kv, parameter[name[obj].knotvector_v, call[name[param]][constant[1]], name[span_v], call[name[num]][constant[1]]]]
variable[cpts] assign[=] <ast.IfExp object at 0x7da1b16c4190>
variable[cpt2d] assign[=] list[[]]
for taget[name[v]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_v]]] begin[:]
variable[temp_surf] assign[=] list[[]]
for taget[name[w]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_w]]] begin[:]
for taget[name[u]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_u]]] begin[:]
variable[temp_pt] assign[=] call[name[cpts]][binary_operation[binary_operation[name[v] + binary_operation[name[u] * name[obj].ctrlpts_size_v]] + binary_operation[binary_operation[name[w] * name[obj].ctrlpts_size_u] * name[obj].ctrlpts_size_v]]]
call[name[temp_surf].append, parameter[name[temp_pt]]]
call[name[cpt2d].append, parameter[name[temp_surf]]]
variable[ctrlpts_tmp] assign[=] call[name[helpers].knot_insertion, parameter[name[obj].degree_v, name[obj].knotvector_v, name[cpt2d], call[name[param]][constant[1]]]]
variable[ctrlpts_new] assign[=] list[[]]
for taget[name[w]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_w]]] begin[:]
for taget[name[u]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_u]]] begin[:]
for taget[name[v]] in starred[call[name[range], parameter[binary_operation[name[obj].ctrlpts_size_v + call[name[num]][constant[1]]]]]] begin[:]
variable[temp_pt] assign[=] call[call[name[ctrlpts_tmp]][name[v]]][binary_operation[name[u] + binary_operation[name[w] * name[obj].ctrlpts_size_u]]]
call[name[ctrlpts_new].append, parameter[name[temp_pt]]]
call[name[obj].set_ctrlpts, parameter[name[ctrlpts_new], name[obj].ctrlpts_size_u, binary_operation[name[obj].ctrlpts_size_v + call[name[num]][constant[1]]], name[obj].ctrlpts_size_w]]
name[obj].knotvector_v assign[=] name[kv_v]
if <ast.BoolOp object at 0x7da1b16d14e0> begin[:]
variable[s_w] assign[=] call[name[helpers].find_multiplicity, parameter[call[name[param]][constant[2]], name[obj].knotvector_w]]
if <ast.BoolOp object at 0x7da1b16d1de0> begin[:]
<ast.Raise object at 0x7da1b16d1db0>
variable[span_w] assign[=] call[name[helpers].find_span_linear, parameter[name[obj].degree_w, name[obj].knotvector_w, name[obj].ctrlpts_size_w, call[name[param]][constant[2]]]]
variable[kv_w] assign[=] call[name[helpers].knot_insertion_kv, parameter[name[obj].knotvector_w, call[name[param]][constant[2]], name[span_w], call[name[num]][constant[2]]]]
variable[cpts] assign[=] <ast.IfExp object at 0x7da1b16d1570>
variable[cpt2d] assign[=] list[[]]
for taget[name[w]] in starred[call[name[range], parameter[name[obj].ctrlpts_size_w]]] begin[:]
variable[temp_surf] assign[=] <ast.ListComp object at 0x7da1b16b4250>
call[name[cpt2d].append, parameter[name[temp_surf]]]
variable[ctrlpts_tmp] assign[=] call[name[helpers].knot_insertion, parameter[name[obj].degree_w, name[obj].knotvector_w, name[cpt2d], call[name[param]][constant[2]]]]
variable[ctrlpts_new] assign[=] list[[]]
for taget[name[w]] in starred[call[name[range], parameter[binary_operation[name[obj].ctrlpts_size_w + call[name[num]][constant[2]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b16b78e0>
call[name[obj].set_ctrlpts, parameter[name[ctrlpts_new], name[obj].ctrlpts_size_u, name[obj].ctrlpts_size_v, binary_operation[name[obj].ctrlpts_size_w + call[name[num]][constant[2]]]]]
name[obj].knotvector_w assign[=] name[kv_w]
return[name[obj]] | keyword[def] identifier[insert_knot] ( identifier[obj] , identifier[param] , identifier[num] ,** identifier[kwargs] ):
literal[string]
identifier[check_num] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] )
keyword[if] identifier[check_num] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[num] ,( identifier[list] , identifier[tuple] )):
keyword[raise] identifier[GeomdlException] ( literal[string] ,
identifier[data] = identifier[dict] ( identifier[num] = identifier[num] ))
keyword[if] identifier[len] ( identifier[num] )!= identifier[obj] . identifier[pdimension] :
keyword[raise] identifier[GeomdlException] ( literal[string] ,
identifier[data] = identifier[dict] ( identifier[pdim] = identifier[obj] . identifier[pdimension] , identifier[num_len] = identifier[len] ( identifier[num] )))
keyword[for] identifier[idx] , identifier[val] keyword[in] identifier[enumerate] ( identifier[num] ):
keyword[if] identifier[val] < literal[int] :
keyword[raise] identifier[GeomdlException] ( literal[string] ,
identifier[data] = identifier[dict] ( identifier[idx] = identifier[idx] , identifier[num] = identifier[val] ))
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[abstract] . identifier[Curve] ):
keyword[if] identifier[param] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[num] [ literal[int] ]> literal[int] :
identifier[s] = identifier[helpers] . identifier[find_multiplicity] ( identifier[param] [ literal[int] ], identifier[obj] . identifier[knotvector] )
keyword[if] identifier[check_num] keyword[and] identifier[num] [ literal[int] ]> identifier[obj] . identifier[degree] - identifier[s] :
keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[param] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[num] [ literal[int] ])+ literal[string] ,
identifier[data] = identifier[dict] ( identifier[knot] = identifier[param] [ literal[int] ], identifier[num] = identifier[num] [ literal[int] ], identifier[multiplicity] = identifier[s] ))
identifier[span] = identifier[helpers] . identifier[find_span_linear] ( identifier[obj] . identifier[degree] , identifier[obj] . identifier[knotvector] , identifier[obj] . identifier[ctrlpts_size] , identifier[param] [ literal[int] ])
identifier[kv_new] = identifier[helpers] . identifier[knot_insertion_kv] ( identifier[obj] . identifier[knotvector] , identifier[param] [ literal[int] ], identifier[span] , identifier[num] [ literal[int] ])
identifier[cpts] = identifier[obj] . identifier[ctrlptsw] keyword[if] identifier[obj] . identifier[rational] keyword[else] identifier[obj] . identifier[ctrlpts]
identifier[cpts_tmp] = identifier[helpers] . identifier[knot_insertion] ( identifier[obj] . identifier[degree] , identifier[obj] . identifier[knotvector] , identifier[cpts] , identifier[param] [ literal[int] ],
identifier[num] = identifier[num] [ literal[int] ], identifier[s] = identifier[s] , identifier[span] = identifier[span] )
identifier[obj] . identifier[set_ctrlpts] ( identifier[cpts_tmp] )
identifier[obj] . identifier[knotvector] = identifier[kv_new]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[abstract] . identifier[Surface] ):
keyword[if] identifier[param] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[num] [ literal[int] ]> literal[int] :
identifier[s_u] = identifier[helpers] . identifier[find_multiplicity] ( identifier[param] [ literal[int] ], identifier[obj] . identifier[knotvector_u] )
keyword[if] identifier[check_num] keyword[and] identifier[num] [ literal[int] ]> identifier[obj] . identifier[degree_u] - identifier[s_u] :
keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[param] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[num] [ literal[int] ])+ literal[string] ,
identifier[data] = identifier[dict] ( identifier[knot] = identifier[param] [ literal[int] ], identifier[num] = identifier[num] [ literal[int] ], identifier[multiplicity] = identifier[s_u] ))
identifier[span_u] = identifier[helpers] . identifier[find_span_linear] ( identifier[obj] . identifier[degree_u] , identifier[obj] . identifier[knotvector_u] , identifier[obj] . identifier[ctrlpts_size_u] , identifier[param] [ literal[int] ])
identifier[kv_u] = identifier[helpers] . identifier[knot_insertion_kv] ( identifier[obj] . identifier[knotvector_u] , identifier[param] [ literal[int] ], identifier[span_u] , identifier[num] [ literal[int] ])
identifier[cpts_tmp] =[]
identifier[cpts] = identifier[obj] . identifier[ctrlptsw] keyword[if] identifier[obj] . identifier[rational] keyword[else] identifier[obj] . identifier[ctrlpts]
keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_v] ):
identifier[ccu] =[ identifier[cpts] [ identifier[v] +( identifier[obj] . identifier[ctrlpts_size_v] * identifier[u] )] keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] )]
identifier[ctrlpts_tmp] = identifier[helpers] . identifier[knot_insertion] ( identifier[obj] . identifier[degree_u] , identifier[obj] . identifier[knotvector_u] , identifier[ccu] , identifier[param] [ literal[int] ],
identifier[num] = identifier[num] [ literal[int] ], identifier[s] = identifier[s_u] , identifier[span] = identifier[span_u] )
identifier[cpts_tmp] += identifier[ctrlpts_tmp]
identifier[obj] . identifier[set_ctrlpts] ( identifier[compatibility] . identifier[flip_ctrlpts_u] ( identifier[cpts_tmp] , identifier[obj] . identifier[ctrlpts_size_u] + identifier[num] [ literal[int] ], identifier[obj] . identifier[ctrlpts_size_v] ),
identifier[obj] . identifier[ctrlpts_size_u] + identifier[num] [ literal[int] ], identifier[obj] . identifier[ctrlpts_size_v] )
identifier[obj] . identifier[knotvector_u] = identifier[kv_u]
keyword[if] identifier[param] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[num] [ literal[int] ]> literal[int] :
identifier[s_v] = identifier[helpers] . identifier[find_multiplicity] ( identifier[param] [ literal[int] ], identifier[obj] . identifier[knotvector_v] )
keyword[if] identifier[check_num] keyword[and] identifier[num] [ literal[int] ]> identifier[obj] . identifier[degree_v] - identifier[s_v] :
keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[param] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[num] [ literal[int] ])+ literal[string] ,
identifier[data] = identifier[dict] ( identifier[knot] = identifier[param] [ literal[int] ], identifier[num] = identifier[num] [ literal[int] ], identifier[multiplicity] = identifier[s_v] ))
identifier[span_v] = identifier[helpers] . identifier[find_span_linear] ( identifier[obj] . identifier[degree_v] , identifier[obj] . identifier[knotvector_v] , identifier[obj] . identifier[ctrlpts_size_v] , identifier[param] [ literal[int] ])
identifier[kv_v] = identifier[helpers] . identifier[knot_insertion_kv] ( identifier[obj] . identifier[knotvector_v] , identifier[param] [ literal[int] ], identifier[span_v] , identifier[num] [ literal[int] ])
identifier[cpts_tmp] =[]
identifier[cpts] = identifier[obj] . identifier[ctrlptsw] keyword[if] identifier[obj] . identifier[rational] keyword[else] identifier[obj] . identifier[ctrlpts]
keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] ):
identifier[ccv] =[ identifier[cpts] [ identifier[v] +( identifier[obj] . identifier[ctrlpts_size_v] * identifier[u] )] keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_v] )]
identifier[ctrlpts_tmp] = identifier[helpers] . identifier[knot_insertion] ( identifier[obj] . identifier[degree_v] , identifier[obj] . identifier[knotvector_v] , identifier[ccv] , identifier[param] [ literal[int] ],
identifier[num] = identifier[num] [ literal[int] ], identifier[s] = identifier[s_v] , identifier[span] = identifier[span_v] )
identifier[cpts_tmp] += identifier[ctrlpts_tmp]
identifier[obj] . identifier[set_ctrlpts] ( identifier[cpts_tmp] , identifier[obj] . identifier[ctrlpts_size_u] , identifier[obj] . identifier[ctrlpts_size_v] + identifier[num] [ literal[int] ])
identifier[obj] . identifier[knotvector_v] = identifier[kv_v]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[abstract] . identifier[Volume] ):
keyword[if] identifier[param] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[num] [ literal[int] ]> literal[int] :
identifier[s_u] = identifier[helpers] . identifier[find_multiplicity] ( identifier[param] [ literal[int] ], identifier[obj] . identifier[knotvector_u] )
keyword[if] identifier[check_num] keyword[and] identifier[num] [ literal[int] ]> identifier[obj] . identifier[degree_u] - identifier[s_u] :
keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[param] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[num] [ literal[int] ])+ literal[string] ,
identifier[data] = identifier[dict] ( identifier[knot] = identifier[param] [ literal[int] ], identifier[num] = identifier[num] [ literal[int] ], identifier[multiplicity] = identifier[s_u] ))
identifier[span_u] = identifier[helpers] . identifier[find_span_linear] ( identifier[obj] . identifier[degree_u] , identifier[obj] . identifier[knotvector_u] , identifier[obj] . identifier[ctrlpts_size_u] , identifier[param] [ literal[int] ])
identifier[kv_u] = identifier[helpers] . identifier[knot_insertion_kv] ( identifier[obj] . identifier[knotvector_u] , identifier[param] [ literal[int] ], identifier[span_u] , identifier[num] [ literal[int] ])
identifier[cpts] = identifier[obj] . identifier[ctrlptsw] keyword[if] identifier[obj] . identifier[rational] keyword[else] identifier[obj] . identifier[ctrlpts]
identifier[cpt2d] =[]
keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] ):
identifier[temp_surf] =[]
keyword[for] identifier[w] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_w] ):
keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_v] ):
identifier[temp_pt] = identifier[cpts] [ identifier[v] +( identifier[u] * identifier[obj] . identifier[ctrlpts_size_v] )+( identifier[w] * identifier[obj] . identifier[ctrlpts_size_u] * identifier[obj] . identifier[ctrlpts_size_v] )]
identifier[temp_surf] . identifier[append] ( identifier[temp_pt] )
identifier[cpt2d] . identifier[append] ( identifier[temp_surf] )
identifier[ctrlpts_tmp] = identifier[helpers] . identifier[knot_insertion] ( identifier[obj] . identifier[degree_u] , identifier[obj] . identifier[knotvector_u] , identifier[cpt2d] , identifier[param] [ literal[int] ],
identifier[num] = identifier[num] [ literal[int] ], identifier[s] = identifier[s_u] , identifier[span] = identifier[span_u] )
identifier[ctrlpts_new] =[]
keyword[for] identifier[w] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_w] ):
keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] + identifier[num] [ literal[int] ]):
keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_v] ):
identifier[temp_pt] = identifier[ctrlpts_tmp] [ identifier[u] ][ identifier[v] +( identifier[w] * identifier[obj] . identifier[ctrlpts_size_v] )]
identifier[ctrlpts_new] . identifier[append] ( identifier[temp_pt] )
identifier[obj] . identifier[set_ctrlpts] ( identifier[ctrlpts_new] , identifier[obj] . identifier[ctrlpts_size_u] + identifier[num] [ literal[int] ], identifier[obj] . identifier[ctrlpts_size_v] , identifier[obj] . identifier[ctrlpts_size_w] )
identifier[obj] . identifier[knotvector_u] = identifier[kv_u]
keyword[if] identifier[param] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[num] [ literal[int] ]> literal[int] :
identifier[s_v] = identifier[helpers] . identifier[find_multiplicity] ( identifier[param] [ literal[int] ], identifier[obj] . identifier[knotvector_v] )
keyword[if] identifier[check_num] keyword[and] identifier[num] [ literal[int] ]> identifier[obj] . identifier[degree_v] - identifier[s_v] :
keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[param] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[num] [ literal[int] ])+ literal[string] ,
identifier[data] = identifier[dict] ( identifier[knot] = identifier[param] [ literal[int] ], identifier[num] = identifier[num] [ literal[int] ], identifier[multiplicity] = identifier[s_v] ))
identifier[span_v] = identifier[helpers] . identifier[find_span_linear] ( identifier[obj] . identifier[degree_v] , identifier[obj] . identifier[knotvector_v] , identifier[obj] . identifier[ctrlpts_size_v] , identifier[param] [ literal[int] ])
identifier[kv_v] = identifier[helpers] . identifier[knot_insertion_kv] ( identifier[obj] . identifier[knotvector_v] , identifier[param] [ literal[int] ], identifier[span_v] , identifier[num] [ literal[int] ])
identifier[cpts] = identifier[obj] . identifier[ctrlptsw] keyword[if] identifier[obj] . identifier[rational] keyword[else] identifier[obj] . identifier[ctrlpts]
identifier[cpt2d] =[]
keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_v] ):
identifier[temp_surf] =[]
keyword[for] identifier[w] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_w] ):
keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] ):
identifier[temp_pt] = identifier[cpts] [ identifier[v] +( identifier[u] * identifier[obj] . identifier[ctrlpts_size_v] )+( identifier[w] * identifier[obj] . identifier[ctrlpts_size_u] * identifier[obj] . identifier[ctrlpts_size_v] )]
identifier[temp_surf] . identifier[append] ( identifier[temp_pt] )
identifier[cpt2d] . identifier[append] ( identifier[temp_surf] )
identifier[ctrlpts_tmp] = identifier[helpers] . identifier[knot_insertion] ( identifier[obj] . identifier[degree_v] , identifier[obj] . identifier[knotvector_v] , identifier[cpt2d] , identifier[param] [ literal[int] ],
identifier[num] = identifier[num] [ literal[int] ], identifier[s] = identifier[s_v] , identifier[span] = identifier[span_v] )
identifier[ctrlpts_new] =[]
keyword[for] identifier[w] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_w] ):
keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] ):
keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_v] + identifier[num] [ literal[int] ]):
identifier[temp_pt] = identifier[ctrlpts_tmp] [ identifier[v] ][ identifier[u] +( identifier[w] * identifier[obj] . identifier[ctrlpts_size_u] )]
identifier[ctrlpts_new] . identifier[append] ( identifier[temp_pt] )
identifier[obj] . identifier[set_ctrlpts] ( identifier[ctrlpts_new] , identifier[obj] . identifier[ctrlpts_size_u] , identifier[obj] . identifier[ctrlpts_size_v] + identifier[num] [ literal[int] ], identifier[obj] . identifier[ctrlpts_size_w] )
identifier[obj] . identifier[knotvector_v] = identifier[kv_v]
keyword[if] identifier[param] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[num] [ literal[int] ]> literal[int] :
identifier[s_w] = identifier[helpers] . identifier[find_multiplicity] ( identifier[param] [ literal[int] ], identifier[obj] . identifier[knotvector_w] )
keyword[if] identifier[check_num] keyword[and] identifier[num] [ literal[int] ]> identifier[obj] . identifier[degree_w] - identifier[s_w] :
keyword[raise] identifier[GeomdlException] ( literal[string] + identifier[str] ( identifier[param] [ literal[int] ])+ literal[string] + identifier[str] ( identifier[num] [ literal[int] ])+ literal[string] ,
identifier[data] = identifier[dict] ( identifier[knot] = identifier[param] [ literal[int] ], identifier[num] = identifier[num] [ literal[int] ], identifier[multiplicity] = identifier[s_w] ))
identifier[span_w] = identifier[helpers] . identifier[find_span_linear] ( identifier[obj] . identifier[degree_w] , identifier[obj] . identifier[knotvector_w] , identifier[obj] . identifier[ctrlpts_size_w] , identifier[param] [ literal[int] ])
identifier[kv_w] = identifier[helpers] . identifier[knot_insertion_kv] ( identifier[obj] . identifier[knotvector_w] , identifier[param] [ literal[int] ], identifier[span_w] , identifier[num] [ literal[int] ])
identifier[cpts] = identifier[obj] . identifier[ctrlptsw] keyword[if] identifier[obj] . identifier[rational] keyword[else] identifier[obj] . identifier[ctrlpts]
identifier[cpt2d] =[]
keyword[for] identifier[w] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_w] ):
identifier[temp_surf] =[ identifier[cpts] [ identifier[uv] +( identifier[w] * identifier[obj] . identifier[ctrlpts_size_u] * identifier[obj] . identifier[ctrlpts_size_v] )] keyword[for] identifier[uv] keyword[in]
identifier[range] ( identifier[obj] . identifier[ctrlpts_size_u] * identifier[obj] . identifier[ctrlpts_size_v] )]
identifier[cpt2d] . identifier[append] ( identifier[temp_surf] )
identifier[ctrlpts_tmp] = identifier[helpers] . identifier[knot_insertion] ( identifier[obj] . identifier[degree_w] , identifier[obj] . identifier[knotvector_w] , identifier[cpt2d] , identifier[param] [ literal[int] ],
identifier[num] = identifier[num] [ literal[int] ], identifier[s] = identifier[s_w] , identifier[span] = identifier[span_w] )
identifier[ctrlpts_new] =[]
keyword[for] identifier[w] keyword[in] identifier[range] ( identifier[obj] . identifier[ctrlpts_size_w] + identifier[num] [ literal[int] ]):
identifier[ctrlpts_new] += identifier[ctrlpts_tmp] [ identifier[w] ]
identifier[obj] . identifier[set_ctrlpts] ( identifier[ctrlpts_new] , identifier[obj] . identifier[ctrlpts_size_u] , identifier[obj] . identifier[ctrlpts_size_v] , identifier[obj] . identifier[ctrlpts_size_w] + identifier[num] [ literal[int] ])
identifier[obj] . identifier[knotvector_w] = identifier[kv_w]
keyword[return] identifier[obj] | def insert_knot(obj, param, num, **kwargs):
""" Inserts knots n-times to a spline geometry.
The following code snippet illustrates the usage of this function:
.. code-block:: python
# Insert knot u=0.5 to a curve 2 times
operations.insert_knot(curve, [0.5], [2])
# Insert knot v=0.25 to a surface 1 time
operations.insert_knot(surface, [None, 0.25], [0, 1])
# Insert knots u=0.75, v=0.25 to a surface 2 and 1 times, respectively
operations.insert_knot(surface, [0.75, 0.25], [2, 1])
# Insert knot w=0.5 to a volume 1 time
operations.insert_knot(volume, [None, None, 0.5], [0, 0, 1])
    Please note that the input spline geometry object will always be updated if the knot insertion operation is successful.
Keyword Arguments:
* ``check_num``: enables/disables operation validity checks. *Default: True*
:param obj: spline geometry
:type obj: abstract.SplineGeometry
:param param: knot(s) to be inserted in [u, v, w] format
:type param: list, tuple
:param num: number of knot insertions in [num_u, num_v, num_w] format
:type num: list, tuple
:return: updated spline geometry
"""
# Get keyword arguments
check_num = kwargs.get('check_num', True) # can be set to False when the caller checks number of insertions
if check_num:
# Check the validity of number of insertions
if not isinstance(num, (list, tuple)):
raise GeomdlException('The number of insertions must be a list or a tuple', data=dict(num=num)) # depends on [control=['if'], data=[]]
if len(num) != obj.pdimension:
raise GeomdlException('The length of the num array must be equal to the number of parametric dimensions', data=dict(pdim=obj.pdimension, num_len=len(num))) # depends on [control=['if'], data=[]]
for (idx, val) in enumerate(num):
if val < 0:
                raise GeomdlException('Number of insertions must be a non-negative integer value', data=dict(idx=idx, num=val)) # depends on [control=['if'], data=['val']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
# Start curve knot insertion
if isinstance(obj, abstract.Curve):
if param[0] is not None and num[0] > 0:
# Find knot multiplicity
s = helpers.find_multiplicity(param[0], obj.knotvector)
            # Check if it is possible to add that many knots
if check_num and num[0] > obj.degree - s:
raise GeomdlException('Knot ' + str(param[0]) + ' cannot be inserted ' + str(num[0]) + ' times', data=dict(knot=param[0], num=num[0], multiplicity=s)) # depends on [control=['if'], data=[]]
# Find knot span
span = helpers.find_span_linear(obj.degree, obj.knotvector, obj.ctrlpts_size, param[0])
# Compute new knot vector
kv_new = helpers.knot_insertion_kv(obj.knotvector, param[0], span, num[0])
# Compute new control points
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
cpts_tmp = helpers.knot_insertion(obj.degree, obj.knotvector, cpts, param[0], num=num[0], s=s, span=span)
# Update curve
obj.set_ctrlpts(cpts_tmp)
obj.knotvector = kv_new # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Start surface knot insertion
if isinstance(obj, abstract.Surface):
# u-direction
if param[0] is not None and num[0] > 0:
# Find knot multiplicity
s_u = helpers.find_multiplicity(param[0], obj.knotvector_u)
            # Check if it is possible to add that many knots
if check_num and num[0] > obj.degree_u - s_u:
raise GeomdlException('Knot ' + str(param[0]) + ' cannot be inserted ' + str(num[0]) + ' times (u-dir)', data=dict(knot=param[0], num=num[0], multiplicity=s_u)) # depends on [control=['if'], data=[]]
# Find knot span
span_u = helpers.find_span_linear(obj.degree_u, obj.knotvector_u, obj.ctrlpts_size_u, param[0])
# Compute new knot vector
kv_u = helpers.knot_insertion_kv(obj.knotvector_u, param[0], span_u, num[0])
# Get curves
cpts_tmp = []
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
for v in range(obj.ctrlpts_size_v):
ccu = [cpts[v + obj.ctrlpts_size_v * u] for u in range(obj.ctrlpts_size_u)]
ctrlpts_tmp = helpers.knot_insertion(obj.degree_u, obj.knotvector_u, ccu, param[0], num=num[0], s=s_u, span=span_u)
cpts_tmp += ctrlpts_tmp # depends on [control=['for'], data=['v']]
# Update the surface after knot insertion
obj.set_ctrlpts(compatibility.flip_ctrlpts_u(cpts_tmp, obj.ctrlpts_size_u + num[0], obj.ctrlpts_size_v), obj.ctrlpts_size_u + num[0], obj.ctrlpts_size_v)
obj.knotvector_u = kv_u # depends on [control=['if'], data=[]]
# v-direction
if param[1] is not None and num[1] > 0:
# Find knot multiplicity
s_v = helpers.find_multiplicity(param[1], obj.knotvector_v)
            # Check if it is possible to add that many knots
if check_num and num[1] > obj.degree_v - s_v:
raise GeomdlException('Knot ' + str(param[1]) + ' cannot be inserted ' + str(num[1]) + ' times (v-dir)', data=dict(knot=param[1], num=num[1], multiplicity=s_v)) # depends on [control=['if'], data=[]]
# Find knot span
span_v = helpers.find_span_linear(obj.degree_v, obj.knotvector_v, obj.ctrlpts_size_v, param[1])
# Compute new knot vector
kv_v = helpers.knot_insertion_kv(obj.knotvector_v, param[1], span_v, num[1])
# Get curves
cpts_tmp = []
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
for u in range(obj.ctrlpts_size_u):
ccv = [cpts[v + obj.ctrlpts_size_v * u] for v in range(obj.ctrlpts_size_v)]
ctrlpts_tmp = helpers.knot_insertion(obj.degree_v, obj.knotvector_v, ccv, param[1], num=num[1], s=s_v, span=span_v)
cpts_tmp += ctrlpts_tmp # depends on [control=['for'], data=['u']]
# Update the surface after knot insertion
obj.set_ctrlpts(cpts_tmp, obj.ctrlpts_size_u, obj.ctrlpts_size_v + num[1])
obj.knotvector_v = kv_v # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Start volume knot insertion
if isinstance(obj, abstract.Volume):
# u-direction
if param[0] is not None and num[0] > 0:
# Find knot multiplicity
s_u = helpers.find_multiplicity(param[0], obj.knotvector_u)
            # Check if it is possible to add that many knots
if check_num and num[0] > obj.degree_u - s_u:
raise GeomdlException('Knot ' + str(param[0]) + ' cannot be inserted ' + str(num[0]) + ' times (u-dir)', data=dict(knot=param[0], num=num[0], multiplicity=s_u)) # depends on [control=['if'], data=[]]
# Find knot span
span_u = helpers.find_span_linear(obj.degree_u, obj.knotvector_u, obj.ctrlpts_size_u, param[0])
# Compute new knot vector
kv_u = helpers.knot_insertion_kv(obj.knotvector_u, param[0], span_u, num[0])
# Use Pw if rational
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
# Construct 2-dimensional structure
cpt2d = []
for u in range(obj.ctrlpts_size_u):
temp_surf = []
for w in range(obj.ctrlpts_size_w):
for v in range(obj.ctrlpts_size_v):
temp_pt = cpts[v + u * obj.ctrlpts_size_v + w * obj.ctrlpts_size_u * obj.ctrlpts_size_v]
temp_surf.append(temp_pt) # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=['w']]
cpt2d.append(temp_surf) # depends on [control=['for'], data=['u']]
# Compute new control points
ctrlpts_tmp = helpers.knot_insertion(obj.degree_u, obj.knotvector_u, cpt2d, param[0], num=num[0], s=s_u, span=span_u)
# Flatten to 1-dimensional structure
ctrlpts_new = []
for w in range(obj.ctrlpts_size_w):
for u in range(obj.ctrlpts_size_u + num[0]):
for v in range(obj.ctrlpts_size_v):
temp_pt = ctrlpts_tmp[u][v + w * obj.ctrlpts_size_v]
ctrlpts_new.append(temp_pt) # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=['u']] # depends on [control=['for'], data=['w']]
# Update the volume after knot insertion
obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u + num[0], obj.ctrlpts_size_v, obj.ctrlpts_size_w)
obj.knotvector_u = kv_u # depends on [control=['if'], data=[]]
# v-direction
if param[1] is not None and num[1] > 0:
# Find knot multiplicity
s_v = helpers.find_multiplicity(param[1], obj.knotvector_v)
            # Check if it is possible to add that many knots
if check_num and num[1] > obj.degree_v - s_v:
raise GeomdlException('Knot ' + str(param[1]) + ' cannot be inserted ' + str(num[1]) + ' times (v-dir)', data=dict(knot=param[1], num=num[1], multiplicity=s_v)) # depends on [control=['if'], data=[]]
# Find knot span
span_v = helpers.find_span_linear(obj.degree_v, obj.knotvector_v, obj.ctrlpts_size_v, param[1])
# Compute new knot vector
kv_v = helpers.knot_insertion_kv(obj.knotvector_v, param[1], span_v, num[1])
# Use Pw if rational
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
# Construct 2-dimensional structure
cpt2d = []
for v in range(obj.ctrlpts_size_v):
temp_surf = []
for w in range(obj.ctrlpts_size_w):
for u in range(obj.ctrlpts_size_u):
temp_pt = cpts[v + u * obj.ctrlpts_size_v + w * obj.ctrlpts_size_u * obj.ctrlpts_size_v]
temp_surf.append(temp_pt) # depends on [control=['for'], data=['u']] # depends on [control=['for'], data=['w']]
cpt2d.append(temp_surf) # depends on [control=['for'], data=['v']]
# Compute new control points
ctrlpts_tmp = helpers.knot_insertion(obj.degree_v, obj.knotvector_v, cpt2d, param[1], num=num[1], s=s_v, span=span_v)
# Flatten to 1-dimensional structure
ctrlpts_new = []
for w in range(obj.ctrlpts_size_w):
for u in range(obj.ctrlpts_size_u):
for v in range(obj.ctrlpts_size_v + num[1]):
temp_pt = ctrlpts_tmp[v][u + w * obj.ctrlpts_size_u]
ctrlpts_new.append(temp_pt) # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=['u']] # depends on [control=['for'], data=['w']]
# Update the volume after knot insertion
obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u, obj.ctrlpts_size_v + num[1], obj.ctrlpts_size_w)
obj.knotvector_v = kv_v # depends on [control=['if'], data=[]]
# w-direction
if param[2] is not None and num[2] > 0:
# Find knot multiplicity
s_w = helpers.find_multiplicity(param[2], obj.knotvector_w)
            # Check if it is possible to add that many knots
if check_num and num[2] > obj.degree_w - s_w:
raise GeomdlException('Knot ' + str(param[2]) + ' cannot be inserted ' + str(num[2]) + ' times (w-dir)', data=dict(knot=param[2], num=num[2], multiplicity=s_w)) # depends on [control=['if'], data=[]]
# Find knot span
span_w = helpers.find_span_linear(obj.degree_w, obj.knotvector_w, obj.ctrlpts_size_w, param[2])
# Compute new knot vector
kv_w = helpers.knot_insertion_kv(obj.knotvector_w, param[2], span_w, num[2])
# Use Pw if rational
cpts = obj.ctrlptsw if obj.rational else obj.ctrlpts
# Construct 2-dimensional structure
cpt2d = []
for w in range(obj.ctrlpts_size_w):
temp_surf = [cpts[uv + w * obj.ctrlpts_size_u * obj.ctrlpts_size_v] for uv in range(obj.ctrlpts_size_u * obj.ctrlpts_size_v)]
cpt2d.append(temp_surf) # depends on [control=['for'], data=['w']]
# Compute new control points
ctrlpts_tmp = helpers.knot_insertion(obj.degree_w, obj.knotvector_w, cpt2d, param[2], num=num[2], s=s_w, span=span_w)
# Flatten to 1-dimensional structure
ctrlpts_new = []
for w in range(obj.ctrlpts_size_w + num[2]):
ctrlpts_new += ctrlpts_tmp[w] # depends on [control=['for'], data=['w']]
# Update the volume after knot insertion
obj.set_ctrlpts(ctrlpts_new, obj.ctrlpts_size_u, obj.ctrlpts_size_v, obj.ctrlpts_size_w + num[2])
obj.knotvector_w = kv_w # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Return updated spline geometry
return obj |
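# A minimal usage sketch for insert_knot above, assuming the geomdl
# package layout (geomdl.BSpline, geomdl.operations); the degree,
# control points and knot vector below are illustrative values.
from geomdl import BSpline, operations

curve = BSpline.Curve()
curve.degree = 3
curve.ctrlpts = [[0, 0], [1, 2], [2, -1], [3, 1], [4, 0]]
curve.knotvector = [0, 0, 0, 0, 0.5, 1, 1, 1, 1]

# u=0.5 has multiplicity 1, so up to degree - 1 = 2 insertions pass the
# check_num validation; the curve is updated in place.
operations.insert_knot(curve, [0.5], [2])
assert len(curve.knotvector) == 11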
def _parse_host(host):
"""
        The purpose of this function is to be robust to improper connection
settings provided by users, specifically in the host field.
For example -- when users supply ``https://xx.cloud.databricks.com`` as the
host, we must strip out the protocol to get the host.::
h = DatabricksHook()
assert h._parse_host('https://xx.cloud.databricks.com') == \
'xx.cloud.databricks.com'
In the case where users supply the correct ``xx.cloud.databricks.com`` as the
host, this function is a no-op.::
assert h._parse_host('xx.cloud.databricks.com') == 'xx.cloud.databricks.com'
"""
urlparse_host = urlparse.urlparse(host).hostname
if urlparse_host:
# In this case, host = https://xx.cloud.databricks.com
return urlparse_host
else:
# In this case, host = xx.cloud.databricks.com
return host | def function[_parse_host, parameter[host]]:
constant[
        The purpose of this function is to be robust to improper connection
settings provided by users, specifically in the host field.
For example -- when users supply ``https://xx.cloud.databricks.com`` as the
host, we must strip out the protocol to get the host.::
h = DatabricksHook()
assert h._parse_host('https://xx.cloud.databricks.com') == 'xx.cloud.databricks.com'
In the case where users supply the correct ``xx.cloud.databricks.com`` as the
host, this function is a no-op.::
assert h._parse_host('xx.cloud.databricks.com') == 'xx.cloud.databricks.com'
]
variable[urlparse_host] assign[=] call[name[urlparse].urlparse, parameter[name[host]]].hostname
if name[urlparse_host] begin[:]
return[name[urlparse_host]] | keyword[def] identifier[_parse_host] ( identifier[host] ):
literal[string]
identifier[urlparse_host] = identifier[urlparse] . identifier[urlparse] ( identifier[host] ). identifier[hostname]
keyword[if] identifier[urlparse_host] :
keyword[return] identifier[urlparse_host]
keyword[else] :
keyword[return] identifier[host] | def _parse_host(host):
"""
        The purpose of this function is to be robust to improper connection
settings provided by users, specifically in the host field.
For example -- when users supply ``https://xx.cloud.databricks.com`` as the
host, we must strip out the protocol to get the host.::
h = DatabricksHook()
assert h._parse_host('https://xx.cloud.databricks.com') == 'xx.cloud.databricks.com'
In the case where users supply the correct ``xx.cloud.databricks.com`` as the
host, this function is a no-op.::
assert h._parse_host('xx.cloud.databricks.com') == 'xx.cloud.databricks.com'
"""
urlparse_host = urlparse.urlparse(host).hostname
if urlparse_host:
# In this case, host = https://xx.cloud.databricks.com
return urlparse_host # depends on [control=['if'], data=[]]
else:
# In this case, host = xx.cloud.databricks.com
return host |
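# A hedged standalone sketch of the same host normalization, using the
# stdlib urllib.parse in place of the hook's module-level urlparse
# import; the name parse_host is illustrative.
from urllib.parse import urlparse

def parse_host(host):
    # hostname is None when no scheme is present, so fall back to the
    # raw value, mirroring the if/else above.
    return urlparse(host).hostname or host

assert parse_host('https://xx.cloud.databricks.com') == 'xx.cloud.databricks.com'
assert parse_host('xx.cloud.databricks.com') == 'xx.cloud.databricks.com'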
def get_account_details(self, account):
""" Get the account details. """
result = {}
try:
luser = self._get_account(account.username)
luser = preload(luser, database=self._database)
except ObjectDoesNotExist:
return result
for i, j in luser.items():
if i != 'userPassword' and j is not None:
result[i] = j
return result | def function[get_account_details, parameter[self, account]]:
constant[ Get the account details. ]
variable[result] assign[=] dictionary[[], []]
<ast.Try object at 0x7da1b0337190>
for taget[tuple[[<ast.Name object at 0x7da18bc70e20>, <ast.Name object at 0x7da18bc72d10>]]] in starred[call[name[luser].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18bc700a0> begin[:]
call[name[result]][name[i]] assign[=] name[j]
return[name[result]] | keyword[def] identifier[get_account_details] ( identifier[self] , identifier[account] ):
literal[string]
identifier[result] ={}
keyword[try] :
identifier[luser] = identifier[self] . identifier[_get_account] ( identifier[account] . identifier[username] )
identifier[luser] = identifier[preload] ( identifier[luser] , identifier[database] = identifier[self] . identifier[_database] )
keyword[except] identifier[ObjectDoesNotExist] :
keyword[return] identifier[result]
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[luser] . identifier[items] ():
keyword[if] identifier[i] != literal[string] keyword[and] identifier[j] keyword[is] keyword[not] keyword[None] :
identifier[result] [ identifier[i] ]= identifier[j]
keyword[return] identifier[result] | def get_account_details(self, account):
""" Get the account details. """
result = {}
try:
luser = self._get_account(account.username)
luser = preload(luser, database=self._database) # depends on [control=['try'], data=[]]
except ObjectDoesNotExist:
return result # depends on [control=['except'], data=[]]
for (i, j) in luser.items():
if i != 'userPassword' and j is not None:
result[i] = j # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return result |
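# The LDAP plumbing (_get_account, preload, ObjectDoesNotExist) is not
# shown here; this sketch reproduces just the filtering rule on a plain
# dict, with illustrative attribute names.
def filter_account_details(luser):
    # Drop the password attribute and any unset attributes.
    return {k: v for k, v in luser.items()
            if k != 'userPassword' and v is not None}

assert filter_account_details(
    {'uid': 'jsmith', 'userPassword': 'secret', 'mail': None}
) == {'uid': 'jsmith'}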
def get_scoped_config(self, graph):
"""
Compute a configuration using the current scope.
"""
def loader(metadata):
if not self.current_scope:
target = graph.config
else:
target = graph.config.get(self.current_scope, {})
return {
self.key: target.get(self.key, {}),
}
defaults = {
self.key: get_defaults(self.func),
}
return configure(defaults, graph.metadata, loader) | def function[get_scoped_config, parameter[self, graph]]:
constant[
Compute a configuration using the current scope.
]
def function[loader, parameter[metadata]]:
if <ast.UnaryOp object at 0x7da1b0e158d0> begin[:]
variable[target] assign[=] name[graph].config
return[dictionary[[<ast.Attribute object at 0x7da1b0e14c70>], [<ast.Call object at 0x7da1b0c930d0>]]]
variable[defaults] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0c921d0>], [<ast.Call object at 0x7da1b0c915d0>]]
return[call[name[configure], parameter[name[defaults], name[graph].metadata, name[loader]]]] | keyword[def] identifier[get_scoped_config] ( identifier[self] , identifier[graph] ):
literal[string]
keyword[def] identifier[loader] ( identifier[metadata] ):
keyword[if] keyword[not] identifier[self] . identifier[current_scope] :
identifier[target] = identifier[graph] . identifier[config]
keyword[else] :
identifier[target] = identifier[graph] . identifier[config] . identifier[get] ( identifier[self] . identifier[current_scope] ,{})
keyword[return] {
identifier[self] . identifier[key] : identifier[target] . identifier[get] ( identifier[self] . identifier[key] ,{}),
}
identifier[defaults] ={
identifier[self] . identifier[key] : identifier[get_defaults] ( identifier[self] . identifier[func] ),
}
keyword[return] identifier[configure] ( identifier[defaults] , identifier[graph] . identifier[metadata] , identifier[loader] ) | def get_scoped_config(self, graph):
"""
Compute a configuration using the current scope.
"""
def loader(metadata):
if not self.current_scope:
target = graph.config # depends on [control=['if'], data=[]]
else:
target = graph.config.get(self.current_scope, {})
return {self.key: target.get(self.key, {})}
defaults = {self.key: get_defaults(self.func)}
return configure(defaults, graph.metadata, loader) |
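# A small sketch of the scoping rule above on plain dicts: read the
# component's sub-config from the current scope when one is set, else
# from the root. The configure()/metadata machinery is omitted and the
# names here are illustrative.
def scoped_section(config, key, current_scope=None):
    target = config if not current_scope else config.get(current_scope, {})
    return {key: target.get(key, {})}

config = {'db': {'host': 'primary'}, 'analytics': {'db': {'host': 'replica'}}}
assert scoped_section(config, 'db') == {'db': {'host': 'primary'}}
assert scoped_section(config, 'db', 'analytics') == {'db': {'host': 'replica'}}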
def _resume(self):
# type: (Descriptor) -> int
"""Resume a download, if possible
:param Descriptor self: this
:rtype: int or None
:return: verified download offset
"""
if self._resume_mgr is None or self._offset > 0 or self._finalized:
return None
# check if path exists in resume db
rr = self._resume_mgr.get_record(self._ase)
if rr is None:
logger.debug('no resume record for {}'.format(self.final_path))
return None
# ensure lengths are the same
if rr.length != self._ase.size:
logger.warning('resume length mismatch {} -> {}'.format(
rr.length, self._ase.size))
return None
# calculate current chunk and offset
if rr.next_integrity_chunk == 0:
logger.debug('nothing to resume for {}'.format(self.final_path))
return None
curr_chunk = rr.next_integrity_chunk
# set offsets if completed and the final path exists
if rr.completed and self.final_path.exists():
with self._meta_lock:
logger.debug('{} download already completed'.format(
self.final_path))
self._offset = self._ase.size
self._chunk_num = curr_chunk
self._chunk_size = rr.chunk_size
self._total_chunks = self._compute_total_chunks(rr.chunk_size)
self._next_integrity_chunk = rr.next_integrity_chunk
self._outstanding_ops = 0
self._finalized = True
return self._ase.size
# encrypted files are not resumable due to hmac requirement
if self._ase.is_encrypted:
logger.debug('cannot resume encrypted entity {}'.format(
self._ase.path))
return None
self._allocate_disk_space()
# check if final path exists
if not self.final_path.exists(): # noqa
logger.warning('download path {} does not exist'.format(
self.final_path))
return None
if self.hmac is not None:
raise RuntimeError(
'unexpected hmac object for entity {}'.format(self._ase.path))
# re-hash from 0 to offset if needed
_fd_offset = 0
_end_offset = min((curr_chunk * rr.chunk_size, rr.length))
if self.md5 is not None and curr_chunk > 0:
_blocksize = blobxfer.util.MEGABYTE << 2
logger.debug(
'integrity checking existing file {} offset {} -> {}'.format(
self.final_path,
self.view.fd_start,
self.view.fd_start + _end_offset)
)
with self._hasher_lock:
with self.final_path.open('rb') as filedesc:
filedesc.seek(self.view.fd_start, 0)
while _fd_offset < _end_offset:
if (_fd_offset + _blocksize) > _end_offset:
_blocksize = _end_offset - _fd_offset
_buf = filedesc.read(_blocksize)
self.md5.update(_buf)
_fd_offset += _blocksize
del _blocksize
# compare hashes
hexdigest = self.md5.hexdigest()
if rr.md5hexdigest != hexdigest:
logger.warning(
'MD5 mismatch resume={} computed={} for {}'.format(
rr.md5hexdigest, hexdigest, self.final_path))
# reset hasher
self.md5 = blobxfer.util.new_md5_hasher()
return None
# set values from resume
with self._meta_lock:
self._offset = _end_offset
self._chunk_num = curr_chunk
self._chunk_size = rr.chunk_size
self._total_chunks = self._compute_total_chunks(rr.chunk_size)
self._next_integrity_chunk = rr.next_integrity_chunk
self._outstanding_ops = (
self._total_chunks - self._next_integrity_chunk
)
logger.debug(
('resuming file {} from byte={} chunk={} chunk_size={} '
'total_chunks={} next_integrity_chunk={} '
'outstanding_ops={}').format(
self.final_path, self._offset, self._chunk_num,
self._chunk_size, self._total_chunks,
self._next_integrity_chunk, self._outstanding_ops))
return _end_offset | def function[_resume, parameter[self]]:
constant[Resume a download, if possible
:param Descriptor self: this
:rtype: int or None
:return: verified download offset
]
if <ast.BoolOp object at 0x7da20e956500> begin[:]
return[constant[None]]
variable[rr] assign[=] call[name[self]._resume_mgr.get_record, parameter[name[self]._ase]]
if compare[name[rr] is constant[None]] begin[:]
call[name[logger].debug, parameter[call[constant[no resume record for {}].format, parameter[name[self].final_path]]]]
return[constant[None]]
if compare[name[rr].length not_equal[!=] name[self]._ase.size] begin[:]
call[name[logger].warning, parameter[call[constant[resume length mismatch {} -> {}].format, parameter[name[rr].length, name[self]._ase.size]]]]
return[constant[None]]
if compare[name[rr].next_integrity_chunk equal[==] constant[0]] begin[:]
call[name[logger].debug, parameter[call[constant[nothing to resume for {}].format, parameter[name[self].final_path]]]]
return[constant[None]]
variable[curr_chunk] assign[=] name[rr].next_integrity_chunk
if <ast.BoolOp object at 0x7da20e9564d0> begin[:]
with name[self]._meta_lock begin[:]
call[name[logger].debug, parameter[call[constant[{} download already completed].format, parameter[name[self].final_path]]]]
name[self]._offset assign[=] name[self]._ase.size
name[self]._chunk_num assign[=] name[curr_chunk]
name[self]._chunk_size assign[=] name[rr].chunk_size
name[self]._total_chunks assign[=] call[name[self]._compute_total_chunks, parameter[name[rr].chunk_size]]
name[self]._next_integrity_chunk assign[=] name[rr].next_integrity_chunk
name[self]._outstanding_ops assign[=] constant[0]
name[self]._finalized assign[=] constant[True]
return[name[self]._ase.size]
if name[self]._ase.is_encrypted begin[:]
call[name[logger].debug, parameter[call[constant[cannot resume encrypted entity {}].format, parameter[name[self]._ase.path]]]]
return[constant[None]]
call[name[self]._allocate_disk_space, parameter[]]
if <ast.UnaryOp object at 0x7da18dc9b400> begin[:]
call[name[logger].warning, parameter[call[constant[download path {} does not exist].format, parameter[name[self].final_path]]]]
return[constant[None]]
if compare[name[self].hmac is_not constant[None]] begin[:]
<ast.Raise object at 0x7da18dc9a080>
variable[_fd_offset] assign[=] constant[0]
variable[_end_offset] assign[=] call[name[min], parameter[tuple[[<ast.BinOp object at 0x7da18dc9bd00>, <ast.Attribute object at 0x7da18dc99ed0>]]]]
if <ast.BoolOp object at 0x7da18dc99000> begin[:]
variable[_blocksize] assign[=] binary_operation[name[blobxfer].util.MEGABYTE <ast.LShift object at 0x7da2590d69e0> constant[2]]
call[name[logger].debug, parameter[call[constant[integrity checking existing file {} offset {} -> {}].format, parameter[name[self].final_path, name[self].view.fd_start, binary_operation[name[self].view.fd_start + name[_end_offset]]]]]]
with name[self]._hasher_lock begin[:]
with call[name[self].final_path.open, parameter[constant[rb]]] begin[:]
call[name[filedesc].seek, parameter[name[self].view.fd_start, constant[0]]]
while compare[name[_fd_offset] less[<] name[_end_offset]] begin[:]
if compare[binary_operation[name[_fd_offset] + name[_blocksize]] greater[>] name[_end_offset]] begin[:]
variable[_blocksize] assign[=] binary_operation[name[_end_offset] - name[_fd_offset]]
variable[_buf] assign[=] call[name[filedesc].read, parameter[name[_blocksize]]]
call[name[self].md5.update, parameter[name[_buf]]]
<ast.AugAssign object at 0x7da1b101a410>
<ast.Delete object at 0x7da2047ea8c0>
variable[hexdigest] assign[=] call[name[self].md5.hexdigest, parameter[]]
if compare[name[rr].md5hexdigest not_equal[!=] name[hexdigest]] begin[:]
call[name[logger].warning, parameter[call[constant[MD5 mismatch resume={} computed={} for {}].format, parameter[name[rr].md5hexdigest, name[hexdigest], name[self].final_path]]]]
name[self].md5 assign[=] call[name[blobxfer].util.new_md5_hasher, parameter[]]
return[constant[None]]
with name[self]._meta_lock begin[:]
name[self]._offset assign[=] name[_end_offset]
name[self]._chunk_num assign[=] name[curr_chunk]
name[self]._chunk_size assign[=] name[rr].chunk_size
name[self]._total_chunks assign[=] call[name[self]._compute_total_chunks, parameter[name[rr].chunk_size]]
name[self]._next_integrity_chunk assign[=] name[rr].next_integrity_chunk
name[self]._outstanding_ops assign[=] binary_operation[name[self]._total_chunks - name[self]._next_integrity_chunk]
call[name[logger].debug, parameter[call[constant[resuming file {} from byte={} chunk={} chunk_size={} total_chunks={} next_integrity_chunk={} outstanding_ops={}].format, parameter[name[self].final_path, name[self]._offset, name[self]._chunk_num, name[self]._chunk_size, name[self]._total_chunks, name[self]._next_integrity_chunk, name[self]._outstanding_ops]]]]
return[name[_end_offset]] | keyword[def] identifier[_resume] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_resume_mgr] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[_offset] > literal[int] keyword[or] identifier[self] . identifier[_finalized] :
keyword[return] keyword[None]
identifier[rr] = identifier[self] . identifier[_resume_mgr] . identifier[get_record] ( identifier[self] . identifier[_ase] )
keyword[if] identifier[rr] keyword[is] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[final_path] ))
keyword[return] keyword[None]
keyword[if] identifier[rr] . identifier[length] != identifier[self] . identifier[_ase] . identifier[size] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] (
identifier[rr] . identifier[length] , identifier[self] . identifier[_ase] . identifier[size] ))
keyword[return] keyword[None]
keyword[if] identifier[rr] . identifier[next_integrity_chunk] == literal[int] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[final_path] ))
keyword[return] keyword[None]
identifier[curr_chunk] = identifier[rr] . identifier[next_integrity_chunk]
keyword[if] identifier[rr] . identifier[completed] keyword[and] identifier[self] . identifier[final_path] . identifier[exists] ():
keyword[with] identifier[self] . identifier[_meta_lock] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[self] . identifier[final_path] ))
identifier[self] . identifier[_offset] = identifier[self] . identifier[_ase] . identifier[size]
identifier[self] . identifier[_chunk_num] = identifier[curr_chunk]
identifier[self] . identifier[_chunk_size] = identifier[rr] . identifier[chunk_size]
identifier[self] . identifier[_total_chunks] = identifier[self] . identifier[_compute_total_chunks] ( identifier[rr] . identifier[chunk_size] )
identifier[self] . identifier[_next_integrity_chunk] = identifier[rr] . identifier[next_integrity_chunk]
identifier[self] . identifier[_outstanding_ops] = literal[int]
identifier[self] . identifier[_finalized] = keyword[True]
keyword[return] identifier[self] . identifier[_ase] . identifier[size]
keyword[if] identifier[self] . identifier[_ase] . identifier[is_encrypted] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[self] . identifier[_ase] . identifier[path] ))
keyword[return] keyword[None]
identifier[self] . identifier[_allocate_disk_space] ()
keyword[if] keyword[not] identifier[self] . identifier[final_path] . identifier[exists] ():
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] (
identifier[self] . identifier[final_path] ))
keyword[return] keyword[None]
keyword[if] identifier[self] . identifier[hmac] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] ( identifier[self] . identifier[_ase] . identifier[path] ))
identifier[_fd_offset] = literal[int]
identifier[_end_offset] = identifier[min] (( identifier[curr_chunk] * identifier[rr] . identifier[chunk_size] , identifier[rr] . identifier[length] ))
keyword[if] identifier[self] . identifier[md5] keyword[is] keyword[not] keyword[None] keyword[and] identifier[curr_chunk] > literal[int] :
identifier[_blocksize] = identifier[blobxfer] . identifier[util] . identifier[MEGABYTE] << literal[int]
identifier[logger] . identifier[debug] (
literal[string] . identifier[format] (
identifier[self] . identifier[final_path] ,
identifier[self] . identifier[view] . identifier[fd_start] ,
identifier[self] . identifier[view] . identifier[fd_start] + identifier[_end_offset] )
)
keyword[with] identifier[self] . identifier[_hasher_lock] :
keyword[with] identifier[self] . identifier[final_path] . identifier[open] ( literal[string] ) keyword[as] identifier[filedesc] :
identifier[filedesc] . identifier[seek] ( identifier[self] . identifier[view] . identifier[fd_start] , literal[int] )
keyword[while] identifier[_fd_offset] < identifier[_end_offset] :
keyword[if] ( identifier[_fd_offset] + identifier[_blocksize] )> identifier[_end_offset] :
identifier[_blocksize] = identifier[_end_offset] - identifier[_fd_offset]
identifier[_buf] = identifier[filedesc] . identifier[read] ( identifier[_blocksize] )
identifier[self] . identifier[md5] . identifier[update] ( identifier[_buf] )
identifier[_fd_offset] += identifier[_blocksize]
keyword[del] identifier[_blocksize]
identifier[hexdigest] = identifier[self] . identifier[md5] . identifier[hexdigest] ()
keyword[if] identifier[rr] . identifier[md5hexdigest] != identifier[hexdigest] :
identifier[logger] . identifier[warning] (
literal[string] . identifier[format] (
identifier[rr] . identifier[md5hexdigest] , identifier[hexdigest] , identifier[self] . identifier[final_path] ))
identifier[self] . identifier[md5] = identifier[blobxfer] . identifier[util] . identifier[new_md5_hasher] ()
keyword[return] keyword[None]
keyword[with] identifier[self] . identifier[_meta_lock] :
identifier[self] . identifier[_offset] = identifier[_end_offset]
identifier[self] . identifier[_chunk_num] = identifier[curr_chunk]
identifier[self] . identifier[_chunk_size] = identifier[rr] . identifier[chunk_size]
identifier[self] . identifier[_total_chunks] = identifier[self] . identifier[_compute_total_chunks] ( identifier[rr] . identifier[chunk_size] )
identifier[self] . identifier[_next_integrity_chunk] = identifier[rr] . identifier[next_integrity_chunk]
identifier[self] . identifier[_outstanding_ops] =(
identifier[self] . identifier[_total_chunks] - identifier[self] . identifier[_next_integrity_chunk]
)
identifier[logger] . identifier[debug] (
( literal[string]
literal[string]
literal[string] ). identifier[format] (
identifier[self] . identifier[final_path] , identifier[self] . identifier[_offset] , identifier[self] . identifier[_chunk_num] ,
identifier[self] . identifier[_chunk_size] , identifier[self] . identifier[_total_chunks] ,
identifier[self] . identifier[_next_integrity_chunk] , identifier[self] . identifier[_outstanding_ops] ))
keyword[return] identifier[_end_offset] | def _resume(self):
# type: (Descriptor) -> int
'Resume a download, if possible\n :param Descriptor self: this\n :rtype: int or None\n :return: verified download offset\n '
if self._resume_mgr is None or self._offset > 0 or self._finalized:
return None # depends on [control=['if'], data=[]]
# check if path exists in resume db
rr = self._resume_mgr.get_record(self._ase)
if rr is None:
logger.debug('no resume record for {}'.format(self.final_path))
return None # depends on [control=['if'], data=[]]
# ensure lengths are the same
if rr.length != self._ase.size:
logger.warning('resume length mismatch {} -> {}'.format(rr.length, self._ase.size))
return None # depends on [control=['if'], data=[]]
# calculate current chunk and offset
if rr.next_integrity_chunk == 0:
logger.debug('nothing to resume for {}'.format(self.final_path))
return None # depends on [control=['if'], data=[]]
curr_chunk = rr.next_integrity_chunk
# set offsets if completed and the final path exists
if rr.completed and self.final_path.exists():
with self._meta_lock:
logger.debug('{} download already completed'.format(self.final_path))
self._offset = self._ase.size
self._chunk_num = curr_chunk
self._chunk_size = rr.chunk_size
self._total_chunks = self._compute_total_chunks(rr.chunk_size)
self._next_integrity_chunk = rr.next_integrity_chunk
self._outstanding_ops = 0
self._finalized = True # depends on [control=['with'], data=[]]
return self._ase.size # depends on [control=['if'], data=[]]
# encrypted files are not resumable due to hmac requirement
if self._ase.is_encrypted:
logger.debug('cannot resume encrypted entity {}'.format(self._ase.path))
return None # depends on [control=['if'], data=[]]
self._allocate_disk_space()
# check if final path exists
if not self.final_path.exists(): # noqa
logger.warning('download path {} does not exist'.format(self.final_path))
return None # depends on [control=['if'], data=[]]
if self.hmac is not None:
raise RuntimeError('unexpected hmac object for entity {}'.format(self._ase.path)) # depends on [control=['if'], data=[]]
# re-hash from 0 to offset if needed
_fd_offset = 0
_end_offset = min((curr_chunk * rr.chunk_size, rr.length))
if self.md5 is not None and curr_chunk > 0:
_blocksize = blobxfer.util.MEGABYTE << 2
logger.debug('integrity checking existing file {} offset {} -> {}'.format(self.final_path, self.view.fd_start, self.view.fd_start + _end_offset))
with self._hasher_lock:
with self.final_path.open('rb') as filedesc:
filedesc.seek(self.view.fd_start, 0)
while _fd_offset < _end_offset:
if _fd_offset + _blocksize > _end_offset:
_blocksize = _end_offset - _fd_offset # depends on [control=['if'], data=['_end_offset']]
_buf = filedesc.read(_blocksize)
self.md5.update(_buf)
_fd_offset += _blocksize # depends on [control=['while'], data=['_fd_offset', '_end_offset']] # depends on [control=['with'], data=['filedesc']] # depends on [control=['with'], data=[]]
del _blocksize
# compare hashes
hexdigest = self.md5.hexdigest()
if rr.md5hexdigest != hexdigest:
logger.warning('MD5 mismatch resume={} computed={} for {}'.format(rr.md5hexdigest, hexdigest, self.final_path))
# reset hasher
self.md5 = blobxfer.util.new_md5_hasher()
return None # depends on [control=['if'], data=['hexdigest']] # depends on [control=['if'], data=[]]
# set values from resume
with self._meta_lock:
self._offset = _end_offset
self._chunk_num = curr_chunk
self._chunk_size = rr.chunk_size
self._total_chunks = self._compute_total_chunks(rr.chunk_size)
self._next_integrity_chunk = rr.next_integrity_chunk
self._outstanding_ops = self._total_chunks - self._next_integrity_chunk
logger.debug('resuming file {} from byte={} chunk={} chunk_size={} total_chunks={} next_integrity_chunk={} outstanding_ops={}'.format(self.final_path, self._offset, self._chunk_num, self._chunk_size, self._total_chunks, self._next_integrity_chunk, self._outstanding_ops)) # depends on [control=['with'], data=[]]
return _end_offset |
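# A sketch of the resume-verification hashing loop in isolation: re-hash
# a file from the start of its view up to the verified end offset in
# fixed-size blocks, shrinking the final block. hashlib.md5 stands in
# for blobxfer.util.new_md5_hasher(), and the 4 MiB default matches
# MEGABYTE << 2 above.
import hashlib

def hash_prefix(path, fd_start, end_offset, blocksize=4 * 1024 * 1024):
    md5 = hashlib.md5()
    offset = 0
    with open(path, 'rb') as fd:
        fd.seek(fd_start, 0)
        while offset < end_offset:
            if offset + blocksize > end_offset:
                blocksize = end_offset - offset
            md5.update(fd.read(blocksize))
            offset += blocksize
    return md5.hexdigest()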
def _get_key_redis_key(bank, key):
'''
Return the Redis key given the bank name and the key name.
'''
opts = _get_redis_keys_opts()
return '{prefix}{separator}{bank}/{key}'.format(
prefix=opts['key_prefix'],
separator=opts['separator'],
bank=bank,
key=key
) | def function[_get_key_redis_key, parameter[bank, key]]:
constant[
Return the Redis key given the bank name and the key name.
]
variable[opts] assign[=] call[name[_get_redis_keys_opts], parameter[]]
return[call[constant[{prefix}{separator}{bank}/{key}].format, parameter[]]] | keyword[def] identifier[_get_key_redis_key] ( identifier[bank] , identifier[key] ):
literal[string]
identifier[opts] = identifier[_get_redis_keys_opts] ()
keyword[return] literal[string] . identifier[format] (
identifier[prefix] = identifier[opts] [ literal[string] ],
identifier[separator] = identifier[opts] [ literal[string] ],
identifier[bank] = identifier[bank] ,
identifier[key] = identifier[key]
) | def _get_key_redis_key(bank, key):
"""
Return the Redis key given the bank name and the key name.
"""
opts = _get_redis_keys_opts()
return '{prefix}{separator}{bank}/{key}'.format(prefix=opts['key_prefix'], separator=opts['separator'], bank=bank, key=key) |
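# Illustrative expansion of the key layout; the prefix and separator
# normally come from _get_redis_keys_opts(), so the values below are
# assumptions for the example only.
opts = {'key_prefix': '$KEY', 'separator': '@'}
redis_key = '{prefix}{separator}{bank}/{key}'.format(
    prefix=opts['key_prefix'],
    separator=opts['separator'],
    bank='minions/minion1/data',
    key='alpha')
assert redis_key == '$KEY@minions/minion1/data/alpha'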
def load_config(self):
"""Load the config from the config file or template."""
config = Config()
self.config_obj = config.load('awsshellrc')
self.config_section = self.config_obj['aws-shell']
self.model_completer.match_fuzzy = self.config_section.as_bool(
'match_fuzzy')
self.enable_vi_bindings = self.config_section.as_bool(
'enable_vi_bindings')
self.show_completion_columns = self.config_section.as_bool(
'show_completion_columns')
self.show_help = self.config_section.as_bool('show_help')
self.theme = self.config_section['theme'] | def function[load_config, parameter[self]]:
constant[Load the config from the config file or template.]
variable[config] assign[=] call[name[Config], parameter[]]
name[self].config_obj assign[=] call[name[config].load, parameter[constant[awsshellrc]]]
name[self].config_section assign[=] call[name[self].config_obj][constant[aws-shell]]
name[self].model_completer.match_fuzzy assign[=] call[name[self].config_section.as_bool, parameter[constant[match_fuzzy]]]
name[self].enable_vi_bindings assign[=] call[name[self].config_section.as_bool, parameter[constant[enable_vi_bindings]]]
name[self].show_completion_columns assign[=] call[name[self].config_section.as_bool, parameter[constant[show_completion_columns]]]
name[self].show_help assign[=] call[name[self].config_section.as_bool, parameter[constant[show_help]]]
name[self].theme assign[=] call[name[self].config_section][constant[theme]] | keyword[def] identifier[load_config] ( identifier[self] ):
literal[string]
identifier[config] = identifier[Config] ()
identifier[self] . identifier[config_obj] = identifier[config] . identifier[load] ( literal[string] )
identifier[self] . identifier[config_section] = identifier[self] . identifier[config_obj] [ literal[string] ]
identifier[self] . identifier[model_completer] . identifier[match_fuzzy] = identifier[self] . identifier[config_section] . identifier[as_bool] (
literal[string] )
identifier[self] . identifier[enable_vi_bindings] = identifier[self] . identifier[config_section] . identifier[as_bool] (
literal[string] )
identifier[self] . identifier[show_completion_columns] = identifier[self] . identifier[config_section] . identifier[as_bool] (
literal[string] )
identifier[self] . identifier[show_help] = identifier[self] . identifier[config_section] . identifier[as_bool] ( literal[string] )
identifier[self] . identifier[theme] = identifier[self] . identifier[config_section] [ literal[string] ] | def load_config(self):
"""Load the config from the config file or template."""
config = Config()
self.config_obj = config.load('awsshellrc')
self.config_section = self.config_obj['aws-shell']
self.model_completer.match_fuzzy = self.config_section.as_bool('match_fuzzy')
self.enable_vi_bindings = self.config_section.as_bool('enable_vi_bindings')
self.show_completion_columns = self.config_section.as_bool('show_completion_columns')
self.show_help = self.config_section.as_bool('show_help')
self.theme = self.config_section['theme'] |
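# The Config wrapper above is application code; this sketch shows the
# same reads against an awsshellrc-style section using the stdlib
# configparser, with illustrative option values.
import configparser

parser = configparser.ConfigParser()
parser.read_string("""
[aws-shell]
match_fuzzy = True
enable_vi_bindings = False
show_completion_columns = False
show_help = True
theme = vim
""")
section = parser['aws-shell']
match_fuzzy = section.getboolean('match_fuzzy')
enable_vi_bindings = section.getboolean('enable_vi_bindings')
theme = section['theme']
assert match_fuzzy is True and theme == 'vim'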
def get(self, *args, **kwargs):
"""
Get from the interface collection. It is more accurate to use
kwargs to specify an attribute of the sub interface to retrieve
rather than using an index value. If retrieving using an index,
        the collection will check vlan interfaces first and standard
        interfaces second. In most cases, if VLANs exist, standard
        interface definitions will be nested below the VLAN, with the
        exception of Inline Interfaces which may have both.
:param int args: index to retrieve
:param kwargs: key value for sub interface
:rtype: SubInterface or None
"""
for collection in self.items:
if args:
index = args[0]
if len(collection) and (index <= len(collection)-1):
return collection[index]
else:
# Collection with get
result = collection.get(**kwargs)
if result is not None:
return result
return None | def function[get, parameter[self]]:
constant[
Get from the interface collection. It is more accurate to use
kwargs to specify an attribute of the sub interface to retrieve
rather than using an index value. If retrieving using an index,
        the collection will check vlan interfaces first and standard
        interfaces second. In most cases, if VLANs exist, standard
        interface definitions will be nested below the VLAN, with the
        exception of Inline Interfaces which may have both.
:param int args: index to retrieve
:param kwargs: key value for sub interface
:rtype: SubInterface or None
]
for taget[name[collection]] in starred[name[self].items] begin[:]
if name[args] begin[:]
variable[index] assign[=] call[name[args]][constant[0]]
if <ast.BoolOp object at 0x7da1b1bc2230> begin[:]
return[call[name[collection]][name[index]]]
return[constant[None]] | keyword[def] identifier[get] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[collection] keyword[in] identifier[self] . identifier[items] :
keyword[if] identifier[args] :
identifier[index] = identifier[args] [ literal[int] ]
keyword[if] identifier[len] ( identifier[collection] ) keyword[and] ( identifier[index] <= identifier[len] ( identifier[collection] )- literal[int] ):
keyword[return] identifier[collection] [ identifier[index] ]
keyword[else] :
identifier[result] = identifier[collection] . identifier[get] (** identifier[kwargs] )
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[result]
keyword[return] keyword[None] | def get(self, *args, **kwargs):
"""
Get from the interface collection. It is more accurate to use
kwargs to specify an attribute of the sub interface to retrieve
rather than using an index value. If retrieving using an index,
        the collection will check vlan interfaces first and standard
        interfaces second. In most cases, if VLANs exist, standard
        interface definitions will be nested below the VLAN, with the
        exception of Inline Interfaces which may have both.
:param int args: index to retrieve
:param kwargs: key value for sub interface
:rtype: SubInterface or None
"""
for collection in self.items:
if args:
index = args[0]
if len(collection) and index <= len(collection) - 1:
return collection[index] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Collection with get
result = collection.get(**kwargs)
if result is not None:
return result # depends on [control=['if'], data=['result']] # depends on [control=['for'], data=['collection']]
return None |
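# A plain-data sketch of the lookup order above: an integer argument
# indexes into the first collection large enough, while keyword
# arguments fall through to a per-item match. Lists of dicts stand in
# for the SubInterface collections.
def get_from_collections(collections, *args, **kwargs):
    for collection in collections:
        if args:
            index = args[0]
            if len(collection) and index <= len(collection) - 1:
                return collection[index]
        else:
            for item in collection:
                if all(item.get(k) == v for k, v in kwargs.items()):
                    return item
    return None

vlan_ifaces = [{'vlan_id': 10}]
phys_ifaces = [{'name': 'eth0'}]
assert get_from_collections([vlan_ifaces, phys_ifaces], 0) == {'vlan_id': 10}
assert get_from_collections([vlan_ifaces, phys_ifaces], name='eth0') == {'name': 'eth0'}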
def lm_ffinal(freqs, damping_times, modes):
"""Return the maximum f_final of the modes given, with f_final the frequency
at which the amplitude falls to 1/1000 of the peak amplitude
"""
f_max = {}
for lmn in modes:
l, m, nmodes = int(lmn[0]), int(lmn[1]), int(lmn[2])
for n in range(nmodes):
f_max['%d%d%d' %(l,m,n)] = qnm_freq_decay(freqs['%d%d%d' %(l,m,n)],
damping_times['%d%d%d' %(l,m,n)], 1./1000)
f_final = max(f_max.values())
if f_final > max_freq:
f_final = max_freq
return f_final | def function[lm_ffinal, parameter[freqs, damping_times, modes]]:
constant[Return the maximum f_final of the modes given, with f_final the frequency
at which the amplitude falls to 1/1000 of the peak amplitude
]
variable[f_max] assign[=] dictionary[[], []]
for taget[name[lmn]] in starred[name[modes]] begin[:]
<ast.Tuple object at 0x7da20c6e58a0> assign[=] tuple[[<ast.Call object at 0x7da20c6e4ca0>, <ast.Call object at 0x7da20c6e6200>, <ast.Call object at 0x7da20c6e5030>]]
for taget[name[n]] in starred[call[name[range], parameter[name[nmodes]]]] begin[:]
call[name[f_max]][binary_operation[constant[%d%d%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e4c40>, <ast.Name object at 0x7da20c6e5d80>, <ast.Name object at 0x7da20c6e7e20>]]]] assign[=] call[name[qnm_freq_decay], parameter[call[name[freqs]][binary_operation[constant[%d%d%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e5840>, <ast.Name object at 0x7da20c6e69e0>, <ast.Name object at 0x7da20c6e74c0>]]]], call[name[damping_times]][binary_operation[constant[%d%d%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e4850>, <ast.Name object at 0x7da20c6e7760>, <ast.Name object at 0x7da20c6e6560>]]]], binary_operation[constant[1.0] / constant[1000]]]]
variable[f_final] assign[=] call[name[max], parameter[call[name[f_max].values, parameter[]]]]
if compare[name[f_final] greater[>] name[max_freq]] begin[:]
variable[f_final] assign[=] name[max_freq]
return[name[f_final]] | keyword[def] identifier[lm_ffinal] ( identifier[freqs] , identifier[damping_times] , identifier[modes] ):
literal[string]
identifier[f_max] ={}
keyword[for] identifier[lmn] keyword[in] identifier[modes] :
identifier[l] , identifier[m] , identifier[nmodes] = identifier[int] ( identifier[lmn] [ literal[int] ]), identifier[int] ( identifier[lmn] [ literal[int] ]), identifier[int] ( identifier[lmn] [ literal[int] ])
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[nmodes] ):
identifier[f_max] [ literal[string] %( identifier[l] , identifier[m] , identifier[n] )]= identifier[qnm_freq_decay] ( identifier[freqs] [ literal[string] %( identifier[l] , identifier[m] , identifier[n] )],
identifier[damping_times] [ literal[string] %( identifier[l] , identifier[m] , identifier[n] )], literal[int] / literal[int] )
identifier[f_final] = identifier[max] ( identifier[f_max] . identifier[values] ())
keyword[if] identifier[f_final] > identifier[max_freq] :
identifier[f_final] = identifier[max_freq]
keyword[return] identifier[f_final] | def lm_ffinal(freqs, damping_times, modes):
"""Return the maximum f_final of the modes given, with f_final the frequency
at which the amplitude falls to 1/1000 of the peak amplitude
"""
f_max = {}
for lmn in modes:
(l, m, nmodes) = (int(lmn[0]), int(lmn[1]), int(lmn[2]))
for n in range(nmodes):
f_max['%d%d%d' % (l, m, n)] = qnm_freq_decay(freqs['%d%d%d' % (l, m, n)], damping_times['%d%d%d' % (l, m, n)], 1.0 / 1000) # depends on [control=['for'], data=['n']] # depends on [control=['for'], data=['lmn']]
f_final = max(f_max.values())
if f_final > max_freq:
f_final = max_freq # depends on [control=['if'], data=['f_final', 'max_freq']]
return f_final |
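# The modes argument packs (l, m, nmodes) into three-character strings,
# so '222' expands to the keys '220' and '221' used to index freqs and
# damping_times. A hedged sketch of just that expansion:
modes = ['222', '331']
keys = []
for lmn in modes:
    l, m, nmodes = int(lmn[0]), int(lmn[1]), int(lmn[2])
    keys += ['%d%d%d' % (l, m, n) for n in range(nmodes)]
assert keys == ['220', '221', '330']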
def api_version(created_ver, last_changed_ver, return_value_ver):
"""Version check decorator. Currently only checks Bigger Than."""
def api_min_version_decorator(function):
def wrapper(function, self, *args, **kwargs):
if not self.version_check_mode == "none":
if self.version_check_mode == "created":
version = created_ver
else:
version = bigger_version(last_changed_ver, return_value_ver)
major, minor, patch = parse_version_string(version)
if major > self.mastodon_major:
raise MastodonVersionError("Version check failed (Need version " + version + ")")
elif major == self.mastodon_major and minor > self.mastodon_minor:
print(self.mastodon_minor)
raise MastodonVersionError("Version check failed (Need version " + version + ")")
elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch:
raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")")
return function(self, *args, **kwargs)
function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + created_ver + ", last changed: Mastodon v" + last_changed_ver + "*"
return decorate(function, wrapper)
return api_min_version_decorator | def function[api_version, parameter[created_ver, last_changed_ver, return_value_ver]]:
constant[Version check decorator. Currently only checks Bigger Than.]
def function[api_min_version_decorator, parameter[function]]:
def function[wrapper, parameter[function, self]]:
if <ast.UnaryOp object at 0x7da20e9b20b0> begin[:]
if compare[name[self].version_check_mode equal[==] constant[created]] begin[:]
variable[version] assign[=] name[created_ver]
<ast.Tuple object at 0x7da20e957190> assign[=] call[name[parse_version_string], parameter[name[version]]]
if compare[name[major] greater[>] name[self].mastodon_major] begin[:]
<ast.Raise object at 0x7da20e957fa0>
return[call[name[function], parameter[name[self], <ast.Starred object at 0x7da20e9558a0>]]]
name[function].__doc__ assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[function].__doc__ + constant[
*Added: Mastodon v]] + name[created_ver]] + constant[, last changed: Mastodon v]] + name[last_changed_ver]] + constant[*]]
return[call[name[decorate], parameter[name[function], name[wrapper]]]]
return[name[api_min_version_decorator]] | keyword[def] identifier[api_version] ( identifier[created_ver] , identifier[last_changed_ver] , identifier[return_value_ver] ):
literal[string]
keyword[def] identifier[api_min_version_decorator] ( identifier[function] ):
keyword[def] identifier[wrapper] ( identifier[function] , identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] identifier[self] . identifier[version_check_mode] == literal[string] :
keyword[if] identifier[self] . identifier[version_check_mode] == literal[string] :
identifier[version] = identifier[created_ver]
keyword[else] :
identifier[version] = identifier[bigger_version] ( identifier[last_changed_ver] , identifier[return_value_ver] )
identifier[major] , identifier[minor] , identifier[patch] = identifier[parse_version_string] ( identifier[version] )
keyword[if] identifier[major] > identifier[self] . identifier[mastodon_major] :
keyword[raise] identifier[MastodonVersionError] ( literal[string] + identifier[version] + literal[string] )
keyword[elif] identifier[major] == identifier[self] . identifier[mastodon_major] keyword[and] identifier[minor] > identifier[self] . identifier[mastodon_minor] :
identifier[print] ( identifier[self] . identifier[mastodon_minor] )
keyword[raise] identifier[MastodonVersionError] ( literal[string] + identifier[version] + literal[string] )
keyword[elif] identifier[major] == identifier[self] . identifier[mastodon_major] keyword[and] identifier[minor] == identifier[self] . identifier[mastodon_minor] keyword[and] identifier[patch] > identifier[self] . identifier[mastodon_patch] :
keyword[raise] identifier[MastodonVersionError] ( literal[string] + identifier[version] + literal[string] + identifier[str] ( identifier[self] . identifier[mastodon_patch] )+ literal[string] )
keyword[return] identifier[function] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[function] . identifier[__doc__] = identifier[function] . identifier[__doc__] + literal[string] + identifier[created_ver] + literal[string] + identifier[last_changed_ver] + literal[string]
keyword[return] identifier[decorate] ( identifier[function] , identifier[wrapper] )
keyword[return] identifier[api_min_version_decorator] | def api_version(created_ver, last_changed_ver, return_value_ver):
"""Version check decorator. Currently only checks Bigger Than."""
def api_min_version_decorator(function):
def wrapper(function, self, *args, **kwargs):
if not self.version_check_mode == 'none':
if self.version_check_mode == 'created':
version = created_ver # depends on [control=['if'], data=[]]
else:
version = bigger_version(last_changed_ver, return_value_ver)
(major, minor, patch) = parse_version_string(version)
if major > self.mastodon_major:
raise MastodonVersionError('Version check failed (Need version ' + version + ')') # depends on [control=['if'], data=[]]
elif major == self.mastodon_major and minor > self.mastodon_minor:
print(self.mastodon_minor)
raise MastodonVersionError('Version check failed (Need version ' + version + ')') # depends on [control=['if'], data=[]]
elif major == self.mastodon_major and minor == self.mastodon_minor and (patch > self.mastodon_patch):
raise MastodonVersionError('Version check failed (Need version ' + version + ', patch is ' + str(self.mastodon_patch) + ')') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return function(self, *args, **kwargs)
function.__doc__ = function.__doc__ + '\n\n *Added: Mastodon v' + created_ver + ', last changed: Mastodon v' + last_changed_ver + '*'
return decorate(function, wrapper)
return api_min_version_decorator |
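A hedged sketch of applying the decorator, assuming everything lives in one module. The helpers below are stand-ins with the semantics the wrapper expects; in the real module they are defined alongside api_version, and decorate is assumed to come from the decorator package.
from decorator import decorate  # assumed dependency of the module above

class MastodonVersionError(Exception):
    pass

def parse_version_string(version):
    return tuple(int(part) for part in version.split("."))

def bigger_version(a, b):
    return a if parse_version_string(a) >= parse_version_string(b) else b

class Client:
    version_check_mode = "created"
    mastodon_major, mastodon_minor, mastodon_patch = 2, 9, 3

    @api_version("1.0.0", "2.7.0", "2.7.0")
    def account(self, id):
        """Fetch an account by id."""
        return {"id": id}

print(Client().account(1))  # {'id': 1} -- passes: server 2.9.3 >= created 1.0.0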
def ARPLimitExceeded_originator_switch_info_switchIpV4Address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ARPLimitExceeded = ET.SubElement(config, "ARPLimitExceeded", xmlns="http://brocade.com/ns/brocade-notification-stream")
originator_switch_info = ET.SubElement(ARPLimitExceeded, "originator-switch-info")
switchIpV4Address = ET.SubElement(originator_switch_info, "switchIpV4Address")
switchIpV4Address.text = kwargs.pop('switchIpV4Address')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[ARPLimitExceeded_originator_switch_info_switchIpV4Address, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[ARPLimitExceeded] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ARPLimitExceeded]]]
variable[originator_switch_info] assign[=] call[name[ET].SubElement, parameter[name[ARPLimitExceeded], constant[originator-switch-info]]]
variable[switchIpV4Address] assign[=] call[name[ET].SubElement, parameter[name[originator_switch_info], constant[switchIpV4Address]]]
name[switchIpV4Address].text assign[=] call[name[kwargs].pop, parameter[constant[switchIpV4Address]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[ARPLimitExceeded_originator_switch_info_switchIpV4Address] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ARPLimitExceeded] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[originator_switch_info] = identifier[ET] . identifier[SubElement] ( identifier[ARPLimitExceeded] , literal[string] )
identifier[switchIpV4Address] = identifier[ET] . identifier[SubElement] ( identifier[originator_switch_info] , literal[string] )
identifier[switchIpV4Address] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def ARPLimitExceeded_originator_switch_info_switchIpV4Address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
ARPLimitExceeded = ET.SubElement(config, 'ARPLimitExceeded', xmlns='http://brocade.com/ns/brocade-notification-stream')
originator_switch_info = ET.SubElement(ARPLimitExceeded, 'originator-switch-info')
switchIpV4Address = ET.SubElement(originator_switch_info, 'switchIpV4Address')
switchIpV4Address.text = kwargs.pop('switchIpV4Address')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
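A quick sketch of the XML such a builder produces, with ET assumed to be xml.etree.ElementTree and the callback assumed to just serialize the tree.
import xml.etree.ElementTree as ET

config = ET.Element("config")
node = ET.SubElement(config, "ARPLimitExceeded",
                     xmlns="http://brocade.com/ns/brocade-notification-stream")
info = ET.SubElement(node, "originator-switch-info")
addr = ET.SubElement(info, "switchIpV4Address")
addr.text = "10.24.81.5"
print(ET.tostring(config).decode())
# <config><ARPLimitExceeded xmlns="..."><originator-switch-info>
# <switchIpV4Address>10.24.81.5</switchIpV4Address></originator-switch-info>
# </ARPLimitExceeded></config>   (namespace attribute abbreviated)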
def signature(self):
''' Return the function signature
Returns:
(str, list(str)): name, list of parameter types
'''
return self.name, [str(x.type) for x in self.elems] | def function[signature, parameter[self]]:
constant[ Return the function signature
Returns:
(str, list(str)): name, list of parameter types
]
return[tuple[[<ast.Attribute object at 0x7da18c4cd510>, <ast.ListComp object at 0x7da18c4ccbe0>]]] | keyword[def] identifier[signature] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[name] ,[ identifier[str] ( identifier[x] . identifier[type] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[elems] ] | def signature(self):
""" Return the function signature
Returns:
(str, list(str)): name, list of parameter types
"""
return (self.name, [str(x.type) for x in self.elems]) |
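A tiny illustration with stand-in objects; the .name/.elems attribute shapes are assumptions read off the method body above.
from types import SimpleNamespace

ev = SimpleNamespace(name='Transfer',
                     elems=[SimpleNamespace(type='address'),
                            SimpleNamespace(type='uint256')])
print(signature(ev))  # ('Transfer', ['address', 'uint256'])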
def add_config(self, config, config_filename):
"""
Updates the content types database with the given configuration.
:param config:
The configuration dictionary.
:param config_filename:
The path of the configuration file.
"""
content_types = config['content-types']
comment_groups = config['comment-groups']
self._comment_groups.update(comment_groups)
self._content_types.update(content_types)
for content_type, patterns in content_types.items():
if not patterns:
raise ValueError('''error: config parse error: \
%s: Missing pattern for content type - `%s`"''' % (config_filename, content_type))
for pattern in patterns:
first_character = pattern[0]
last_character = pattern[-1]
if first_character == '.':
# Extension map.
pattern = extension_case_transform_func(pattern)
self._extension_map[pattern] = content_type
elif first_character == '/' and last_character == '/':
# Regular expression map.
self._regexp_map[re.compile(pattern[1:-1])] = content_type
else:
# Filename map.
self._filename_map[pattern] = content_type | def function[add_config, parameter[self, config, config_filename]]:
constant[
Updates the content types database with the given configuration.
:param config:
The configuration dictionary.
:param config_filename:
The path of the configuration file.
]
variable[content_types] assign[=] call[name[config]][constant[content-types]]
variable[comment_groups] assign[=] call[name[config]][constant[comment-groups]]
call[name[self]._comment_groups.update, parameter[name[comment_groups]]]
call[name[self]._content_types.update, parameter[name[content_types]]]
for taget[tuple[[<ast.Name object at 0x7da1b1648790>, <ast.Name object at 0x7da1b164b730>]]] in starred[call[name[content_types].items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da1b164bc10> begin[:]
<ast.Raise object at 0x7da1b16493f0>
for taget[name[pattern]] in starred[name[patterns]] begin[:]
variable[first_character] assign[=] call[name[pattern]][constant[0]]
variable[last_character] assign[=] call[name[pattern]][<ast.UnaryOp object at 0x7da1b1649ff0>]
if compare[name[first_character] equal[==] constant[.]] begin[:]
variable[pattern] assign[=] call[name[extension_case_transform_func], parameter[name[pattern]]]
call[name[self]._extension_map][name[pattern]] assign[=] name[content_type] | keyword[def] identifier[add_config] ( identifier[self] , identifier[config] , identifier[config_filename] ):
literal[string]
identifier[content_types] = identifier[config] [ literal[string] ]
identifier[comment_groups] = identifier[config] [ literal[string] ]
identifier[self] . identifier[_comment_groups] . identifier[update] ( identifier[comment_groups] )
identifier[self] . identifier[_content_types] . identifier[update] ( identifier[content_types] )
keyword[for] identifier[content_type] , identifier[patterns] keyword[in] identifier[content_types] . identifier[items] ():
keyword[if] keyword[not] identifier[patterns] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[config_filename] , identifier[content_type] ))
keyword[for] identifier[pattern] keyword[in] identifier[patterns] :
identifier[first_character] = identifier[pattern] [ literal[int] ]
identifier[last_character] = identifier[pattern] [- literal[int] ]
keyword[if] identifier[first_character] == literal[string] :
identifier[pattern] = identifier[extension_case_transform_func] ( identifier[pattern] )
identifier[self] . identifier[_extension_map] [ identifier[pattern] ]= identifier[content_type]
keyword[elif] identifier[first_character] == literal[string] keyword[and] identifier[last_character] == literal[string] :
identifier[self] . identifier[_regexp_map] [ identifier[re] . identifier[compile] ( identifier[pattern] [ literal[int] :- literal[int] ])]= identifier[content_type]
keyword[else] :
identifier[self] . identifier[_filename_map] [ identifier[pattern] ]= identifier[content_type] | def add_config(self, config, config_filename):
"""
Updates the content types database with the given configuration.
:param config:
The configuration dictionary.
:param config_filename:
The path of the configuration file.
"""
content_types = config['content-types']
comment_groups = config['comment-groups']
self._comment_groups.update(comment_groups)
self._content_types.update(content_types)
for (content_type, patterns) in content_types.items():
if not patterns:
raise ValueError('error: config parse error: %s: Missing pattern for content type - `%s`"' % (config_filename, content_type)) # depends on [control=['if'], data=[]]
for pattern in patterns:
first_character = pattern[0]
last_character = pattern[-1]
if first_character == '.':
# Extension map.
pattern = extension_case_transform_func(pattern)
self._extension_map[pattern] = content_type # depends on [control=['if'], data=[]]
elif first_character == '/' and last_character == '/':
# Regular expression map.
self._regexp_map[re.compile(pattern[1:-1])] = content_type # depends on [control=['if'], data=[]]
else:
# Filename map.
self._filename_map[pattern] = content_type # depends on [control=['for'], data=['pattern']] # depends on [control=['for'], data=[]] |
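A runnable sketch of feeding add_config. The pattern conventions follow the branches above (a leading dot marks an extension, /.../ a regex, anything else a literal filename); extension_case_transform_func is a module-level helper not shown here, so a lower-casing stand-in is assumed, and _Db is a minimal host object.
import re

extension_case_transform_func = str.lower  # assumption: real helper normalizes case

class _Db:
    def __init__(self):
        self._comment_groups, self._content_types = {}, {}
        self._extension_map, self._regexp_map, self._filename_map = {}, {}, {}
    add_config = add_config  # reuse the method defined above

db = _Db()
db.add_config({
    'comment-groups': {'hash': [['#']]},
    'content-types': {
        'python': ['.py', '/^wscript$/'],
        'makefile': ['Makefile', '.mk'],
    },
}, 'content-types.yaml')
print(db._extension_map)  # {'.py': 'python', '.mk': 'makefile'}
print(db._filename_map)   # {'Makefile': 'makefile'}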
def plot_eigs(self, colorbar=True, cb_orientation='vertical',
tick_interval=[60, 60], minor_tick_interval=[20, 20],
xlabel='Longitude', ylabel='Latitude',
axes_labelsize=9, tick_labelsize=8, show=True, fname=None,
**kwargs):
"""
Plot the three eigenvalues of the tensor.
Usage
-----
x.plot_eigs([tick_interval, minor_tick_interval, xlabel, ylabel,
colorbar, cb_orientation, cb_label, axes_labelsize,
tick_labelsize, show, fname, **kwargs])
Parameters
----------
tick_interval : list or tuple, optional, default = [60, 60]
Intervals to use when plotting the major x and y ticks. If set to
None, major ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [20, 20]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
xlabel : str, optional, default = 'Longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'Latitude'
Label for the latitude axis.
colorbar : bool, optional, default = True
If True, plot a colorbar.
cb_orientation : str, optional, default = 'vertical'
Orientation of the colorbar: either 'vertical' or 'horizontal'.
cb_label : str, optional, default = None
Text label for the colorbar.
axes_labelsize : int, optional, default = 9
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
kwargs : optional
Keyword arguments that will be sent to the SHGrid.plot()
and plt.imshow() methods.
"""
if colorbar is True:
if cb_orientation == 'horizontal':
scale = 2.3
else:
scale = 1.4
else:
scale = 1.65
figsize = (_mpl.rcParams['figure.figsize'][0],
_mpl.rcParams['figure.figsize'][0] * scale)
fig, ax = _plt.subplots(3, 1, figsize=figsize)
self.plot_eig1(colorbar=colorbar, cb_orientation=cb_orientation,
ax=ax.flat[0], xlabel=xlabel, ylabel=ylabel,
tick_interval=tick_interval,
axes_labelsize=axes_labelsize,
tick_labelsize=tick_labelsize,
minor_tick_interval=minor_tick_interval,
**kwargs)
self.plot_eig2(colorbar=colorbar, cb_orientation=cb_orientation,
ax=ax.flat[1], xlabel=xlabel, ylabel=ylabel,
tick_interval=tick_interval,
axes_labelsize=axes_labelsize,
tick_labelsize=tick_labelsize,
minor_tick_interval=minor_tick_interval,
**kwargs)
self.plot_eig3(colorbar=colorbar, cb_orientation=cb_orientation,
ax=ax.flat[2], xlabel=xlabel, ylabel=ylabel,
tick_interval=tick_interval,
axes_labelsize=axes_labelsize,
tick_labelsize=tick_labelsize,
minor_tick_interval=minor_tick_interval,
**kwargs)
fig.tight_layout(pad=0.5)
if show:
fig.show()
if fname is not None:
fig.savefig(fname)
return fig, ax | def function[plot_eigs, parameter[self, colorbar, cb_orientation, tick_interval, minor_tick_interval, xlabel, ylabel, axes_labelsize, tick_labelsize, show, fname]]:
constant[
Plot the three eigenvalues of the tensor.
Usage
-----
x.plot_eigs([tick_interval, minor_tick_interval, xlabel, ylabel,
colorbar, cb_orientation, cb_label, axes_labelsize,
tick_labelsize, show, fname, **kwargs])
Parameters
----------
tick_interval : list or tuple, optional, default = [60, 60]
Intervals to use when plotting the major x and y ticks. If set to
None, major ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [20, 20]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
xlabel : str, optional, default = 'Longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'Latitude'
Label for the latitude axis.
colorbar : bool, optional, default = True
If True, plot a colorbar.
cb_orientation : str, optional, default = 'vertical'
Orientation of the colorbar: either 'vertical' or 'horizontal'.
cb_label : str, optional, default = None
Text label for the colorbar.
axes_labelsize : int, optional, default = 9
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
kwargs : optional
Keyword arguments that will be sent to the SHGrid.plot()
and plt.imshow() methods.
]
if compare[name[colorbar] is constant[True]] begin[:]
if compare[name[cb_orientation] equal[==] constant[horizontal]] begin[:]
variable[scale] assign[=] constant[2.3]
variable[figsize] assign[=] tuple[[<ast.Subscript object at 0x7da20c6a9900>, <ast.BinOp object at 0x7da20c6a9c30>]]
<ast.Tuple object at 0x7da20c6a8d60> assign[=] call[name[_plt].subplots, parameter[constant[3], constant[1]]]
call[name[self].plot_eig1, parameter[]]
call[name[self].plot_eig2, parameter[]]
call[name[self].plot_eig3, parameter[]]
call[name[fig].tight_layout, parameter[]]
if name[show] begin[:]
call[name[fig].show, parameter[]]
if compare[name[fname] is_not constant[None]] begin[:]
call[name[fig].savefig, parameter[name[fname]]]
return[tuple[[<ast.Name object at 0x7da18bcc9240>, <ast.Name object at 0x7da18bcc8100>]]] | keyword[def] identifier[plot_eigs] ( identifier[self] , identifier[colorbar] = keyword[True] , identifier[cb_orientation] = literal[string] ,
identifier[tick_interval] =[ literal[int] , literal[int] ], identifier[minor_tick_interval] =[ literal[int] , literal[int] ],
identifier[xlabel] = literal[string] , identifier[ylabel] = literal[string] ,
identifier[axes_labelsize] = literal[int] , identifier[tick_labelsize] = literal[int] , identifier[show] = keyword[True] , identifier[fname] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[colorbar] keyword[is] keyword[True] :
keyword[if] identifier[cb_orientation] == literal[string] :
identifier[scale] = literal[int]
keyword[else] :
identifier[scale] = literal[int]
keyword[else] :
identifier[scale] = literal[int]
identifier[figsize] =( identifier[_mpl] . identifier[rcParams] [ literal[string] ][ literal[int] ],
identifier[_mpl] . identifier[rcParams] [ literal[string] ][ literal[int] ]* identifier[scale] )
identifier[fig] , identifier[ax] = identifier[_plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] = identifier[figsize] )
identifier[self] . identifier[plot_eig1] ( identifier[colorbar] = identifier[colorbar] , identifier[cb_orientation] = identifier[cb_orientation] ,
identifier[ax] = identifier[ax] . identifier[flat] [ literal[int] ], identifier[xlabel] = identifier[xlabel] , identifier[ylabel] = identifier[ylabel] ,
identifier[tick_interval] = identifier[tick_interval] ,
identifier[axes_labelsize] = identifier[axes_labelsize] ,
identifier[tick_labelsize] = identifier[tick_labelsize] ,
identifier[minor_tick_interval] = identifier[minor_tick_interval] ,
** identifier[kwargs] )
identifier[self] . identifier[plot_eig2] ( identifier[colorbar] = identifier[colorbar] , identifier[cb_orientation] = identifier[cb_orientation] ,
identifier[ax] = identifier[ax] . identifier[flat] [ literal[int] ], identifier[xlabel] = identifier[xlabel] , identifier[ylabel] = identifier[ylabel] ,
identifier[tick_interval] = identifier[tick_interval] ,
identifier[axes_labelsize] = identifier[axes_labelsize] ,
identifier[tick_labelsize] = identifier[tick_labelsize] ,
identifier[minor_tick_interval] = identifier[minor_tick_interval] ,
** identifier[kwargs] )
identifier[self] . identifier[plot_eig3] ( identifier[colorbar] = identifier[colorbar] , identifier[cb_orientation] = identifier[cb_orientation] ,
identifier[ax] = identifier[ax] . identifier[flat] [ literal[int] ], identifier[xlabel] = identifier[xlabel] , identifier[ylabel] = identifier[ylabel] ,
identifier[tick_interval] = identifier[tick_interval] ,
identifier[axes_labelsize] = identifier[axes_labelsize] ,
identifier[tick_labelsize] = identifier[tick_labelsize] ,
identifier[minor_tick_interval] = identifier[minor_tick_interval] ,
** identifier[kwargs] )
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
keyword[if] identifier[show] :
identifier[fig] . identifier[show] ()
keyword[if] identifier[fname] keyword[is] keyword[not] keyword[None] :
identifier[fig] . identifier[savefig] ( identifier[fname] )
keyword[return] identifier[fig] , identifier[ax] | def plot_eigs(self, colorbar=True, cb_orientation='vertical', tick_interval=[60, 60], minor_tick_interval=[20, 20], xlabel='Longitude', ylabel='Latitude', axes_labelsize=9, tick_labelsize=8, show=True, fname=None, **kwargs):
"""
Plot the three eigenvalues of the tensor.
Usage
-----
x.plot_eigs([tick_interval, minor_tick_interval, xlabel, ylabel,
colorbar, cb_orientation, cb_label, axes_labelsize,
tick_labelsize, show, fname, **kwargs])
Parameters
----------
tick_interval : list or tuple, optional, default = [60, 60]
Intervals to use when plotting the major x and y ticks. If set to
None, major ticks will not be plotted.
minor_tick_interval : list or tuple, optional, default = [20, 20]
Intervals to use when plotting the minor x and y ticks. If set to
None, minor ticks will not be plotted.
xlabel : str, optional, default = 'Longitude'
Label for the longitude axis.
ylabel : str, optional, default = 'Latitude'
Label for the latitude axis.
colorbar : bool, optional, default = True
If True, plot a colorbar.
cb_orientation : str, optional, default = 'vertical'
Orientation of the colorbar: either 'vertical' or 'horizontal'.
cb_label : str, optional, default = None
Text label for the colorbar.
axes_labelsize : int, optional, default = 9
The font size for the x and y axes labels.
tick_labelsize : int, optional, default = 8
The font size for the x and y tick labels.
show : bool, optional, default = True
If True, plot the image to the screen.
fname : str, optional, default = None
If present, and if axes is not specified, save the image to the
specified file.
kwargs : optional
Keyword arguments that will be sent to the SHGrid.plot()
and plt.imshow() methods.
"""
if colorbar is True:
if cb_orientation == 'horizontal':
scale = 2.3 # depends on [control=['if'], data=[]]
else:
scale = 1.4 # depends on [control=['if'], data=[]]
else:
scale = 1.65
figsize = (_mpl.rcParams['figure.figsize'][0], _mpl.rcParams['figure.figsize'][0] * scale)
(fig, ax) = _plt.subplots(3, 1, figsize=figsize)
self.plot_eig1(colorbar=colorbar, cb_orientation=cb_orientation, ax=ax.flat[0], xlabel=xlabel, ylabel=ylabel, tick_interval=tick_interval, axes_labelsize=axes_labelsize, tick_labelsize=tick_labelsize, minor_tick_interval=minor_tick_interval, **kwargs)
self.plot_eig2(colorbar=colorbar, cb_orientation=cb_orientation, ax=ax.flat[1], xlabel=xlabel, ylabel=ylabel, tick_interval=tick_interval, axes_labelsize=axes_labelsize, tick_labelsize=tick_labelsize, minor_tick_interval=minor_tick_interval, **kwargs)
self.plot_eig3(colorbar=colorbar, cb_orientation=cb_orientation, ax=ax.flat[2], xlabel=xlabel, ylabel=ylabel, tick_interval=tick_interval, axes_labelsize=axes_labelsize, tick_labelsize=tick_labelsize, minor_tick_interval=minor_tick_interval, **kwargs)
fig.tight_layout(pad=0.5)
if show:
fig.show() # depends on [control=['if'], data=[]]
if fname is not None:
fig.savefig(fname) # depends on [control=['if'], data=['fname']]
return (fig, ax) |
def xcorr_plot(template, image, shift=None, cc=None, cc_vec=None, **kwargs):
"""
Plot a template overlying an image aligned by correlation.
:type template: numpy.ndarray
:param template: Short template image
:type image: numpy.ndarray
:param image: Long master image
:type shift: int
:param shift: Shift to apply to template relative to image, in samples
:type cc: float
:param cc: Cross-correlation at shift
:type cc_vec: numpy.ndarray
:param cc_vec: Cross-correlation vector.
:type save: bool
:param save: Whether to save the plot or not.
:type savefile: str
:param savefile: File name to save to
:returns: :class:`matplotlib.figure.Figure`
.. rubric:: Example
>>> from obspy import read
>>> from eqcorrscan.utils.plotting import xcorr_plot
>>> from eqcorrscan.utils.stacking import align_traces
>>> st = read().detrend('simple').filter('bandpass', freqmin=2, freqmax=15)
>>> shifts, ccs = align_traces([st[0], st[1]], 40)
>>> shift = shifts[1] * st[1].stats.sampling_rate
>>> cc = ccs[1]
>>> xcorr_plot(template=st[1].data, image=st[0].data, shift=shift,
... cc=cc) # doctest: +SKIP
.. image:: ../../plots/xcorr_plot.png
"""
import matplotlib.pyplot as plt
if cc is None or shift is None:
if not isinstance(cc_vec, np.ndarray):
print('Given cc: %s and shift: %s' % (cc, shift))
raise IOError('Must provide either cc_vec, or cc and shift')
shift = np.abs(cc_vec).argmax()
cc = cc_vec[shift]
x = np.arange(len(image))
plt.plot(x, image / abs(image).max(), 'k', lw=1.3, label='Image')
x = np.arange(len(template)) + shift
plt.plot(x, template / abs(template).max(), 'r', lw=1.1, label='Template')
plt.title('Shift=%s, Correlation=%s' % (shift, cc))
fig = plt.gcf()
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig | def function[xcorr_plot, parameter[template, image, shift, cc, cc_vec]]:
constant[
Plot a template overlying an image aligned by correlation.
:type template: numpy.ndarray
:param template: Short template image
:type image: numpy.ndarray
:param image: Long master image
:type shift: int
:param shift: Shift to apply to template relative to image, in samples
:type cc: float
:param cc: Cross-correlation at shift
:type cc_vec: numpy.ndarray
:param cc_vec: Cross-correlation vector.
:type save: bool
:param save: Whether to save the plot or not.
:type savefile: str
:param savefile: File name to save to
:returns: :class:`matplotlib.figure.Figure`
.. rubric:: Example
>>> from obspy import read
>>> from eqcorrscan.utils.plotting import xcorr_plot
>>> from eqcorrscan.utils.stacking import align_traces
>>> st = read().detrend('simple').filter('bandpass', freqmin=2, freqmax=15)
>>> shifts, ccs = align_traces([st[0], st[1]], 40)
>>> shift = shifts[1] * st[1].stats.sampling_rate
>>> cc = ccs[1]
>>> xcorr_plot(template=st[1].data, image=st[0].data, shift=shift,
... cc=cc) # doctest: +SKIP
.. image:: ../../plots/xcorr_plot.png
]
import module[matplotlib.pyplot] as alias[plt]
if <ast.BoolOp object at 0x7da18f09cf40> begin[:]
if <ast.UnaryOp object at 0x7da18f09d450> begin[:]
call[name[print], parameter[binary_operation[constant[Given cc: %s and shift: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09d420>, <ast.Name object at 0x7da18f09ed10>]]]]]
<ast.Raise object at 0x7da18f09e920>
variable[shift] assign[=] call[call[name[np].abs, parameter[name[cc_vec]]].argmax, parameter[]]
variable[cc] assign[=] call[name[cc_vec]][name[shift]]
variable[x] assign[=] call[name[np].arange, parameter[call[name[len], parameter[name[image]]]]]
call[name[plt].plot, parameter[name[x], binary_operation[name[image] / call[call[name[abs], parameter[name[image]]].max, parameter[]]], constant[k]]]
variable[x] assign[=] binary_operation[call[name[np].arange, parameter[call[name[len], parameter[name[template]]]]] + name[shift]]
call[name[plt].plot, parameter[name[x], binary_operation[name[template] / call[call[name[abs], parameter[name[template]]].max, parameter[]]], constant[r]]]
call[name[plt].title, parameter[binary_operation[constant[Shift=%s, Correlation=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb56350>, <ast.Name object at 0x7da18eb559c0>]]]]]
variable[fig] assign[=] call[name[plt].gcf, parameter[]]
variable[fig] assign[=] call[name[_finalise_figure], parameter[]]
return[name[fig]] | keyword[def] identifier[xcorr_plot] ( identifier[template] , identifier[image] , identifier[shift] = keyword[None] , identifier[cc] = keyword[None] , identifier[cc_vec] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[if] identifier[cc] keyword[is] keyword[None] keyword[or] identifier[shift] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[cc_vec] , identifier[np] . identifier[ndarray] ):
identifier[print] ( literal[string] %( identifier[cc] , identifier[shift] ))
keyword[raise] identifier[IOError] ( literal[string] )
identifier[shift] = identifier[np] . identifier[abs] ( identifier[cc_vec] ). identifier[argmax] ()
identifier[cc] = identifier[cc_vec] [ identifier[shift] ]
identifier[x] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[image] ))
identifier[plt] . identifier[plot] ( identifier[x] , identifier[image] / identifier[abs] ( identifier[image] ). identifier[max] (), literal[string] , identifier[lw] = literal[int] , identifier[label] = literal[string] )
identifier[x] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[template] ))+ identifier[shift]
identifier[plt] . identifier[plot] ( identifier[x] , identifier[template] / identifier[abs] ( identifier[template] ). identifier[max] (), literal[string] , identifier[lw] = literal[int] , identifier[label] = literal[string] )
identifier[plt] . identifier[title] ( literal[string] %( identifier[shift] , identifier[cc] ))
identifier[fig] = identifier[plt] . identifier[gcf] ()
identifier[fig] = identifier[_finalise_figure] ( identifier[fig] = identifier[fig] ,** identifier[kwargs] )
keyword[return] identifier[fig] | def xcorr_plot(template, image, shift=None, cc=None, cc_vec=None, **kwargs):
"""
Plot a template overlying an image aligned by correlation.
:type template: numpy.ndarray
:param template: Short template image
:type image: numpy.ndarray
:param image: Long master image
:type shift: int
:param shift: Shift to apply to template relative to image, in samples
:type cc: float
:param cc: Cross-correlation at shift
:type cc_vec: numpy.ndarray
:param cc_vec: Cross-correlation vector.
:type save: bool
:param save: Whether to save the plot or not.
:type savefile: str
:param savefile: File name to save to
:returns: :class:`matplotlib.figure.Figure`
.. rubric:: Example
>>> from obspy import read
>>> from eqcorrscan.utils.plotting import xcorr_plot
>>> from eqcorrscan.utils.stacking import align_traces
>>> st = read().detrend('simple').filter('bandpass', freqmin=2, freqmax=15)
>>> shifts, ccs = align_traces([st[0], st[1]], 40)
>>> shift = shifts[1] * st[1].stats.sampling_rate
>>> cc = ccs[1]
>>> xcorr_plot(template=st[1].data, image=st[0].data, shift=shift,
... cc=cc) # doctest: +SKIP
.. image:: ../../plots/xcorr_plot.png
"""
import matplotlib.pyplot as plt
if cc is None or shift is None:
if not isinstance(cc_vec, np.ndarray):
print('Given cc: %s and shift: %s' % (cc, shift))
raise IOError('Must provide either cc_vec, or cc and shift') # depends on [control=['if'], data=[]]
shift = np.abs(cc_vec).argmax()
cc = cc_vec[shift] # depends on [control=['if'], data=[]]
x = np.arange(len(image))
plt.plot(x, image / abs(image).max(), 'k', lw=1.3, label='Image')
x = np.arange(len(template)) + shift
plt.plot(x, template / abs(template).max(), 'r', lw=1.1, label='Template')
plt.title('Shift=%s, Correlation=%s' % (shift, cc))
fig = plt.gcf()
fig = _finalise_figure(fig=fig, **kwargs) # pragma: no cover
return fig |
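A complementary sketch exercising the cc_vec branch with synthetic data, so it runs without obspy; the show keyword is assumed to be handled by the module's _finalise_figure helper, which must be in scope.
import numpy as np

image = np.sin(np.linspace(0, 10, 200))
template = image[40:90].copy()
# normalized sliding cross-correlation; the peak should land at shift = 40
cc_vec = np.correlate(image, template, mode='valid')
cc_vec /= (np.linalg.norm(template) *
           np.sqrt(np.convolve(image ** 2, np.ones(len(template)), 'valid')))
fig = xcorr_plot(template=template, image=image, cc_vec=cc_vec, show=False)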
def select(self, selections):
'''Make a selection in this
representation. BallAndStickRenderer support selections of
atoms and bonds.
To select the first atom and the first bond you can use the
following code::
from chemlab.mviewer.state import Selection
representation.select({'atoms': Selection([0], system.n_atoms),
'bonds': Selection([0], system.n_bonds)})
Returns the current Selection
'''
if 'atoms' in selections:
self.selection_state['atoms'] = selections['atoms']
self.on_atom_selection_changed()
if 'bonds' in selections:
self.selection_state['bonds'] = selections['bonds']
self.on_bond_selection_changed()
if 'box' in selections:
self.selection_state['box'] = selections['box']
return self.selection_state | def function[select, parameter[self, selections]]:
constant[Make a selection in this
representation. BallAndStickRenderer support selections of
atoms and bonds.
To select the first atom and the first bond you can use the
following code::
from chemlab.mviewer.state import Selection
representation.select({'atoms': Selection([0], system.n_atoms),
'bonds': Selection([0], system.n_bonds)})
Returns the current Selection
]
if compare[constant[atoms] in name[selections]] begin[:]
call[name[self].selection_state][constant[atoms]] assign[=] call[name[selections]][constant[atoms]]
call[name[self].on_atom_selection_changed, parameter[]]
if compare[constant[bonds] in name[selections]] begin[:]
call[name[self].selection_state][constant[bonds]] assign[=] call[name[selections]][constant[bonds]]
call[name[self].on_bond_selection_changed, parameter[]]
if compare[constant[box] in name[selections]] begin[:]
call[name[self].selection_state][constant[box]] assign[=] call[name[selections]][constant[box]]
return[name[self].selection_state] | keyword[def] identifier[select] ( identifier[self] , identifier[selections] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[selections] :
identifier[self] . identifier[selection_state] [ literal[string] ]= identifier[selections] [ literal[string] ]
identifier[self] . identifier[on_atom_selection_changed] ()
keyword[if] literal[string] keyword[in] identifier[selections] :
identifier[self] . identifier[selection_state] [ literal[string] ]= identifier[selections] [ literal[string] ]
identifier[self] . identifier[on_bond_selection_changed] ()
keyword[if] literal[string] keyword[in] identifier[selections] :
identifier[self] . identifier[selection_state] [ literal[string] ]= identifier[selections] [ literal[string] ]
keyword[return] identifier[self] . identifier[selection_state] | def select(self, selections):
"""Make a selection in this
representation. BallAndStickRenderer support selections of
atoms and bonds.
To select the first atom and the first bond you can use the
following code::
from chemlab.mviewer.state import Selection
representation.select({'atoms': Selection([0], system.n_atoms),
'bonds': Selection([0], system.n_bonds)})
Returns the current Selection
"""
if 'atoms' in selections:
self.selection_state['atoms'] = selections['atoms']
self.on_atom_selection_changed() # depends on [control=['if'], data=['selections']]
if 'bonds' in selections:
self.selection_state['bonds'] = selections['bonds']
self.on_bond_selection_changed() # depends on [control=['if'], data=['selections']]
if 'box' in selections:
self.selection_state['box'] = selections['box'] # depends on [control=['if'], data=['selections']]
return self.selection_state |
def safe_load(string):
"""
Parse the provided string and return a dict.
:param string: A string to be parsed.
:return: dict
"""
try:
return yaml.safe_load(string) or {}
except yaml.scanner.ScannerError as e:
sysexit_with_message(str(e)) | def function[safe_load, parameter[string]]:
constant[
Parse the provided string and return a dict.
:param string: A string to be parsed.
:return: dict
]
<ast.Try object at 0x7da20cabcd00> | keyword[def] identifier[safe_load] ( identifier[string] ):
literal[string]
keyword[try] :
keyword[return] identifier[yaml] . identifier[safe_load] ( identifier[string] ) keyword[or] {}
keyword[except] identifier[yaml] . identifier[scanner] . identifier[ScannerError] keyword[as] identifier[e] :
identifier[sysexit_with_message] ( identifier[str] ( identifier[e] )) | def safe_load(string):
"""
Parse the provided string and return a dict.
:param string: A string to be parsed.
:return: dict
"""
try:
return yaml.safe_load(string) or {} # depends on [control=['try'], data=[]]
except yaml.scanner.ScannerError as e:
sysexit_with_message(str(e)) # depends on [control=['except'], data=['e']] |
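Usage sketch; yaml here is PyYAML and sysexit_with_message is the module's exit helper, both assumed to be in scope.
print(safe_load("retries: 3\nname: demo"))  # {'retries': 3, 'name': 'demo'}
print(safe_load(""))                        # {} -- a falsy parse collapses to {}
safe_load("a:\n\t- 1")  # tab indentation -> ScannerError -> sysexit_with_message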
def add_field_to_work_item_type(self, field, process_id, wit_ref_name):
"""AddFieldToWorkItemType.
[Preview API] Adds a field to a work item type.
:param :class:`<AddProcessWorkItemTypeFieldRequest> <azure.devops.v5_0.work_item_tracking_process.models.AddProcessWorkItemTypeFieldRequest>` field:
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: :class:`<ProcessWorkItemTypeField> <azure.devops.v5_0.work_item_tracking_process.models.ProcessWorkItemTypeField>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
content = self._serialize.body(field, 'AddProcessWorkItemTypeFieldRequest')
response = self._send(http_method='POST',
location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
version='5.0-preview.2',
route_values=route_values,
content=content)
return self._deserialize('ProcessWorkItemTypeField', response) | def function[add_field_to_work_item_type, parameter[self, field, process_id, wit_ref_name]]:
constant[AddFieldToWorkItemType.
[Preview API] Adds a field to a work item type.
:param :class:`<AddProcessWorkItemTypeFieldRequest> <azure.devops.v5_0.work_item_tracking_process.models.AddProcessWorkItemTypeFieldRequest>` field:
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: :class:`<ProcessWorkItemTypeField> <azure.devops.v5_0.work_item_tracking_process.models.ProcessWorkItemTypeField>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[process_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[processId]] assign[=] call[name[self]._serialize.url, parameter[constant[process_id], name[process_id], constant[str]]]
if compare[name[wit_ref_name] is_not constant[None]] begin[:]
call[name[route_values]][constant[witRefName]] assign[=] call[name[self]._serialize.url, parameter[constant[wit_ref_name], name[wit_ref_name], constant[str]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[field], constant[AddProcessWorkItemTypeFieldRequest]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[ProcessWorkItemTypeField], name[response]]]] | keyword[def] identifier[add_field_to_work_item_type] ( identifier[self] , identifier[field] , identifier[process_id] , identifier[wit_ref_name] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[process_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[process_id] , literal[string] )
keyword[if] identifier[wit_ref_name] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[wit_ref_name] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[field] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] ) | def add_field_to_work_item_type(self, field, process_id, wit_ref_name):
"""AddFieldToWorkItemType.
[Preview API] Adds a field to a work item type.
:param :class:`<AddProcessWorkItemTypeFieldRequest> <azure.devops.v5_0.work_item_tracking_process.models.AddProcessWorkItemTypeFieldRequest>` field:
:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: :class:`<ProcessWorkItemTypeField> <azure.devops.v5_0.work_item_tracking_process.models.ProcessWorkItemTypeField>`
"""
route_values = {}
if process_id is not None:
route_values['processId'] = self._serialize.url('process_id', process_id, 'str') # depends on [control=['if'], data=['process_id']]
if wit_ref_name is not None:
route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') # depends on [control=['if'], data=['wit_ref_name']]
content = self._serialize.body(field, 'AddProcessWorkItemTypeFieldRequest')
response = self._send(http_method='POST', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='5.0-preview.2', route_values=route_values, content=content)
return self._deserialize('ProcessWorkItemTypeField', response) |
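A hedged sketch of driving this client method. The request class and module path are taken from the docstring above; the reference_name/required attributes are plausible request fields but should be treated as assumptions if your SDK version differs.
from azure.devops.v5_0.work_item_tracking_process.models import (
    AddProcessWorkItemTypeFieldRequest,
)

def add_severity_field(client, process_id, wit_ref_name):
    # reference_name/required: assumed attributes of the request model
    request = AddProcessWorkItemTypeFieldRequest(
        reference_name='Custom.Severity', required=True)
    return client.add_field_to_work_item_type(request, process_id, wit_ref_name)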
def get_actions(self, request):
"""
Define actions by user's permissions.
"""
actions = super(EntryAdmin, self).get_actions(request)
if not actions:
return actions
if (not request.user.has_perm('zinnia.can_change_author') or
not request.user.has_perm('zinnia.can_view_all')):
del actions['make_mine']
if not request.user.has_perm('zinnia.can_change_status'):
del actions['make_hidden']
del actions['make_published']
if not settings.PING_DIRECTORIES:
del actions['ping_directories']
return actions | def function[get_actions, parameter[self, request]]:
constant[
Define actions by user's permissions.
]
variable[actions] assign[=] call[call[name[super], parameter[name[EntryAdmin], name[self]]].get_actions, parameter[name[request]]]
if <ast.UnaryOp object at 0x7da1b1ddd330> begin[:]
return[name[actions]]
if <ast.BoolOp object at 0x7da1b1dddc30> begin[:]
<ast.Delete object at 0x7da1b1ddd7b0>
if <ast.UnaryOp object at 0x7da1b1ddcfd0> begin[:]
<ast.Delete object at 0x7da1b1ddd480>
<ast.Delete object at 0x7da1b1ddea70>
if <ast.UnaryOp object at 0x7da1b1ddd570> begin[:]
<ast.Delete object at 0x7da1b1ddecb0>
return[name[actions]] | keyword[def] identifier[get_actions] ( identifier[self] , identifier[request] ):
literal[string]
identifier[actions] = identifier[super] ( identifier[EntryAdmin] , identifier[self] ). identifier[get_actions] ( identifier[request] )
keyword[if] keyword[not] identifier[actions] :
keyword[return] identifier[actions]
keyword[if] ( keyword[not] identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] ) keyword[or]
keyword[not] identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] )):
keyword[del] identifier[actions] [ literal[string] ]
keyword[if] keyword[not] identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] ):
keyword[del] identifier[actions] [ literal[string] ]
keyword[del] identifier[actions] [ literal[string] ]
keyword[if] keyword[not] identifier[settings] . identifier[PING_DIRECTORIES] :
keyword[del] identifier[actions] [ literal[string] ]
keyword[return] identifier[actions] | def get_actions(self, request):
"""
Define actions by user's permissions.
"""
actions = super(EntryAdmin, self).get_actions(request)
if not actions:
return actions # depends on [control=['if'], data=[]]
if not request.user.has_perm('zinnia.can_change_author') or not request.user.has_perm('zinnia.can_view_all'):
del actions['make_mine'] # depends on [control=['if'], data=[]]
if not request.user.has_perm('zinnia.can_change_status'):
del actions['make_hidden']
del actions['make_published'] # depends on [control=['if'], data=[]]
if not settings.PING_DIRECTORIES:
del actions['ping_directories'] # depends on [control=['if'], data=[]]
return actions |
def split_at(it, split_value):
"""Splits an iterator C{it} at values of C{split_value}.
Each instance of C{split_value} is swallowed. The iterator produces
subiterators which need to be consumed fully before the next subiterator
can be used.
"""
def _chunk_iterator(first):
v = first
while v != split_value:
yield v
v = next(it)
while True:
yield _chunk_iterator(next(it)) | def function[split_at, parameter[it, split_value]]:
constant[Splits an iterator C{it} at values of C{split_value}.
Each instance of C{split_value} is swallowed. The iterator produces
subiterators which need to be consumed fully before the next subiterator
can be used.
]
def function[_chunk_iterator, parameter[first]]:
variable[v] assign[=] name[first]
while compare[name[v] not_equal[!=] name[split_value]] begin[:]
<ast.Yield object at 0x7da1b059fb80>
variable[v] assign[=] call[name[next], parameter[name[it]]]
while constant[True] begin[:]
<ast.Yield object at 0x7da1b059ec20> | keyword[def] identifier[split_at] ( identifier[it] , identifier[split_value] ):
literal[string]
keyword[def] identifier[_chunk_iterator] ( identifier[first] ):
identifier[v] = identifier[first]
keyword[while] identifier[v] != identifier[split_value] :
keyword[yield] identifier[v]
identifier[v] = identifier[next] ( identifier[it] )
keyword[while] keyword[True] :
keyword[yield] identifier[_chunk_iterator] ( identifier[next] ( identifier[it] )) | def split_at(it, split_value):
"""Splits an iterator C{it} at values of C{split_value}.
Each instance of C{split_value} is swallowed. The iterator produces
subiterators which need to be consumed fully before the next subiterator
can be used.
"""
def _chunk_iterator(first):
v = first
while v != split_value:
yield v
v = next(it) # depends on [control=['while'], data=['v']]
while True:
yield _chunk_iterator(next(it)) # depends on [control=['while'], data=[]] |
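The generator above predates PEP 479: on Python 3.7+ the StopIteration raised by the bare next(it) calls at end of input surfaces as RuntimeError. A minimal modernized sketch with the same chunking behaviour that terminates cleanly:
def split_at_py3(iterable, split_value):
    it = iter(iterable)
    def _chunk(first):
        v = first
        while v != split_value:
            yield v
            v = next(it, split_value)  # treat exhaustion like a separator
    while True:
        try:
            first = next(it)
        except StopIteration:
            return
        yield _chunk(first)

for chunk in split_at_py3([1, 2, 0, 3, 4, 0, 5], 0):
    print(list(chunk))  # [1, 2] then [3, 4] then [5]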
async def respond(self, *args, **kwargs):
"""
Responds to the message (not as a reply). Shorthand for
`telethon.client.messages.MessageMethods.send_message`
with ``entity`` already set.
"""
return await self._client.send_message(
await self.get_input_chat(), *args, **kwargs) | <ast.AsyncFunctionDef object at 0x7da1b26acf70> | keyword[async] keyword[def] identifier[respond] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[_client] . identifier[send_message] (
keyword[await] identifier[self] . identifier[get_input_chat] (),* identifier[args] ,** identifier[kwargs] ) | async def respond(self, *args, **kwargs):
"""
Responds to the message (not as a reply). Shorthand for
`telethon.client.messages.MessageMethods.send_message`
with ``entity`` already set.
"""
return await self._client.send_message(await self.get_input_chat(), *args, **kwargs) |
def docgraph2freqt(docgraph, root=None, include_pos=False,
escape_func=FREQT_ESCAPE_FUNC):
"""convert a docgraph into a FREQT string."""
if root is None:
return u"\n".join(
sentence2freqt(docgraph, sentence, include_pos=include_pos,
escape_func=escape_func)
for sentence in docgraph.sentences)
else:
return sentence2freqt(docgraph, root, include_pos=include_pos,
escape_func=escape_func) | def function[docgraph2freqt, parameter[docgraph, root, include_pos, escape_func]]:
constant[convert a docgraph into a FREQT string.]
if compare[name[root] is constant[None]] begin[:]
return[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da1b26ad1e0>]]] | keyword[def] identifier[docgraph2freqt] ( identifier[docgraph] , identifier[root] = keyword[None] , identifier[include_pos] = keyword[False] ,
identifier[escape_func] = identifier[FREQT_ESCAPE_FUNC] ):
literal[string]
keyword[if] identifier[root] keyword[is] keyword[None] :
keyword[return] literal[string] . identifier[join] (
identifier[sentence2freqt] ( identifier[docgraph] , identifier[sentence] , identifier[include_pos] = identifier[include_pos] ,
identifier[escape_func] = identifier[escape_func] )
keyword[for] identifier[sentence] keyword[in] identifier[docgraph] . identifier[sentences] )
keyword[else] :
keyword[return] identifier[sentence2freqt] ( identifier[docgraph] , identifier[root] , identifier[include_pos] = identifier[include_pos] ,
identifier[escape_func] = identifier[escape_func] ) | def docgraph2freqt(docgraph, root=None, include_pos=False, escape_func=FREQT_ESCAPE_FUNC):
"""convert a docgraph into a FREQT string."""
if root is None:
return u'\n'.join((sentence2freqt(docgraph, sentence, include_pos=include_pos, escape_func=escape_func) for sentence in docgraph.sentences)) # depends on [control=['if'], data=[]]
else:
return sentence2freqt(docgraph, root, include_pos=include_pos, escape_func=escape_func) |
def generate_cylindrical_points(start, end, start_radius, end_radius,
linspace_count=_LINSPACE_COUNT):
'''Generate a 3d mesh of a cylinder with start and end points, and varying radius
Based on: http://stackoverflow.com/a/32383775
'''
v = end - start
length = norm(v)
v = v / length
n1, n2 = _get_normals(v)
# pylint: disable=unbalanced-tuple-unpacking
l, theta = np.meshgrid(np.linspace(0, length, linspace_count),
np.linspace(0, 2 * np.pi, linspace_count))
radii = np.linspace(start_radius, end_radius, linspace_count)
rsin = np.multiply(radii, np.sin(theta))
rcos = np.multiply(radii, np.cos(theta))
return np.array([start[i] +
v[i] * l +
n1[i] * rsin + n2[i] * rcos
for i in range(3)]) | def function[generate_cylindrical_points, parameter[start, end, start_radius, end_radius, linspace_count]]:
constant[Generate a 3d mesh of a cylinder with start and end points, and varying radius
Based on: http://stackoverflow.com/a/32383775
]
variable[v] assign[=] binary_operation[name[end] - name[start]]
variable[length] assign[=] call[name[norm], parameter[name[v]]]
variable[v] assign[=] binary_operation[name[v] / name[length]]
<ast.Tuple object at 0x7da2043473d0> assign[=] call[name[_get_normals], parameter[name[v]]]
<ast.Tuple object at 0x7da204347520> assign[=] call[name[np].meshgrid, parameter[call[name[np].linspace, parameter[constant[0], name[length], name[linspace_count]]], call[name[np].linspace, parameter[constant[0], binary_operation[constant[2] * name[np].pi], name[linspace_count]]]]]
variable[radii] assign[=] call[name[np].linspace, parameter[name[start_radius], name[end_radius], name[linspace_count]]]
variable[rsin] assign[=] call[name[np].multiply, parameter[name[radii], call[name[np].sin, parameter[name[theta]]]]]
variable[rcos] assign[=] call[name[np].multiply, parameter[name[radii], call[name[np].cos, parameter[name[theta]]]]]
return[call[name[np].array, parameter[<ast.ListComp object at 0x7da204346380>]]] | keyword[def] identifier[generate_cylindrical_points] ( identifier[start] , identifier[end] , identifier[start_radius] , identifier[end_radius] ,
identifier[linspace_count] = identifier[_LINSPACE_COUNT] ):
literal[string]
identifier[v] = identifier[end] - identifier[start]
identifier[length] = identifier[norm] ( identifier[v] )
identifier[v] = identifier[v] / identifier[length]
identifier[n1] , identifier[n2] = identifier[_get_normals] ( identifier[v] )
identifier[l] , identifier[theta] = identifier[np] . identifier[meshgrid] ( identifier[np] . identifier[linspace] ( literal[int] , identifier[length] , identifier[linspace_count] ),
identifier[np] . identifier[linspace] ( literal[int] , literal[int] * identifier[np] . identifier[pi] , identifier[linspace_count] ))
identifier[radii] = identifier[np] . identifier[linspace] ( identifier[start_radius] , identifier[end_radius] , identifier[linspace_count] )
identifier[rsin] = identifier[np] . identifier[multiply] ( identifier[radii] , identifier[np] . identifier[sin] ( identifier[theta] ))
identifier[rcos] = identifier[np] . identifier[multiply] ( identifier[radii] , identifier[np] . identifier[cos] ( identifier[theta] ))
keyword[return] identifier[np] . identifier[array] ([ identifier[start] [ identifier[i] ]+
identifier[v] [ identifier[i] ]* identifier[l] +
identifier[n1] [ identifier[i] ]* identifier[rsin] + identifier[n2] [ identifier[i] ]* identifier[rcos]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )]) | def generate_cylindrical_points(start, end, start_radius, end_radius, linspace_count=_LINSPACE_COUNT):
"""Generate a 3d mesh of a cylinder with start and end points, and varying radius
Based on: http://stackoverflow.com/a/32383775
"""
v = end - start
length = norm(v)
v = v / length
(n1, n2) = _get_normals(v)
# pylint: disable=unbalanced-tuple-unpacking
(l, theta) = np.meshgrid(np.linspace(0, length, linspace_count), np.linspace(0, 2 * np.pi, linspace_count))
radii = np.linspace(start_radius, end_radius, linspace_count)
rsin = np.multiply(radii, np.sin(theta))
rcos = np.multiply(radii, np.cos(theta))
return np.array([start[i] + v[i] * l + n1[i] * rsin + n2[i] * rcos for i in range(3)]) |
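A runnable sketch of the mesh generation. norm and _get_normals live elsewhere in the module, so stand-ins are assumed here; any orthonormal pair perpendicular to v works, and linspace_count is passed explicitly to avoid relying on the module constant.
import numpy as np
from numpy.linalg import norm  # the norm the function above relies on

def _get_normals(v):
    # stand-in: build two unit vectors orthogonal to v and to each other
    not_v = np.array([1.0, 0.0, 0.0])
    if np.allclose(np.abs(v), not_v):
        not_v = np.array([0.0, 1.0, 0.0])
    n1 = np.cross(v, not_v)
    n1 /= norm(n1)
    return n1, np.cross(v, n1)

pts = generate_cylindrical_points(np.zeros(3), np.array([0.0, 0.0, 5.0]),
                                  start_radius=1.0, end_radius=0.5,
                                  linspace_count=10)
print(pts.shape)  # (3, 10, 10): x/y/z meshes, e.g. for Axes3D.plot_surface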
def check_not(state, *tests, msg):
"""Run multiple subtests that should fail. If all subtests fail, returns original state (for chaining)
- This function is currently only tested in working with ``has_code()`` in the subtests.
- This function can be thought of as a ``NOT(x OR y OR ...)`` statement, since all tests it runs must fail
- This function can be considered a direct counterpart of multi.
Args:
state: State instance describing student and solution code, can be omitted if used with Ex()
*tests: one or more sub-SCTs to run
msg: feedback message that is shown in case not all tests specified in ``*tests`` fail.
:Example:
The SCT below runs two has_code cases::
Ex().check_not(
has_code('INNER'),
has_code('OUTER'),
incorrect_msg="Don't use `INNER` or `OUTER`!"
)
If students use ``INNER (JOIN)`` or ``OUTER (JOIN)`` in their code, this test will fail.
"""
for test in iter_tests(tests):
try:
test(state)
except TestFail:
# it fails, as expected, off to next one
continue
return state.report(Feedback(msg))
# return original state, so can be chained
return state | def function[check_not, parameter[state]]:
constant[Run multiple subtests that should fail. If all subtests fail, returns original state (for chaining)
- This function is currently only tested in working with ``has_code()`` in the subtests.
- This function can be thought of as a ``NOT(x OR y OR ...)`` statement, since all tests it runs must fail
- This function can be considered a direct counterpart of multi.
Args:
state: State instance describing student and solution code, can be omitted if used with Ex()
*tests: one or more sub-SCTs to run
msg: feedback message that is shown in case not all tests specified in ``*tests`` fail.
:Example:
The SCT below runs two has_code cases::
Ex().check_not(
has_code('INNER'),
has_code('OUTER'),
incorrect_msg="Don't use `INNER` or `OUTER`!"
)
If students use ``INNER (JOIN)`` or ``OUTER (JOIN)`` in their code, this test will fail.
]
for taget[name[test]] in starred[call[name[iter_tests], parameter[name[tests]]]] begin[:]
<ast.Try object at 0x7da18f7238e0>
return[call[name[state].report, parameter[call[name[Feedback], parameter[name[msg]]]]]]
return[name[state]] | keyword[def] identifier[check_not] ( identifier[state] ,* identifier[tests] , identifier[msg] ):
literal[string]
keyword[for] identifier[test] keyword[in] identifier[iter_tests] ( identifier[tests] ):
keyword[try] :
identifier[test] ( identifier[state] )
keyword[except] identifier[TestFail] :
keyword[continue]
keyword[return] identifier[state] . identifier[report] ( identifier[Feedback] ( identifier[msg] ))
keyword[return] identifier[state] | def check_not(state, *tests, msg):
"""Run multiple subtests that should fail. If all subtests fail, returns original state (for chaining)
- This function is currently only tested in working with ``has_code()`` in the subtests.
- This function can be thought of as a ``NOT(x OR y OR ...)`` statement, since all tests it runs must fail
- This function can be considered a direct counterpart of multi.
Args:
state: State instance describing student and solution code, can be omitted if used with Ex()
*tests: one or more sub-SCTs to run
msg: feedback message that is shown in case not all tests specified in ``*tests`` fail.
:Example:
The SCT below runs two has_code cases::
Ex().check_not(
has_code('INNER'),
has_code('OUTER'),
incorrect_msg="Don't use `INNER` or `OUTER`!"
)
If students use ``INNER (JOIN)`` or ``OUTER (JOIN)`` in their code, this test will fail.
"""
for test in iter_tests(tests):
try:
test(state) # depends on [control=['try'], data=[]]
except TestFail:
# it fails, as expected, off to next one
continue # depends on [control=['except'], data=[]]
return state.report(Feedback(msg)) # depends on [control=['for'], data=['test']]
# return original state, so can be chained
return state |
def p_propertyDeclaration_4(p):
"""propertyDeclaration_4 : dataType propertyName array defaultValue ';'"""
p[0] = CIMProperty(p[2], p[4], type=p[1], is_array=True,
array_size=p[3]) | def function[p_propertyDeclaration_4, parameter[p]]:
constant[propertyDeclaration_4 : dataType propertyName array defaultValue ';']
call[name[p]][constant[0]] assign[=] call[name[CIMProperty], parameter[call[name[p]][constant[2]], call[name[p]][constant[4]]]] | keyword[def] identifier[p_propertyDeclaration_4] ( identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[CIMProperty] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[type] = identifier[p] [ literal[int] ], identifier[is_array] = keyword[True] ,
identifier[array_size] = identifier[p] [ literal[int] ]) | def p_propertyDeclaration_4(p):
"""propertyDeclaration_4 : dataType propertyName array defaultValue ';'"""
p[0] = CIMProperty(p[2], p[4], type=p[1], is_array=True, array_size=p[3]) |
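In PLY-style parsers the docstring is the grammar production: p[0] is the slot for the rule's result and p[1]..p[n] are the matched right-hand-side symbols, so in the rule above p[1] is the dataType, p[2] the propertyName, p[3] the array size, and p[4] the defaultValue. A minimal rule of the same shape (token names are hypothetical):

def p_sized_value(p):
    """sizedValue : NAME '[' NUMBER ']' '=' NUMBER ';'"""
    # p[1] -> NAME, p[3] -> declared size, p[6] -> default value
    p[0] = (p[1], int(p[3]), int(p[6]))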
def describe_batch_predictions(FilterVariable=None, EQ=None, GT=None, LT=None, GE=None, LE=None, NE=None, Prefix=None, SortOrder=None, NextToken=None, Limit=None):
"""
Returns a list of BatchPrediction operations that match the search criteria in the request.
See also: AWS API Documentation
:example: response = client.describe_batch_predictions(
FilterVariable='CreatedAt'|'LastUpdatedAt'|'Status'|'Name'|'IAMUser'|'MLModelId'|'DataSourceId'|'DataURI',
EQ='string',
GT='string',
LT='string',
GE='string',
LE='string',
NE='string',
Prefix='string',
SortOrder='asc'|'dsc',
NextToken='string',
Limit=123
)
:type FilterVariable: string
:param FilterVariable: Use one of the following variables to filter a list of BatchPrediction :
CreatedAt - Sets the search criteria to the BatchPrediction creation date.
Status - Sets the search criteria to the BatchPrediction status.
Name - Sets the search criteria to the contents of the BatchPrediction Name .
IAMUser - Sets the search criteria to the user account that invoked the BatchPrediction creation.
MLModelId - Sets the search criteria to the MLModel used in the BatchPrediction .
DataSourceId - Sets the search criteria to the DataSource used in the BatchPrediction .
DataURI - Sets the search criteria to the data file(s) used in the BatchPrediction . The URL can identify either a file or an Amazon Simple Storage Service (Amazon S3) bucket or directory.
:type EQ: string
:param EQ: The equal to operator. The BatchPrediction results will have FilterVariable values that exactly match the value specified with EQ .
:type GT: string
:param GT: The greater than operator. The BatchPrediction results will have FilterVariable values that are greater than the value specified with GT .
:type LT: string
:param LT: The less than operator. The BatchPrediction results will have FilterVariable values that are less than the value specified with LT .
:type GE: string
:param GE: The greater than or equal to operator. The BatchPrediction results will have FilterVariable values that are greater than or equal to the value specified with GE .
:type LE: string
:param LE: The less than or equal to operator. The BatchPrediction results will have FilterVariable values that are less than or equal to the value specified with LE .
:type NE: string
:param NE: The not equal to operator. The BatchPrediction results will have FilterVariable values not equal to the value specified with NE .
:type Prefix: string
:param Prefix: A string that is found at the beginning of a variable, such as Name or Id .
For example, a Batch Prediction operation could have the Name 2014-09-09-HolidayGiftMailer . To search for this BatchPrediction , select Name for the FilterVariable and any of the following strings for the Prefix :
2014-09
2014-09-09
2014-09-09-Holiday
:type SortOrder: string
:param SortOrder: A two-value parameter that determines the sequence of the resulting list of MLModel s.
asc - Arranges the list in ascending order (A-Z, 0-9).
dsc - Arranges the list in descending order (Z-A, 9-0).
Results are sorted by FilterVariable .
:type NextToken: string
:param NextToken: An ID of the page in the paginated results.
:type Limit: integer
:param Limit: The number of pages of information to include in the result. The range of acceptable values is 1 through 100 . The default value is 100 .
:rtype: dict
:return: {
'Results': [
{
'BatchPredictionId': 'string',
'MLModelId': 'string',
'BatchPredictionDataSourceId': 'string',
'InputDataLocationS3': 'string',
'CreatedByIamUser': 'string',
'CreatedAt': datetime(2015, 1, 1),
'LastUpdatedAt': datetime(2015, 1, 1),
'Name': 'string',
'Status': 'PENDING'|'INPROGRESS'|'FAILED'|'COMPLETED'|'DELETED',
'OutputUri': 'string',
'Message': 'string',
'ComputeTime': 123,
'FinishedAt': datetime(2015, 1, 1),
'StartedAt': datetime(2015, 1, 1),
'TotalRecordCount': 123,
'InvalidRecordCount': 123
},
],
'NextToken': 'string'
}
:returns:
PENDING - Amazon Machine Learning (Amazon ML) submitted a request to generate predictions for a batch of observations.
INPROGRESS - The process is underway.
FAILED - The request to perform a batch prediction did not run to completion. It is not usable.
COMPLETED - The batch prediction process completed successfully.
DELETED - The BatchPrediction is marked as deleted. It is not usable.
"""
pass | def function[describe_batch_predictions, parameter[FilterVariable, EQ, GT, LT, GE, LE, NE, Prefix, SortOrder, NextToken, Limit]]:
constant[
Returns a list of BatchPrediction operations that match the search criteria in the request.
See also: AWS API Documentation
:example: response = client.describe_batch_predictions(
FilterVariable='CreatedAt'|'LastUpdatedAt'|'Status'|'Name'|'IAMUser'|'MLModelId'|'DataSourceId'|'DataURI',
EQ='string',
GT='string',
LT='string',
GE='string',
LE='string',
NE='string',
Prefix='string',
SortOrder='asc'|'dsc',
NextToken='string',
Limit=123
)
:type FilterVariable: string
:param FilterVariable: Use one of the following variables to filter a list of BatchPrediction :
CreatedAt - Sets the search criteria to the BatchPrediction creation date.
Status - Sets the search criteria to the BatchPrediction status.
Name - Sets the search criteria to the contents of the BatchPrediction Name .
IAMUser - Sets the search criteria to the user account that invoked the BatchPrediction creation.
MLModelId - Sets the search criteria to the MLModel used in the BatchPrediction .
DataSourceId - Sets the search criteria to the DataSource used in the BatchPrediction .
DataURI - Sets the search criteria to the data file(s) used in the BatchPrediction . The URL can identify either a file or an Amazon Simple Storage Service (Amazon S3) bucket or directory.
:type EQ: string
:param EQ: The equal to operator. The BatchPrediction results will have FilterVariable values that exactly match the value specified with EQ .
:type GT: string
:param GT: The greater than operator. The BatchPrediction results will have FilterVariable values that are greater than the value specified with GT .
:type LT: string
:param LT: The less than operator. The BatchPrediction results will have FilterVariable values that are less than the value specified with LT .
:type GE: string
:param GE: The greater than or equal to operator. The BatchPrediction results will have FilterVariable values that are greater than or equal to the value specified with GE .
:type LE: string
:param LE: The less than or equal to operator. The BatchPrediction results will have FilterVariable values that are less than or equal to the value specified with LE .
:type NE: string
:param NE: The not equal to operator. The BatchPrediction results will have FilterVariable values not equal to the value specified with NE .
:type Prefix: string
:param Prefix: A string that is found at the beginning of a variable, such as Name or Id .
For example, a Batch Prediction operation could have the Name 2014-09-09-HolidayGiftMailer . To search for this BatchPrediction , select Name for the FilterVariable and any of the following strings for the Prefix :
2014-09
2014-09-09
2014-09-09-Holiday
:type SortOrder: string
:param SortOrder: A two-value parameter that determines the sequence of the resulting list of MLModel s.
asc - Arranges the list in ascending order (A-Z, 0-9).
dsc - Arranges the list in descending order (Z-A, 9-0).
Results are sorted by FilterVariable .
:type NextToken: string
:param NextToken: An ID of the page in the paginated results.
:type Limit: integer
:param Limit: The number of pages of information to include in the result. The range of acceptable values is 1 through 100 . The default value is 100 .
:rtype: dict
:return: {
'Results': [
{
'BatchPredictionId': 'string',
'MLModelId': 'string',
'BatchPredictionDataSourceId': 'string',
'InputDataLocationS3': 'string',
'CreatedByIamUser': 'string',
'CreatedAt': datetime(2015, 1, 1),
'LastUpdatedAt': datetime(2015, 1, 1),
'Name': 'string',
'Status': 'PENDING'|'INPROGRESS'|'FAILED'|'COMPLETED'|'DELETED',
'OutputUri': 'string',
'Message': 'string',
'ComputeTime': 123,
'FinishedAt': datetime(2015, 1, 1),
'StartedAt': datetime(2015, 1, 1),
'TotalRecordCount': 123,
'InvalidRecordCount': 123
},
],
'NextToken': 'string'
}
:returns:
PENDING - Amazon Machine Learning (Amazon ML) submitted a request to generate predictions for a batch of observations.
INPROGRESS - The process is underway.
FAILED - The request to perform a batch prediction did not run to completion. It is not usable.
COMPLETED - The batch prediction process completed successfully.
DELETED - The BatchPrediction is marked as deleted. It is not usable.
]
pass | keyword[def] identifier[describe_batch_predictions] ( identifier[FilterVariable] = keyword[None] , identifier[EQ] = keyword[None] , identifier[GT] = keyword[None] , identifier[LT] = keyword[None] , identifier[GE] = keyword[None] , identifier[LE] = keyword[None] , identifier[NE] = keyword[None] , identifier[Prefix] = keyword[None] , identifier[SortOrder] = keyword[None] , identifier[NextToken] = keyword[None] , identifier[Limit] = keyword[None] ):
literal[string]
keyword[pass] | def describe_batch_predictions(FilterVariable=None, EQ=None, GT=None, LT=None, GE=None, LE=None, NE=None, Prefix=None, SortOrder=None, NextToken=None, Limit=None):
"""
Returns a list of BatchPrediction operations that match the search criteria in the request.
See also: AWS API Documentation
:example: response = client.describe_batch_predictions(
FilterVariable='CreatedAt'|'LastUpdatedAt'|'Status'|'Name'|'IAMUser'|'MLModelId'|'DataSourceId'|'DataURI',
EQ='string',
GT='string',
LT='string',
GE='string',
LE='string',
NE='string',
Prefix='string',
SortOrder='asc'|'dsc',
NextToken='string',
Limit=123
)
:type FilterVariable: string
:param FilterVariable: Use one of the following variables to filter a list of BatchPrediction :
CreatedAt - Sets the search criteria to the BatchPrediction creation date.
Status - Sets the search criteria to the BatchPrediction status.
Name - Sets the search criteria to the contents of the BatchPrediction Name .
IAMUser - Sets the search criteria to the user account that invoked the BatchPrediction creation.
MLModelId - Sets the search criteria to the MLModel used in the BatchPrediction .
DataSourceId - Sets the search criteria to the DataSource used in the BatchPrediction .
DataURI - Sets the search criteria to the data file(s) used in the BatchPrediction . The URL can identify either a file or an Amazon Simple Storage Service (Amazon S3) bucket or directory.
:type EQ: string
:param EQ: The equal to operator. The BatchPrediction results will have FilterVariable values that exactly match the value specified with EQ .
:type GT: string
:param GT: The greater than operator. The BatchPrediction results will have FilterVariable values that are greater than the value specified with GT .
:type LT: string
:param LT: The less than operator. The BatchPrediction results will have FilterVariable values that are less than the value specified with LT .
:type GE: string
:param GE: The greater than or equal to operator. The BatchPrediction results will have FilterVariable values that are greater than or equal to the value specified with GE .
:type LE: string
:param LE: The less than or equal to operator. The BatchPrediction results will have FilterVariable values that are less than or equal to the value specified with LE .
:type NE: string
:param NE: The not equal to operator. The BatchPrediction results will have FilterVariable values not equal to the value specified with NE .
:type Prefix: string
:param Prefix: A string that is found at the beginning of a variable, such as Name or Id .
For example, a Batch Prediction operation could have the Name 2014-09-09-HolidayGiftMailer . To search for this BatchPrediction , select Name for the FilterVariable and any of the following strings for the Prefix :
2014-09
2014-09-09
2014-09-09-Holiday
:type SortOrder: string
:param SortOrder: A two-value parameter that determines the sequence of the resulting list of MLModel s.
asc - Arranges the list in ascending order (A-Z, 0-9).
dsc - Arranges the list in descending order (Z-A, 9-0).
Results are sorted by FilterVariable .
:type NextToken: string
:param NextToken: An ID of the page in the paginated results.
:type Limit: integer
:param Limit: The number of pages of information to include in the result. The range of acceptable values is 1 through 100 . The default value is 100 .
:rtype: dict
:return: {
'Results': [
{
'BatchPredictionId': 'string',
'MLModelId': 'string',
'BatchPredictionDataSourceId': 'string',
'InputDataLocationS3': 'string',
'CreatedByIamUser': 'string',
'CreatedAt': datetime(2015, 1, 1),
'LastUpdatedAt': datetime(2015, 1, 1),
'Name': 'string',
'Status': 'PENDING'|'INPROGRESS'|'FAILED'|'COMPLETED'|'DELETED',
'OutputUri': 'string',
'Message': 'string',
'ComputeTime': 123,
'FinishedAt': datetime(2015, 1, 1),
'StartedAt': datetime(2015, 1, 1),
'TotalRecordCount': 123,
'InvalidRecordCount': 123
},
],
'NextToken': 'string'
}
:returns:
PENDING - Amazon Machine Learning (Amazon ML) submitted a request to generate predictions for a batch of observations.
INPROGRESS - The process is underway.
FAILED - The request to perform a batch prediction did not run to completion. It is not usable.
COMPLETED - The batch prediction process completed successfully.
DELETED - The BatchPrediction is marked as deleted. It is not usable.
"""
pass |
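A hypothetical invocation of the documented operation through boto3 (the filter values and printed fields are assumptions based on the response shape above):

import boto3

client = boto3.client('machinelearning')
resp = client.describe_batch_predictions(FilterVariable='Status', EQ='COMPLETED', Limit=10)
for bp in resp.get('Results', []):
    print(bp['BatchPredictionId'], bp['Status'], bp.get('OutputUri'))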
def enable(iface):
'''
Enable an interface
CLI Example:
.. code-block:: bash
salt -G 'os_family:Windows' ip.enable 'Local Area Connection #2'
'''
if is_enabled(iface):
return True
cmd = ['netsh', 'interface', 'set', 'interface',
'name={0}'.format(iface),
'admin=ENABLED']
__salt__['cmd.run'](cmd, python_shell=False)
return is_enabled(iface) | def function[enable, parameter[iface]]:
constant[
Enable an interface
CLI Example:
.. code-block:: bash
salt -G 'os_family:Windows' ip.enable 'Local Area Connection #2'
]
if call[name[is_enabled], parameter[name[iface]]] begin[:]
return[constant[True]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da20c7cb5e0>, <ast.Constant object at 0x7da20c7c9f30>, <ast.Constant object at 0x7da20c7c8250>, <ast.Constant object at 0x7da20c7cb2b0>, <ast.Call object at 0x7da20c7cb5b0>, <ast.Constant object at 0x7da20c6c5e10>]]
call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
return[call[name[is_enabled], parameter[name[iface]]]] | keyword[def] identifier[enable] ( identifier[iface] ):
literal[string]
keyword[if] identifier[is_enabled] ( identifier[iface] ):
keyword[return] keyword[True]
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] . identifier[format] ( identifier[iface] ),
literal[string] ]
identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )
keyword[return] identifier[is_enabled] ( identifier[iface] ) | def enable(iface):
"""
Enable an interface
CLI Example:
.. code-block:: bash
salt -G 'os_family:Windows' ip.enable 'Local Area Connection #2'
"""
if is_enabled(iface):
return True # depends on [control=['if'], data=[]]
cmd = ['netsh', 'interface', 'set', 'interface', 'name={0}'.format(iface), 'admin=ENABLED']
__salt__['cmd.run'](cmd, python_shell=False)
return is_enabled(iface) |
def delete_port_postcommit(self, context):
"""Delete the port from CVX"""
port = context.current
log_context("delete_port_postcommit: port", port)
self._delete_port_resources(port, context.host)
self._try_to_release_dynamic_segment(context) | def function[delete_port_postcommit, parameter[self, context]]:
constant[Delete the port from CVX]
variable[port] assign[=] name[context].current
call[name[log_context], parameter[constant[delete_port_postcommit: port], name[port]]]
call[name[self]._delete_port_resources, parameter[name[port], name[context].host]]
call[name[self]._try_to_release_dynamic_segment, parameter[name[context]]] | keyword[def] identifier[delete_port_postcommit] ( identifier[self] , identifier[context] ):
literal[string]
identifier[port] = identifier[context] . identifier[current]
identifier[log_context] ( literal[string] , identifier[port] )
identifier[self] . identifier[_delete_port_resources] ( identifier[port] , identifier[context] . identifier[host] )
identifier[self] . identifier[_try_to_release_dynamic_segment] ( identifier[context] ) | def delete_port_postcommit(self, context):
"""Delete the port from CVX"""
port = context.current
log_context('delete_port_postcommit: port', port)
self._delete_port_resources(port, context.host)
self._try_to_release_dynamic_segment(context) |
def peer_bfd_timers(self, **kwargs):
"""Configure BFD for BGP globally.
Args:
rbridge_id (str): Rbridge to configure. (1, 225, etc)
peer_ip (str): Peer IPv4 address for BFD setting.
tx (str): BFD transmit interval in milliseconds (300, 500, etc)
rx (str): BFD receive interval in milliseconds (300, 500, etc)
multiplier (str): BFD multiplier. (3, 7, 5, etc)
delete (bool): True if BFD configuration should be deleted.
Default value will be False if not specified.
get (bool): Get config instead of editing config. (True, False)
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `tx`, `rx`, or `multiplier` is not passed.
Examples:
>>> import pynos.device
>>> switches = ['10.24.39.230']
>>> auth = ('admin', 'password')
>>> for switch in switches:
... conn = (switch, '22')
... with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.bgp.neighbor(ip_addr='10.10.10.20',
... remote_as='65535', rbridge_id='230')
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3', rbridge_id='230')
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3', rbridge_id='230',
... get=True)
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3',
... rbridge_id='230', delete=True)
... output = dev.bgp.neighbor(ip_addr='10.10.10.20',
... delete=True, rbridge_id='230', remote_as='65535')
"""
kwargs['min_tx'] = kwargs.pop('tx')
kwargs['min_rx'] = kwargs.pop('rx')
kwargs['router_bgp_neighbor_address'] = kwargs.pop('peer_ip')
kwargs['delete'] = kwargs.pop('delete', False)
callback = kwargs.pop('callback', self._callback)
bfd_tx = self._peer_bfd_tx(**kwargs)
bfd_rx = self._peer_bfd_rx(**kwargs)
bfd_multiplier = self._peer_bfd_multiplier(**kwargs)
if kwargs.pop('get', False):
return self._peer_get_bfd(bfd_tx, bfd_rx, bfd_multiplier)
config = pynos.utilities.merge_xml(bfd_tx, bfd_rx)
config = pynos.utilities.merge_xml(config, bfd_multiplier)
return callback(config) | def function[peer_bfd_timers, parameter[self]]:
constant[Configure BFD timers for a BGP peer.
Args:
rbridge_id (str): Rbridge to configure. (1, 225, etc)
peer_ip (str): Peer IPv4 address for BFD setting.
tx (str): BFD transmit interval in milliseconds (300, 500, etc)
rx (str): BFD receive interval in milliseconds (300, 500, etc)
multiplier (str): BFD multiplier. (3, 7, 5, etc)
delete (bool): True if BFD configuration should be deleted.
Default value will be False if not specified.
get (bool): Get config instead of editing config. (True, False)
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `tx`, `rx`, or `multiplier` is not passed.
Examples:
>>> import pynos.device
>>> switches = ['10.24.39.230']
>>> auth = ('admin', 'password')
>>> for switch in switches:
... conn = (switch, '22')
... with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.bgp.neighbor(ip_addr='10.10.10.20',
... remote_as='65535', rbridge_id='230')
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3', rbridge_id='230')
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3', rbridge_id='230',
... get=True)
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3',
... rbridge_id='230', delete=True)
... output = dev.bgp.neighbor(ip_addr='10.10.10.20',
... delete=True, rbridge_id='230', remote_as='65535')
]
call[name[kwargs]][constant[min_tx]] assign[=] call[name[kwargs].pop, parameter[constant[tx]]]
call[name[kwargs]][constant[min_rx]] assign[=] call[name[kwargs].pop, parameter[constant[rx]]]
call[name[kwargs]][constant[router_bgp_neighbor_address]] assign[=] call[name[kwargs].pop, parameter[constant[peer_ip]]]
call[name[kwargs]][constant[delete]] assign[=] call[name[kwargs].pop, parameter[constant[delete], constant[False]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
variable[bfd_tx] assign[=] call[name[self]._peer_bfd_tx, parameter[]]
variable[bfd_rx] assign[=] call[name[self]._peer_bfd_rx, parameter[]]
variable[bfd_multiplier] assign[=] call[name[self]._peer_bfd_multiplier, parameter[]]
if call[name[kwargs].pop, parameter[constant[get], constant[False]]] begin[:]
return[call[name[self]._peer_get_bfd, parameter[name[bfd_tx], name[bfd_rx], name[bfd_multiplier]]]]
variable[config] assign[=] call[name[pynos].utilities.merge_xml, parameter[name[bfd_tx], name[bfd_rx]]]
variable[config] assign[=] call[name[pynos].utilities.merge_xml, parameter[name[config], name[bfd_multiplier]]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[peer_bfd_timers] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
identifier[bfd_tx] = identifier[self] . identifier[_peer_bfd_tx] (** identifier[kwargs] )
identifier[bfd_rx] = identifier[self] . identifier[_peer_bfd_rx] (** identifier[kwargs] )
identifier[bfd_multiplier] = identifier[self] . identifier[_peer_bfd_multiplier] (** identifier[kwargs] )
keyword[if] identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] ):
keyword[return] identifier[self] . identifier[_peer_get_bfd] ( identifier[bfd_tx] , identifier[bfd_rx] , identifier[bfd_multiplier] )
identifier[config] = identifier[pynos] . identifier[utilities] . identifier[merge_xml] ( identifier[bfd_tx] , identifier[bfd_rx] )
identifier[config] = identifier[pynos] . identifier[utilities] . identifier[merge_xml] ( identifier[config] , identifier[bfd_multiplier] )
keyword[return] identifier[callback] ( identifier[config] ) | def peer_bfd_timers(self, **kwargs):
"""Configure BFD for BGP globally.
Args:
rbridge_id (str): Rbridge to configure. (1, 225, etc)
peer_ip (str): Peer IPv4 address for BFD setting.
tx (str): BFD transmit interval in milliseconds (300, 500, etc)
rx (str): BFD receive interval in milliseconds (300, 500, etc)
multiplier (str): BFD multiplier. (3, 7, 5, etc)
delete (bool): True if BFD configuration should be deleted.
Default value will be False if not specified.
get (bool): Get config instead of editing config. (True, False)
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `tx`, `rx`, or `multiplier` is not passed.
Examples:
>>> import pynos.device
>>> switches = ['10.24.39.230']
>>> auth = ('admin', 'password')
>>> for switch in switches:
... conn = (switch, '22')
... with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.bgp.neighbor(ip_addr='10.10.10.20',
... remote_as='65535', rbridge_id='230')
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3', rbridge_id='230')
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3', rbridge_id='230',
... get=True)
... output = dev.bgp.peer_bfd_timers(peer_ip='10.10.10.20',
... rx='300', tx='300', multiplier='3',
... rbridge_id='230', delete=True)
... output = dev.bgp.neighbor(ip_addr='10.10.10.20',
... delete=True, rbridge_id='230', remote_as='65535')
"""
kwargs['min_tx'] = kwargs.pop('tx')
kwargs['min_rx'] = kwargs.pop('rx')
kwargs['router_bgp_neighbor_address'] = kwargs.pop('peer_ip')
kwargs['delete'] = kwargs.pop('delete', False)
callback = kwargs.pop('callback', self._callback)
bfd_tx = self._peer_bfd_tx(**kwargs)
bfd_rx = self._peer_bfd_rx(**kwargs)
bfd_multiplier = self._peer_bfd_multiplier(**kwargs)
if kwargs.pop('get', False):
return self._peer_get_bfd(bfd_tx, bfd_rx, bfd_multiplier) # depends on [control=['if'], data=[]]
config = pynos.utilities.merge_xml(bfd_tx, bfd_rx)
config = pynos.utilities.merge_xml(config, bfd_multiplier)
return callback(config) |
def _check_valid_data(self, data):
"""Checks that the given data is a float array with four channels.
Parameters
----------
data : :obj:`numpy.ndarray`
The data to check.
Raises
------
ValueError
If the data is invalid.
"""
if data.dtype.type is not np.float32 and \
data.dtype.type is not np.float64:
raise ValueError(
'Illegal data type. RGB-D images only support float arrays')
# note: the original condition used 'and', which let 3-channel arrays through
# and could raise IndexError on non-3D input; 'or' enforces the documented
# four-channel requirement
if len(data.shape) != 3 or data.shape[2] != 4:
raise ValueError(
'Illegal data type. RGB-D images only support four channels')
color_data = data[:, :, :3]
if np.any((color_data < 0) | (color_data > BINARY_IM_MAX_VAL)):
raise ValueError(
'Color channels must be in the range (0, BINARY_IM_MAX_VAL)') | def function[_check_valid_data, parameter[self, data]]:
constant[Checks that the given data is a float array with four channels.
Parameters
----------
data : :obj:`numpy.ndarray`
The data to check.
Raises
------
ValueError
If the data is invalid.
]
if <ast.BoolOp object at 0x7da1b05901c0> begin[:]
<ast.Raise object at 0x7da1b0591c30>
if <ast.BoolOp object at 0x7da1b0593dc0> begin[:]
<ast.Raise object at 0x7da1b0577370>
variable[color_data] assign[=] call[name[data]][tuple[[<ast.Slice object at 0x7da1b0575210>, <ast.Slice object at 0x7da1b0574e50>, <ast.Slice object at 0x7da1b05752d0>]]]
if call[name[np].any, parameter[binary_operation[compare[name[color_data] less[<] constant[0]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[color_data] greater[>] name[BINARY_IM_MAX_VAL]]]]] begin[:]
<ast.Raise object at 0x7da1b0575f60> | keyword[def] identifier[_check_valid_data] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[data] . identifier[dtype] . identifier[type] keyword[is] keyword[not] identifier[np] . identifier[float32] keyword[and] identifier[data] . identifier[dtype] . identifier[type] keyword[is] keyword[not] identifier[np] . identifier[float64] :
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] identifier[len] ( identifier[data] . identifier[shape] )!= literal[int] keyword[or] identifier[data] . identifier[shape] [ literal[int] ]!= literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[color_data] = identifier[data] [:,:,: literal[int] ]
keyword[if] identifier[np] . identifier[any] (( identifier[color_data] < literal[int] )|( identifier[color_data] > identifier[BINARY_IM_MAX_VAL] )):
keyword[raise] identifier[ValueError] (
literal[string] ) | def _check_valid_data(self, data):
"""Checks that the given data is a float array with four channels.
Parameters
----------
data : :obj:`numpy.ndarray`
The data to check.
Raises
------
ValueError
If the data is invalid.
"""
if data.dtype.type is not np.float32 and data.dtype.type is not np.float64:
raise ValueError('Illegal data type. RGB-D images only support float arrays') # depends on [control=['if'], data=[]]
if len(data.shape) != 3 or data.shape[2] != 4:  # 'or' replaces the original 'and' to enforce the four-channel contract
raise ValueError('Illegal data type. RGB-D images only support four channels') # depends on [control=['if'], data=[]]
color_data = data[:, :, :3]
if np.any((color_data < 0) | (color_data > BINARY_IM_MAX_VAL)):
raise ValueError('Color channels must be in the range (0, BINARY_IM_MAX_VAL)') # depends on [control=['if'], data=[]] |
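The same invariants checked on a toy array; BINARY_IM_MAX_VAL is assumed to be 255.0 here (8-bit color stored as float), which the snippet above does not pin down:

import numpy as np

BINARY_IM_MAX_VAL = 255.0  # assumption for this sketch
rgbd = np.zeros((4, 4, 4), dtype=np.float32)            # H x W x (R, G, B, D)
assert rgbd.dtype.type in (np.float32, np.float64)      # float-typed data
assert rgbd.ndim == 3 and rgbd.shape[2] == 4            # exactly four channels
color = rgbd[:, :, :3]
assert not np.any((color < 0) | (color > BINARY_IM_MAX_VAL))  # color in range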
def check_ip(original_ip):
'''
Checks the format of an IP address and returns it if it is correct. Otherwise it returns None.
'''
ip = original_ip.strip()
parts = ip.split('.')
if len(parts) != 4:
return None
for p in parts:
try:
p = int(p)
if (p < 0) or (p > 255):
return None
except:
return None
return ip | def function[check_ip, parameter[original_ip]]:
constant[
Checks the format of an IP address and returns it if it is correct. Otherwise it returns None.
]
variable[ip] assign[=] call[name[original_ip].strip, parameter[]]
variable[parts] assign[=] call[name[ip].split, parameter[constant[.]]]
if compare[call[name[len], parameter[name[parts]]] not_equal[!=] constant[4]] begin[:]
return[constant[None]]
for taget[name[p]] in starred[name[parts]] begin[:]
<ast.Try object at 0x7da204564eb0>
return[name[ip]] | keyword[def] identifier[check_ip] ( identifier[original_ip] ):
literal[string]
identifier[ip] = identifier[original_ip] . identifier[strip] ()
identifier[parts] = identifier[ip] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[parts] )!= literal[int] :
keyword[return] keyword[None]
keyword[for] identifier[p] keyword[in] identifier[parts] :
keyword[try] :
identifier[p] = identifier[int] ( identifier[p] )
keyword[if] ( identifier[p] < literal[int] ) keyword[or] ( identifier[p] > literal[int] ):
keyword[return] keyword[None]
keyword[except] :
keyword[return] keyword[None]
keyword[return] identifier[ip] | def check_ip(original_ip):
"""
Checks the format of an IP address and returns it if it is correct. Otherwise it returns None.
"""
ip = original_ip.strip()
parts = ip.split('.')
if len(parts) != 4:
return None # depends on [control=['if'], data=[]]
for p in parts:
try:
p = int(p)
if p < 0 or p > 255:
return None # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
return None # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['p']]
return ip |
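Illustrative calls showing the accepted and rejected shapes (the function is pure, so these run standalone once check_ip is defined):

assert check_ip(' 192.168.0.1 ') == '192.168.0.1'  # surrounding whitespace is stripped
assert check_ip('256.1.1.1') is None               # octet out of range
assert check_ip('10.0.0') is None                  # wrong number of parts
assert check_ip('a.b.c.d') is None                 # non-numeric octet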
def identify_groups(ref_labels, pred_labels, return_overlaps=False):
"""Which predicted label explains which reference label?
A predicted label explains the reference label which maximizes the minimum
of ``relative_overlaps_pred`` and ``relative_overlaps_ref``.
Compare this with ``compute_association_matrix_of_groups``.
Returns
-------
A dictionary of length ``len(np.unique(ref_labels))`` that stores for each
reference label the predicted label that best explains it.
If ``return_overlaps`` is ``True``, this will in addition return the overlap
of the reference group with the predicted group; normalized with respect to
the reference group size and the predicted group size, respectively.
"""
ref_unique, ref_counts = np.unique(ref_labels, return_counts=True)
ref_dict = dict(zip(ref_unique, ref_counts))
pred_unique, pred_counts = np.unique(pred_labels, return_counts=True)
pred_dict = dict(zip(pred_unique, pred_counts))
associated_predictions = {}
associated_overlaps = {}
for ref_label in ref_unique:
sub_pred_unique, sub_pred_counts = np.unique(pred_labels[ref_label == ref_labels], return_counts=True)
relative_overlaps_pred = [sub_pred_counts[i] / pred_dict[n] for i, n in enumerate(sub_pred_unique)]
relative_overlaps_ref = [sub_pred_counts[i] / ref_dict[ref_label] for i, n in enumerate(sub_pred_unique)]
relative_overlaps = np.c_[relative_overlaps_pred, relative_overlaps_ref]
relative_overlaps_min = np.min(relative_overlaps, axis=1)
pred_best_index = np.argsort(relative_overlaps_min)[::-1]
associated_predictions[ref_label] = sub_pred_unique[pred_best_index]
associated_overlaps[ref_label] = relative_overlaps[pred_best_index]
if return_overlaps: return associated_predictions, associated_overlaps
else: return associated_predictions | def function[identify_groups, parameter[ref_labels, pred_labels, return_overlaps]]:
constant[Which predicted label explains which reference label?
A predicted label explains the reference label which maximizes the minimum
of ``relative_overlaps_pred`` and ``relative_overlaps_ref``.
Compare this with ``compute_association_matrix_of_groups``.
Returns
-------
A dictionary of length ``len(np.unique(ref_labels))`` that stores for each
reference label the predicted label that best explains it.
If ``return_overlaps`` is ``True``, this will in addition return the overlap
of the reference group with the predicted group; normalized with respect to
the reference group size and the predicted group size, respectively.
]
<ast.Tuple object at 0x7da20c990280> assign[=] call[name[np].unique, parameter[name[ref_labels]]]
variable[ref_dict] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[ref_unique], name[ref_counts]]]]]
<ast.Tuple object at 0x7da20c993700> assign[=] call[name[np].unique, parameter[name[pred_labels]]]
variable[pred_dict] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[pred_unique], name[pred_counts]]]]]
variable[associated_predictions] assign[=] dictionary[[], []]
variable[associated_overlaps] assign[=] dictionary[[], []]
for taget[name[ref_label]] in starred[name[ref_unique]] begin[:]
<ast.Tuple object at 0x7da20c990b50> assign[=] call[name[np].unique, parameter[call[name[pred_labels]][compare[name[ref_label] equal[==] name[ref_labels]]]]]
variable[relative_overlaps_pred] assign[=] <ast.ListComp object at 0x7da20c993970>
variable[relative_overlaps_ref] assign[=] <ast.ListComp object at 0x7da20c993370>
variable[relative_overlaps] assign[=] call[name[np].c_][tuple[[<ast.Name object at 0x7da20c992b30>, <ast.Name object at 0x7da20c992d10>]]]
variable[relative_overlaps_min] assign[=] call[name[np].min, parameter[name[relative_overlaps]]]
variable[pred_best_index] assign[=] call[call[name[np].argsort, parameter[name[relative_overlaps_min]]]][<ast.Slice object at 0x7da20c992fe0>]
call[name[associated_predictions]][name[ref_label]] assign[=] call[name[sub_pred_unique]][name[pred_best_index]]
call[name[associated_overlaps]][name[ref_label]] assign[=] call[name[relative_overlaps]][name[pred_best_index]]
if name[return_overlaps] begin[:]
return[tuple[[<ast.Name object at 0x7da2043447c0>, <ast.Name object at 0x7da2043442e0>]]] | keyword[def] identifier[identify_groups] ( identifier[ref_labels] , identifier[pred_labels] , identifier[return_overlaps] = keyword[False] ):
literal[string]
identifier[ref_unique] , identifier[ref_counts] = identifier[np] . identifier[unique] ( identifier[ref_labels] , identifier[return_counts] = keyword[True] )
identifier[ref_dict] = identifier[dict] ( identifier[zip] ( identifier[ref_unique] , identifier[ref_counts] ))
identifier[pred_unique] , identifier[pred_counts] = identifier[np] . identifier[unique] ( identifier[pred_labels] , identifier[return_counts] = keyword[True] )
identifier[pred_dict] = identifier[dict] ( identifier[zip] ( identifier[pred_unique] , identifier[pred_counts] ))
identifier[associated_predictions] ={}
identifier[associated_overlaps] ={}
keyword[for] identifier[ref_label] keyword[in] identifier[ref_unique] :
identifier[sub_pred_unique] , identifier[sub_pred_counts] = identifier[np] . identifier[unique] ( identifier[pred_labels] [ identifier[ref_label] == identifier[ref_labels] ], identifier[return_counts] = keyword[True] )
identifier[relative_overlaps_pred] =[ identifier[sub_pred_counts] [ identifier[i] ]/ identifier[pred_dict] [ identifier[n] ] keyword[for] identifier[i] , identifier[n] keyword[in] identifier[enumerate] ( identifier[sub_pred_unique] )]
identifier[relative_overlaps_ref] =[ identifier[sub_pred_counts] [ identifier[i] ]/ identifier[ref_dict] [ identifier[ref_label] ] keyword[for] identifier[i] , identifier[n] keyword[in] identifier[enumerate] ( identifier[sub_pred_unique] )]
identifier[relative_overlaps] = identifier[np] . identifier[c_] [ identifier[relative_overlaps_pred] , identifier[relative_overlaps_ref] ]
identifier[relative_overlaps_min] = identifier[np] . identifier[min] ( identifier[relative_overlaps] , identifier[axis] = literal[int] )
identifier[pred_best_index] = identifier[np] . identifier[argsort] ( identifier[relative_overlaps_min] )[::- literal[int] ]
identifier[associated_predictions] [ identifier[ref_label] ]= identifier[sub_pred_unique] [ identifier[pred_best_index] ]
identifier[associated_overlaps] [ identifier[ref_label] ]= identifier[relative_overlaps] [ identifier[pred_best_index] ]
keyword[if] identifier[return_overlaps] : keyword[return] identifier[associated_predictions] , identifier[associated_overlaps]
keyword[else] : keyword[return] identifier[associated_predictions] | def identify_groups(ref_labels, pred_labels, return_overlaps=False):
"""Which predicted label explains which reference label?
A predicted label explains the reference label which maximizes the minimum
of ``relative_overlaps_pred`` and ``relative_overlaps_ref``.
Compare this with ``compute_association_matrix_of_groups``.
Returns
-------
A dictionary of length ``len(np.unique(ref_labels))`` that stores for each
reference label the predicted label that best explains it.
If ``return_overlaps`` is ``True``, this will in addition return the overlap
of the reference group with the predicted group; normalized with respect to
the reference group size and the predicted group size, respectively.
"""
(ref_unique, ref_counts) = np.unique(ref_labels, return_counts=True)
ref_dict = dict(zip(ref_unique, ref_counts))
(pred_unique, pred_counts) = np.unique(pred_labels, return_counts=True)
pred_dict = dict(zip(pred_unique, pred_counts))
associated_predictions = {}
associated_overlaps = {}
for ref_label in ref_unique:
(sub_pred_unique, sub_pred_counts) = np.unique(pred_labels[ref_label == ref_labels], return_counts=True)
relative_overlaps_pred = [sub_pred_counts[i] / pred_dict[n] for (i, n) in enumerate(sub_pred_unique)]
relative_overlaps_ref = [sub_pred_counts[i] / ref_dict[ref_label] for (i, n) in enumerate(sub_pred_unique)]
relative_overlaps = np.c_[relative_overlaps_pred, relative_overlaps_ref]
relative_overlaps_min = np.min(relative_overlaps, axis=1)
pred_best_index = np.argsort(relative_overlaps_min)[::-1]
associated_predictions[ref_label] = sub_pred_unique[pred_best_index]
associated_overlaps[ref_label] = relative_overlaps[pred_best_index] # depends on [control=['for'], data=['ref_label']]
if return_overlaps:
return (associated_predictions, associated_overlaps) # depends on [control=['if'], data=[]]
else:
return associated_predictions |
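A toy run of the matching (assumes numpy and the function above). Reference group 'a' overlaps prediction 0 in 2 of its 3 members while covering all of group 0, so 0 explains it best; for 'b' the two candidates tie at a minimum overlap of 0.5 and the descending sort happens to rank prediction 2 first:

import numpy as np

ref = np.array(['a', 'a', 'a', 'b', 'b'])
pred = np.array([0, 0, 1, 1, 2])
best = identify_groups(ref, pred)
print(best['a'][0], best['b'][0])  # -> 0 2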
def _AddEventData(self, event_data):
"""Adds event data.
Args:
event_data (EventData): event data.
"""
identifier = event_data.GetIdentifier()
lookup_key = identifier.CopyToString()
self._storage_writer.AddEventData(event_data)
identifier = event_data.GetIdentifier()
self._event_data_identifier_mappings[lookup_key] = identifier | def function[_AddEventData, parameter[self, event_data]]:
constant[Adds event data.
Args:
event_data (EventData): event data.
]
variable[identifier] assign[=] call[name[event_data].GetIdentifier, parameter[]]
variable[lookup_key] assign[=] call[name[identifier].CopyToString, parameter[]]
call[name[self]._storage_writer.AddEventData, parameter[name[event_data]]]
variable[identifier] assign[=] call[name[event_data].GetIdentifier, parameter[]]
call[name[self]._event_data_identifier_mappings][name[lookup_key]] assign[=] name[identifier] | keyword[def] identifier[_AddEventData] ( identifier[self] , identifier[event_data] ):
literal[string]
identifier[identifier] = identifier[event_data] . identifier[GetIdentifier] ()
identifier[lookup_key] = identifier[identifier] . identifier[CopyToString] ()
identifier[self] . identifier[_storage_writer] . identifier[AddEventData] ( identifier[event_data] )
identifier[identifier] = identifier[event_data] . identifier[GetIdentifier] ()
identifier[self] . identifier[_event_data_identifier_mappings] [ identifier[lookup_key] ]= identifier[identifier] | def _AddEventData(self, event_data):
"""Adds event data.
Args:
event_data (EventData): event data.
"""
identifier = event_data.GetIdentifier()
lookup_key = identifier.CopyToString()
self._storage_writer.AddEventData(event_data)
identifier = event_data.GetIdentifier()
self._event_data_identifier_mappings[lookup_key] = identifier |
def add(self, artifact_type: ArtifactType, src_path: str,
dst_path: str=None):
"""Add an artifact of type `artifact_type` at `src_path`.
`src_path` should be the path of the file relative to project root.
`dst_path`, if given, is the desired path of the artifact in dependent
targets, relative to its base path (by type).
"""
if dst_path is None:
dst_path = src_path
other_src_path = self._artifacts[artifact_type].setdefault(
dst_path, src_path)
if src_path != other_src_path:
raise RuntimeError(
'{} artifact with dest path {} exists with different src '
'path: {} != {}'.format(artifact_type, dst_path, src_path,
other_src_path)) | def function[add, parameter[self, artifact_type, src_path, dst_path]]:
constant[Add an artifact of type `artifact_type` at `src_path`.
`src_path` should be the path of the file relative to project root.
`dst_path`, if given, is the desired path of the artifact in dependent
targets, relative to its base path (by type).
]
if compare[name[dst_path] is constant[None]] begin[:]
variable[dst_path] assign[=] name[src_path]
variable[other_src_path] assign[=] call[call[name[self]._artifacts][name[artifact_type]].setdefault, parameter[name[dst_path], name[src_path]]]
if compare[name[src_path] not_equal[!=] name[other_src_path]] begin[:]
<ast.Raise object at 0x7da20e955330> | keyword[def] identifier[add] ( identifier[self] , identifier[artifact_type] : identifier[ArtifactType] , identifier[src_path] : identifier[str] ,
identifier[dst_path] : identifier[str] = keyword[None] ):
literal[string]
keyword[if] identifier[dst_path] keyword[is] keyword[None] :
identifier[dst_path] = identifier[src_path]
identifier[other_src_path] = identifier[self] . identifier[_artifacts] [ identifier[artifact_type] ]. identifier[setdefault] (
identifier[dst_path] , identifier[src_path] )
keyword[if] identifier[src_path] != identifier[other_src_path] :
keyword[raise] identifier[RuntimeError] (
literal[string]
literal[string] . identifier[format] ( identifier[artifact_type] , identifier[dst_path] , identifier[src_path] ,
identifier[other_src_path] )) | def add(self, artifact_type: ArtifactType, src_path: str, dst_path: str=None):
"""Add an artifact of type `artifact_type` at `src_path`.
`src_path` should be the path of the file relative to project root.
`dst_path`, if given, is the desired path of the artifact in dependent
targets, relative to its base path (by type).
"""
if dst_path is None:
dst_path = src_path # depends on [control=['if'], data=['dst_path']]
other_src_path = self._artifacts[artifact_type].setdefault(dst_path, src_path)
if src_path != other_src_path:
raise RuntimeError('{} artifact with dest path {} exists with different src path: {} != {}'.format(artifact_type, dst_path, src_path, other_src_path)) # depends on [control=['if'], data=['src_path', 'other_src_path']] |
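The conflict check leans on dict.setdefault, which returns the existing value when the key is already present and otherwise inserts and returns the new one. The same idiom in a self-contained sketch (names are illustrative):

artifacts = {}

def register(dst, src):
    other = artifacts.setdefault(dst, src)
    if other != src:
        raise RuntimeError('dest %s already maps to %s, not %s' % (dst, other, src))

register('lib/a.so', 'build/a.so')
register('lib/a.so', 'build/a.so')    # idempotent: re-adding the same mapping is fine
# register('lib/a.so', 'other/a.so')  # would raise: conflicting source for one dest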
def get(cls): # type: () -> Shell
"""
Retrieve the current shell.
"""
if cls._shell is not None:
return cls._shell
try:
name, path = detect_shell(os.getpid())
except (RuntimeError, ShellDetectionFailure):
raise RuntimeError("Unable to detect the current shell.")
cls._shell = cls(name, path)
return cls._shell | def function[get, parameter[cls]]:
constant[
Retrieve the current shell.
]
if compare[name[cls]._shell is_not constant[None]] begin[:]
return[name[cls]._shell]
<ast.Try object at 0x7da18f7207f0>
name[cls]._shell assign[=] call[name[cls], parameter[name[name], name[path]]]
return[name[cls]._shell] | keyword[def] identifier[get] ( identifier[cls] ):
literal[string]
keyword[if] identifier[cls] . identifier[_shell] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[cls] . identifier[_shell]
keyword[try] :
identifier[name] , identifier[path] = identifier[detect_shell] ( identifier[os] . identifier[getpid] ())
keyword[except] ( identifier[RuntimeError] , identifier[ShellDetectionFailure] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[cls] . identifier[_shell] = identifier[cls] ( identifier[name] , identifier[path] )
keyword[return] identifier[cls] . identifier[_shell] | def get(cls): # type: () -> Shell
'\n Retrieve the current shell.\n '
if cls._shell is not None:
return cls._shell # depends on [control=['if'], data=[]]
try:
(name, path) = detect_shell(os.getpid()) # depends on [control=['try'], data=[]]
except (RuntimeError, ShellDetectionFailure):
raise RuntimeError('Unable to detect the current shell.') # depends on [control=['except'], data=[]]
cls._shell = cls(name, path)
return cls._shell |
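The classmethod memoizes the detection result on the class, so only the first call pays for shell detection. The caching pattern in miniature (a stand-in value replaces the detect_shell call):

class Once:
    _value = None

    @classmethod
    def get(cls):
        if cls._value is None:
            cls._value = object()  # stand-in for the expensive detection step
        return cls._value

assert Once.get() is Once.get()  # the second call returns the cached instance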
def get_visible_child(self, parent, locator, params=None, timeout=None):
"""
Get child-element both present AND visible in the DOM.
If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise
TimeoutException should the element not be found.
:param parent: parent-element
:param locator: locator tuple
:param params: (optional) locator params
:param timeout: (optional) time to wait for element (default: self._explicit_wait)
:return: WebElement instance
"""
return self.get_present_child(parent, locator, params, timeout, True) | def function[get_visible_child, parameter[self, parent, locator, params, timeout]]:
constant[
Get child-element both present AND visible in the DOM.
If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise
TimeoutException should the element not be found.
:param parent: parent-element
:param locator: locator tuple
:param params: (optional) locator params
:param timeout: (optional) time to wait for element (default: self._explicit_wait)
:return: WebElement instance
]
return[call[name[self].get_present_child, parameter[name[parent], name[locator], name[params], name[timeout], constant[True]]]] | keyword[def] identifier[get_visible_child] ( identifier[self] , identifier[parent] , identifier[locator] , identifier[params] = keyword[None] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[get_present_child] ( identifier[parent] , identifier[locator] , identifier[params] , identifier[timeout] , keyword[True] ) | def get_visible_child(self, parent, locator, params=None, timeout=None):
"""
Get child-element both present AND visible in the DOM.
If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise
TimeoutException should the element not be found.
:param parent: parent-element
:param locator: locator tuple
:param params: (optional) locator params
:param timeout: (optional) time to wait for element (default: self._explicit_wait)
:return: WebElement instance
"""
return self.get_present_child(parent, locator, params, timeout, True) |
def toggle_white_spaces(self):
"""
Toggles document white spaces display.
:return: Method success.
:rtype: bool
"""
text_option = self.get_default_text_option()
if text_option.flags().__int__():
text_option = QTextOption()
text_option.setTabStop(self.tabStopWidth())
else:
text_option.setFlags(
text_option.flags() | QTextOption.ShowTabsAndSpaces | QTextOption.ShowLineAndParagraphSeparators)
self.set_default_text_option(text_option)
return True | def function[toggle_white_spaces, parameter[self]]:
constant[
Toggles document white spaces display.
:return: Method success.
:rtype: bool
]
variable[text_option] assign[=] call[name[self].get_default_text_option, parameter[]]
if call[call[name[text_option].flags, parameter[]].__int__, parameter[]] begin[:]
variable[text_option] assign[=] call[name[QTextOption], parameter[]]
call[name[text_option].setTabStop, parameter[call[name[self].tabStopWidth, parameter[]]]]
call[name[self].set_default_text_option, parameter[name[text_option]]]
return[constant[True]] | keyword[def] identifier[toggle_white_spaces] ( identifier[self] ):
literal[string]
identifier[text_option] = identifier[self] . identifier[get_default_text_option] ()
keyword[if] identifier[text_option] . identifier[flags] (). identifier[__int__] ():
identifier[text_option] = identifier[QTextOption] ()
identifier[text_option] . identifier[setTabStop] ( identifier[self] . identifier[tabStopWidth] ())
keyword[else] :
identifier[text_option] . identifier[setFlags] (
identifier[text_option] . identifier[flags] ()| identifier[QTextOption] . identifier[ShowTabsAndSpaces] | identifier[QTextOption] . identifier[ShowLineAndParagraphSeparators] )
identifier[self] . identifier[set_default_text_option] ( identifier[text_option] )
keyword[return] keyword[True] | def toggle_white_spaces(self):
"""
Toggles document white spaces display.
:return: Method success.
:rtype: bool
"""
text_option = self.get_default_text_option()
if text_option.flags().__int__():
text_option = QTextOption()
text_option.setTabStop(self.tabStopWidth()) # depends on [control=['if'], data=[]]
else:
text_option.setFlags(text_option.flags() | QTextOption.ShowTabsAndSpaces | QTextOption.ShowLineAndParagraphSeparators)
self.set_default_text_option(text_option)
return True |
def precompute(self, distance_modulus_array=None):
"""
DEPRECATED: ADW 20170627
Precompute color probabilities for background ('u_background')
and signal ('u_color') for each star in catalog. Precompute
observable fraction in each ROI pixel. # Precompute still
operates over the full ROI, not just the likelihood region
Parameters:
-----------
distance_modulus_array : Array of distance moduli
Returns:
--------
None
"""
msg = "'%s.precompute': ADW 2017-09-20"%self.__class__.__name__
DeprecationWarning(msg)  # note: constructed but never emitted; warnings.warn(msg, DeprecationWarning) would actually warn
if distance_modulus_array is not None:
self.distance_modulus_array = distance_modulus_array
else:
# 'sel' in the original is undefined (truncated); assume the configured grid as the fallback
self.distance_modulus_array = self.config['scan']['distance_modulus_array']
# Observable fraction for each pixel
self.u_color_array = [[]] * len(self.distance_modulus_array)
self.observable_fraction_sparse_array = [[]] * len(self.distance_modulus_array)
logger.info('Looping over distance moduli in precompute ...')
for ii, distance_modulus in enumerate(self.distance_modulus_array):
logger.info(' (%i/%i) Distance Modulus = %.2f ...'%(ii+1, len(self.distance_modulus_array), distance_modulus))
self.u_color_array[ii] = False
if self.config['scan']['color_lut_infile'] is not None:
DeprecationWarning("'color_lut' is deprecated")
logger.info(' Precomputing signal color from %s'%(self.config['scan']['color_lut_infile']))
self.u_color_array[ii] = ugali.analysis.color_lut.readColorLUT(self.config['scan']['color_lut_infile'],
distance_modulus,
self.loglike.catalog.mag_1,
self.loglike.catalog.mag_2,
self.loglike.catalog.mag_err_1,
self.loglike.catalog.mag_err_2)
if not np.any(self.u_color_array[ii]):
logger.info(' Precomputing signal color on the fly...')
self.u_color_array[ii] = self.loglike.calc_signal_color(distance_modulus)
# Calculate over all pixels in ROI
self.observable_fraction_sparse_array[ii] = self.loglike.calc_observable_fraction(distance_modulus)
self.u_color_array = np.array(self.u_color_array) | def function[precompute, parameter[self, distance_modulus_array]]:
constant[
DEPRECATED: ADW 20170627
Precompute color probabilities for background ('u_background')
and signal ('u_color') for each star in catalog. Precompute
observable fraction in each ROI pixel. # Precompute still
operates over the full ROI, not just the likelihood region
Parameters:
-----------
distance_modulus_array : Array of distance moduli
Returns:
--------
None
]
variable[msg] assign[=] binary_operation[constant['%s.precompute': ADW 2017-09-20] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__]
call[name[DeprecationWarning], parameter[name[msg]]]
if compare[name[distance_modulus_array] is_not constant[None]] begin[:]
name[self].distance_modulus_array assign[=] name[distance_modulus_array]
name[self].u_color_array assign[=] binary_operation[list[[<ast.List object at 0x7da18bcc9e70>]] * call[name[len], parameter[name[self].distance_modulus_array]]]
name[self].observable_fraction_sparse_array assign[=] binary_operation[list[[<ast.List object at 0x7da18bcc9c30>]] * call[name[len], parameter[name[self].distance_modulus_array]]]
call[name[logger].info, parameter[constant[Looping over distance moduli in precompute ...]]]
for taget[tuple[[<ast.Name object at 0x7da18bcc9ae0>, <ast.Name object at 0x7da18bcc8760>]]] in starred[call[name[enumerate], parameter[name[self].distance_modulus_array]]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[ (%i/%i) Distance Modulus = %.2f ...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da18bccb010>, <ast.Call object at 0x7da18bccbb50>, <ast.Name object at 0x7da18bccb8e0>]]]]]
call[name[self].u_color_array][name[ii]] assign[=] constant[False]
if compare[call[call[name[self].config][constant[scan]]][constant[color_lut_infile]] is_not constant[None]] begin[:]
call[name[DeprecationWarning], parameter[constant['color_lut' is deprecated]]]
call[name[logger].info, parameter[binary_operation[constant[ Precomputing signal color from %s] <ast.Mod object at 0x7da2590d6920> call[call[name[self].config][constant[scan]]][constant[color_lut_infile]]]]]
call[name[self].u_color_array][name[ii]] assign[=] call[name[ugali].analysis.color_lut.readColorLUT, parameter[call[call[name[self].config][constant[scan]]][constant[color_lut_infile]], name[distance_modulus], name[self].loglike.catalog.mag_1, name[self].loglike.catalog.mag_2, name[self].loglike.catalog.mag_err_1, name[self].loglike.catalog.mag_err_2]]
if <ast.UnaryOp object at 0x7da18bcc8df0> begin[:]
call[name[logger].info, parameter[constant[ Precomputing signal color on the fly...]]]
call[name[self].u_color_array][name[ii]] assign[=] call[name[self].loglike.calc_signal_color, parameter[name[distance_modulus]]]
call[name[self].observable_fraction_sparse_array][name[ii]] assign[=] call[name[self].loglike.calc_observable_fraction, parameter[name[distance_modulus]]]
name[self].u_color_array assign[=] call[name[np].array, parameter[name[self].u_color_array]] | keyword[def] identifier[precompute] ( identifier[self] , identifier[distance_modulus_array] = keyword[None] ):
literal[string]
identifier[msg] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__]
identifier[DeprecationWarning] ( identifier[msg] )
keyword[if] identifier[distance_modulus_array] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[distance_modulus_array] = identifier[distance_modulus_array]
keyword[else] :
identifier[self] . identifier[distance_modulus_array] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[u_color_array] =[[]]* identifier[len] ( identifier[self] . identifier[distance_modulus_array] )
identifier[self] . identifier[observable_fraction_sparse_array] =[[]]* identifier[len] ( identifier[self] . identifier[distance_modulus_array] )
identifier[logger] . identifier[info] ( literal[string] )
keyword[for] identifier[ii] , identifier[distance_modulus] keyword[in] identifier[enumerate] ( identifier[self] . identifier[distance_modulus_array] ):
identifier[logger] . identifier[info] ( literal[string] %( identifier[ii] + literal[int] , identifier[len] ( identifier[self] . identifier[distance_modulus_array] ), identifier[distance_modulus] ))
identifier[self] . identifier[u_color_array] [ identifier[ii] ]= keyword[False]
keyword[if] identifier[self] . identifier[config] [ literal[string] ][ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[DeprecationWarning] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] %( identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]))
identifier[self] . identifier[u_color_array] [ identifier[ii] ]= identifier[ugali] . identifier[analysis] . identifier[color_lut] . identifier[readColorLUT] ( identifier[self] . identifier[config] [ literal[string] ][ literal[string] ],
identifier[distance_modulus] ,
identifier[self] . identifier[loglike] . identifier[catalog] . identifier[mag_1] ,
identifier[self] . identifier[loglike] . identifier[catalog] . identifier[mag_2] ,
identifier[self] . identifier[loglike] . identifier[catalog] . identifier[mag_err_1] ,
identifier[self] . identifier[loglike] . identifier[catalog] . identifier[mag_err_2] )
keyword[if] keyword[not] identifier[np] . identifier[any] ( identifier[self] . identifier[u_color_array] [ identifier[ii] ]):
identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[u_color_array] [ identifier[ii] ]= identifier[self] . identifier[loglike] . identifier[calc_signal_color] ( identifier[distance_modulus] )
identifier[self] . identifier[observable_fraction_sparse_array] [ identifier[ii] ]= identifier[self] . identifier[loglike] . identifier[calc_observable_fraction] ( identifier[distance_modulus] )
identifier[self] . identifier[u_color_array] = identifier[np] . identifier[array] ( identifier[self] . identifier[u_color_array] ) | def precompute(self, distance_modulus_array=None):
"""
DEPRECATED: ADW 20170627
Precompute color probabilities for background ('u_background')
and signal ('u_color') for each star in catalog. Precompute
observable fraction in each ROI pixel. # Precompute still
operates over the full ROI, not just the likelihood region
Parameters:
-----------
distance_modulus_array : Array of distance moduli
Returns:
--------
None
"""
msg = "'%s.precompute': ADW 2017-09-20" % self.__class__.__name__
DeprecationWarning(msg)
if distance_modulus_array is not None:
self.distance_modulus_array = distance_modulus_array # depends on [control=['if'], data=['distance_modulus_array']]
else:
self.distance_modulus_array = sel
# Observable fraction for each pixel
self.u_color_array = [[]] * len(self.distance_modulus_array)
self.observable_fraction_sparse_array = [[]] * len(self.distance_modulus_array)
logger.info('Looping over distance moduli in precompute ...')
for (ii, distance_modulus) in enumerate(self.distance_modulus_array):
logger.info(' (%i/%i) Distance Modulus = %.2f ...' % (ii + 1, len(self.distance_modulus_array), distance_modulus))
self.u_color_array[ii] = False
if self.config['scan']['color_lut_infile'] is not None:
DeprecationWarning("'color_lut' is deprecated")
logger.info(' Precomputing signal color from %s' % self.config['scan']['color_lut_infile'])
self.u_color_array[ii] = ugali.analysis.color_lut.readColorLUT(self.config['scan']['color_lut_infile'], distance_modulus, self.loglike.catalog.mag_1, self.loglike.catalog.mag_2, self.loglike.catalog.mag_err_1, self.loglike.catalog.mag_err_2) # depends on [control=['if'], data=[]]
if not np.any(self.u_color_array[ii]):
logger.info(' Precomputing signal color on the fly...')
self.u_color_array[ii] = self.loglike.calc_signal_color(distance_modulus) # depends on [control=['if'], data=[]]
# Calculate over all pixels in ROI
self.observable_fraction_sparse_array[ii] = self.loglike.calc_observable_fraction(distance_modulus) # depends on [control=['for'], data=[]]
self.u_color_array = np.array(self.u_color_array) |
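A side note on the row above: the bare `DeprecationWarning(msg)` expression constructs the exception object and immediately discards it, so no warning is ever shown to the caller. A minimal sketch of the idiomatic form, using only the standard library (the class name substituted into the message is illustrative):

import warnings

msg = "'%s.precompute': ADW 2017-09-20" % 'GridSearch'  # class name is an assumption
# warnings.warn() actually emits the message; a bare DeprecationWarning(msg)
# silently creates and drops the exception object.
warnings.warn(msg, DeprecationWarning, stacklevel=2)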
def get_variables(self, include_nontrainable=False):
"""
Returns the TensorFlow variables used by the baseline.
Returns:
List of variables
"""
if include_nontrainable:
return [self.all_variables[key] for key in sorted(self.all_variables)]
else:
return [self.variables[key] for key in sorted(self.variables)] | def function[get_variables, parameter[self, include_nontrainable]]:
constant[
Returns the TensorFlow variables used by the baseline.
Returns:
List of variables
]
if name[include_nontrainable] begin[:]
return[<ast.ListComp object at 0x7da18f812620>] | keyword[def] identifier[get_variables] ( identifier[self] , identifier[include_nontrainable] = keyword[False] ):
literal[string]
keyword[if] identifier[include_nontrainable] :
keyword[return] [ identifier[self] . identifier[all_variables] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[self] . identifier[all_variables] )]
keyword[else] :
keyword[return] [ identifier[self] . identifier[variables] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[sorted] ( identifier[self] . identifier[variables] )] | def get_variables(self, include_nontrainable=False):
"""
Returns the TensorFlow variables used by the baseline.
Returns:
List of variables
"""
if include_nontrainable:
return [self.all_variables[key] for key in sorted(self.all_variables)] # depends on [control=['if'], data=[]]
else:
return [self.variables[key] for key in sorted(self.variables)] |
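A self-contained toy of the accessor above; `ToyBaseline` and the string stand-ins for tf.Variable objects are assumptions, used only to show the sorted-key ordering:

class ToyBaseline:
    def __init__(self):
        # stand-ins for tf.Variable objects
        self.variables = {'w0': 'var-w0'}                        # trainable only
        self.all_variables = {'w0': 'var-w0', 'step': 'var-step'}

    def get_variables(self, include_nontrainable=False):
        if include_nontrainable:
            return [self.all_variables[k] for k in sorted(self.all_variables)]
        return [self.variables[k] for k in sorted(self.variables)]

b = ToyBaseline()
assert b.get_variables() == ['var-w0']
assert b.get_variables(include_nontrainable=True) == ['var-step', 'var-w0']

Sorting by key makes the returned order stable across calls, which matters when the list is zipped against gradients or checkpoint tensors.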
def cinder(*arg):
"""
    Cinder decorator for registering a function to process Cinder notifications.
    If event_type includes a wildcard, {pattern: function} is put into the process_wildcard dict;
    otherwise {event_type: function} is put into the process dict.
:param arg: event_type of notification
"""
check_event_type(Openstack.Cinder, *arg)
event_type = arg[0]
def decorator(func):
if event_type.find("*") != -1:
event_type_pattern = pre_compile(event_type)
cinder_customer_process_wildcard[event_type_pattern] = func
else:
cinder_customer_process[event_type] = func
log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
@functools.wraps(func)
def wrapper(*args, **kwargs):
func(*args, **kwargs)
return wrapper
return decorator | def function[cinder, parameter[]]:
constant[
    Cinder decorator for registering a function to process Cinder notifications.
    If event_type includes a wildcard, {pattern: function} is put into the process_wildcard dict;
    otherwise {event_type: function} is put into the process dict.
:param arg: event_type of notification
]
call[name[check_event_type], parameter[name[Openstack].Cinder, <ast.Starred object at 0x7da20e955330>]]
variable[event_type] assign[=] call[name[arg]][constant[0]]
def function[decorator, parameter[func]]:
if compare[call[name[event_type].find, parameter[constant[*]]] not_equal[!=] <ast.UnaryOp object at 0x7da20e954310>] begin[:]
variable[event_type_pattern] assign[=] call[name[pre_compile], parameter[name[event_type]]]
call[name[cinder_customer_process_wildcard]][name[event_type_pattern]] assign[=] name[func]
call[name[log].info, parameter[call[constant[add function {0} to process event_type:{1}].format, parameter[name[func].__name__, name[event_type]]]]]
def function[wrapper, parameter[]]:
call[name[func], parameter[<ast.Starred object at 0x7da2054a6590>]]
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[cinder] (* identifier[arg] ):
literal[string]
identifier[check_event_type] ( identifier[Openstack] . identifier[Cinder] ,* identifier[arg] )
identifier[event_type] = identifier[arg] [ literal[int] ]
keyword[def] identifier[decorator] ( identifier[func] ):
keyword[if] identifier[event_type] . identifier[find] ( literal[string] )!=- literal[int] :
identifier[event_type_pattern] = identifier[pre_compile] ( identifier[event_type] )
identifier[cinder_customer_process_wildcard] [ identifier[event_type_pattern] ]= identifier[func]
keyword[else] :
identifier[cinder_customer_process] [ identifier[event_type] ]= identifier[func]
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[func] . identifier[__name__] , identifier[event_type] ))
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def cinder(*arg):
"""
    Cinder decorator for registering a function to process Cinder notifications.
    If event_type includes a wildcard, {pattern: function} is put into the process_wildcard dict;
    otherwise {event_type: function} is put into the process dict.
:param arg: event_type of notification
"""
check_event_type(Openstack.Cinder, *arg)
event_type = arg[0]
def decorator(func):
if event_type.find('*') != -1:
event_type_pattern = pre_compile(event_type)
cinder_customer_process_wildcard[event_type_pattern] = func # depends on [control=['if'], data=[]]
else:
cinder_customer_process[event_type] = func
log.info('add function {0} to process event_type:{1}'.format(func.__name__, event_type))
@functools.wraps(func)
def wrapper(*args, **kwargs):
func(*args, **kwargs)
return wrapper
return decorator |
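How the decorator above is applied, assuming the module it lives in is importable; the event-type strings are illustrative and must pass `check_event_type`:

@cinder('volume.create.end')      # exact match -> cinder_customer_process
def on_volume_created(payload):   # illustrative handler
    print('volume created:', payload)

@cinder('snapshot.*')             # wildcard -> cinder_customer_process_wildcard
def on_snapshot_event(payload):
    print('snapshot event:', payload)

Note that `wrapper` calls `func(*args, **kwargs)` without returning the result, so registered handlers are effectively fire-and-forget.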
def on_save_interpretation_button(self, event):
"""
        when the save button is pressed, the interpretation is saved to the
        pmag_results_table data in all coordinate systems
"""
if self.current_fit:
self.current_fit.saved = True
calculation_type = self.current_fit.get(self.COORDINATE_SYSTEM)[
'calculation_type']
tmin = str(self.tmin_box.GetValue())
tmax = str(self.tmax_box.GetValue())
self.current_fit.put(self.s, 'specimen', self.get_PCA_parameters(
self.s, self.current_fit, tmin, tmax, 'specimen', calculation_type))
if len(self.Data[self.s]['zijdblock_geo']) > 0:
self.current_fit.put(self.s, 'geographic', self.get_PCA_parameters(
self.s, self.current_fit, tmin, tmax, 'geographic', calculation_type))
if len(self.Data[self.s]['zijdblock_tilt']) > 0:
self.current_fit.put(self.s, 'tilt-corrected', self.get_PCA_parameters(
self.s, self.current_fit, tmin, tmax, 'tilt-corrected', calculation_type))
# calculate high level data
self.calculate_high_levels_data()
self.plot_high_levels_data()
self.on_menu_save_interpretation(event)
self.update_selection()
self.close_warning = True | def function[on_save_interpretation_button, parameter[self, event]]:
constant[
    when the save button is pressed, the interpretation is saved to the
    pmag_results_table data in all coordinate systems
]
if name[self].current_fit begin[:]
name[self].current_fit.saved assign[=] constant[True]
variable[calculation_type] assign[=] call[call[name[self].current_fit.get, parameter[name[self].COORDINATE_SYSTEM]]][constant[calculation_type]]
variable[tmin] assign[=] call[name[str], parameter[call[name[self].tmin_box.GetValue, parameter[]]]]
variable[tmax] assign[=] call[name[str], parameter[call[name[self].tmax_box.GetValue, parameter[]]]]
call[name[self].current_fit.put, parameter[name[self].s, constant[specimen], call[name[self].get_PCA_parameters, parameter[name[self].s, name[self].current_fit, name[tmin], name[tmax], constant[specimen], name[calculation_type]]]]]
if compare[call[name[len], parameter[call[call[name[self].Data][name[self].s]][constant[zijdblock_geo]]]] greater[>] constant[0]] begin[:]
call[name[self].current_fit.put, parameter[name[self].s, constant[geographic], call[name[self].get_PCA_parameters, parameter[name[self].s, name[self].current_fit, name[tmin], name[tmax], constant[geographic], name[calculation_type]]]]]
if compare[call[name[len], parameter[call[call[name[self].Data][name[self].s]][constant[zijdblock_tilt]]]] greater[>] constant[0]] begin[:]
call[name[self].current_fit.put, parameter[name[self].s, constant[tilt-corrected], call[name[self].get_PCA_parameters, parameter[name[self].s, name[self].current_fit, name[tmin], name[tmax], constant[tilt-corrected], name[calculation_type]]]]]
call[name[self].calculate_high_levels_data, parameter[]]
call[name[self].plot_high_levels_data, parameter[]]
call[name[self].on_menu_save_interpretation, parameter[name[event]]]
call[name[self].update_selection, parameter[]]
name[self].close_warning assign[=] constant[True] | keyword[def] identifier[on_save_interpretation_button] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[self] . identifier[current_fit] :
identifier[self] . identifier[current_fit] . identifier[saved] = keyword[True]
identifier[calculation_type] = identifier[self] . identifier[current_fit] . identifier[get] ( identifier[self] . identifier[COORDINATE_SYSTEM] )[
literal[string] ]
identifier[tmin] = identifier[str] ( identifier[self] . identifier[tmin_box] . identifier[GetValue] ())
identifier[tmax] = identifier[str] ( identifier[self] . identifier[tmax_box] . identifier[GetValue] ())
identifier[self] . identifier[current_fit] . identifier[put] ( identifier[self] . identifier[s] , literal[string] , identifier[self] . identifier[get_PCA_parameters] (
identifier[self] . identifier[s] , identifier[self] . identifier[current_fit] , identifier[tmin] , identifier[tmax] , literal[string] , identifier[calculation_type] ))
keyword[if] identifier[len] ( identifier[self] . identifier[Data] [ identifier[self] . identifier[s] ][ literal[string] ])> literal[int] :
identifier[self] . identifier[current_fit] . identifier[put] ( identifier[self] . identifier[s] , literal[string] , identifier[self] . identifier[get_PCA_parameters] (
identifier[self] . identifier[s] , identifier[self] . identifier[current_fit] , identifier[tmin] , identifier[tmax] , literal[string] , identifier[calculation_type] ))
keyword[if] identifier[len] ( identifier[self] . identifier[Data] [ identifier[self] . identifier[s] ][ literal[string] ])> literal[int] :
identifier[self] . identifier[current_fit] . identifier[put] ( identifier[self] . identifier[s] , literal[string] , identifier[self] . identifier[get_PCA_parameters] (
identifier[self] . identifier[s] , identifier[self] . identifier[current_fit] , identifier[tmin] , identifier[tmax] , literal[string] , identifier[calculation_type] ))
identifier[self] . identifier[calculate_high_levels_data] ()
identifier[self] . identifier[plot_high_levels_data] ()
identifier[self] . identifier[on_menu_save_interpretation] ( identifier[event] )
identifier[self] . identifier[update_selection] ()
identifier[self] . identifier[close_warning] = keyword[True] | def on_save_interpretation_button(self, event):
"""
    when the save button is pressed, the interpretation is saved to the
    pmag_results_table data in all coordinate systems
"""
if self.current_fit:
self.current_fit.saved = True
calculation_type = self.current_fit.get(self.COORDINATE_SYSTEM)['calculation_type']
tmin = str(self.tmin_box.GetValue())
tmax = str(self.tmax_box.GetValue())
self.current_fit.put(self.s, 'specimen', self.get_PCA_parameters(self.s, self.current_fit, tmin, tmax, 'specimen', calculation_type))
if len(self.Data[self.s]['zijdblock_geo']) > 0:
self.current_fit.put(self.s, 'geographic', self.get_PCA_parameters(self.s, self.current_fit, tmin, tmax, 'geographic', calculation_type)) # depends on [control=['if'], data=[]]
if len(self.Data[self.s]['zijdblock_tilt']) > 0:
self.current_fit.put(self.s, 'tilt-corrected', self.get_PCA_parameters(self.s, self.current_fit, tmin, tmax, 'tilt-corrected', calculation_type)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# calculate high level data
self.calculate_high_levels_data()
self.plot_high_levels_data()
self.on_menu_save_interpretation(event)
self.update_selection()
self.close_warning = True |
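A toy restatement of the branching above, runnable without wxPython; the dicts stand in for `self.Data` and the fit object:

data = {'spec1': {'zijdblock_geo': [1, 2], 'zijdblock_tilt': []}}
stored = {}

def put(spec, coord, params):         # stand-in for current_fit.put(...)
    stored[(spec, coord)] = params

put('spec1', 'specimen', 'pca-specimen')
if len(data['spec1']['zijdblock_geo']) > 0:
    put('spec1', 'geographic', 'pca-geographic')
if len(data['spec1']['zijdblock_tilt']) > 0:
    put('spec1', 'tilt-corrected', 'pca-tilt')

assert ('spec1', 'tilt-corrected') not in stored  # no tilt data, so skipped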
def list(self):
"""
List all available data logging sessions
"""
# We have to open this queue before we make the request, to ensure we don't miss the response.
queue = self._pebble.get_endpoint_queue(DataLogging)
self._pebble.send_packet(DataLogging(data=DataLoggingReportOpenSessions(sessions=[])))
sessions = []
while True:
try:
result = queue.get(timeout=2).data
except TimeoutError:
break
if isinstance(result, DataLoggingDespoolOpenSession):
self._pebble.send_packet(DataLogging(data=DataLoggingACK(
session_id=result.session_id)))
sessions.append(result.__dict__)
queue.close()
return sessions | def function[list, parameter[self]]:
constant[
List all available data logging sessions
]
variable[queue] assign[=] call[name[self]._pebble.get_endpoint_queue, parameter[name[DataLogging]]]
call[name[self]._pebble.send_packet, parameter[call[name[DataLogging], parameter[]]]]
variable[sessions] assign[=] list[[]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b0a4c8e0>
if call[name[isinstance], parameter[name[result], name[DataLoggingDespoolOpenSession]]] begin[:]
call[name[self]._pebble.send_packet, parameter[call[name[DataLogging], parameter[]]]]
call[name[sessions].append, parameter[name[result].__dict__]]
call[name[queue].close, parameter[]]
return[name[sessions]] | keyword[def] identifier[list] ( identifier[self] ):
literal[string]
identifier[queue] = identifier[self] . identifier[_pebble] . identifier[get_endpoint_queue] ( identifier[DataLogging] )
identifier[self] . identifier[_pebble] . identifier[send_packet] ( identifier[DataLogging] ( identifier[data] = identifier[DataLoggingReportOpenSessions] ( identifier[sessions] =[])))
identifier[sessions] =[]
keyword[while] keyword[True] :
keyword[try] :
identifier[result] = identifier[queue] . identifier[get] ( identifier[timeout] = literal[int] ). identifier[data]
keyword[except] identifier[TimeoutError] :
keyword[break]
keyword[if] identifier[isinstance] ( identifier[result] , identifier[DataLoggingDespoolOpenSession] ):
identifier[self] . identifier[_pebble] . identifier[send_packet] ( identifier[DataLogging] ( identifier[data] = identifier[DataLoggingACK] (
identifier[session_id] = identifier[result] . identifier[session_id] )))
identifier[sessions] . identifier[append] ( identifier[result] . identifier[__dict__] )
identifier[queue] . identifier[close] ()
keyword[return] identifier[sessions] | def list(self):
"""
List all available data logging sessions
"""
# We have to open this queue before we make the request, to ensure we don't miss the response.
queue = self._pebble.get_endpoint_queue(DataLogging)
self._pebble.send_packet(DataLogging(data=DataLoggingReportOpenSessions(sessions=[])))
sessions = []
while True:
try:
result = queue.get(timeout=2).data # depends on [control=['try'], data=[]]
except TimeoutError:
break # depends on [control=['except'], data=[]]
if isinstance(result, DataLoggingDespoolOpenSession):
self._pebble.send_packet(DataLogging(data=DataLoggingACK(session_id=result.session_id)))
sessions.append(result.__dict__) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
queue.close()
return sessions |
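A hypothetical call against a connected watch; `DataLoggingManager` is a stand-in name for the class that owns `list()`, and `pebble` is an already-connected libpebble2 client:

mgr = DataLoggingManager(pebble)   # stand-in class name, not from the row
for session in mgr.list():
    # each entry is the DataLoggingDespoolOpenSession packet's __dict__
    print(session.get('session_id'), session.get('log_tag'))

Two design points are visible in the code: the endpoint queue is opened before the request is sent so the first response cannot be missed, and the 2-second `TimeoutError` is what terminates the receive loop.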
def print_ldamodel_topic_words(topic_word_distrib, vocab, n_top=10, row_labels=DEFAULT_TOPIC_NAME_FMT):
"""Print `n_top` values from a LDA model's topic-word distributions."""
print_ldamodel_distribution(topic_word_distrib, row_labels=row_labels, val_labels=vocab,
top_n=n_top) | def function[print_ldamodel_topic_words, parameter[topic_word_distrib, vocab, n_top, row_labels]]:
    constant[Print `n_top` values from an LDA model's topic-word distributions.]
call[name[print_ldamodel_distribution], parameter[name[topic_word_distrib]]] | keyword[def] identifier[print_ldamodel_topic_words] ( identifier[topic_word_distrib] , identifier[vocab] , identifier[n_top] = literal[int] , identifier[row_labels] = identifier[DEFAULT_TOPIC_NAME_FMT] ):
literal[string]
identifier[print_ldamodel_distribution] ( identifier[topic_word_distrib] , identifier[row_labels] = identifier[row_labels] , identifier[val_labels] = identifier[vocab] ,
identifier[top_n] = identifier[n_top] ) | def print_ldamodel_topic_words(topic_word_distrib, vocab, n_top=10, row_labels=DEFAULT_TOPIC_NAME_FMT):
"""Print `n_top` values from a LDA model's topic-word distributions."""
print_ldamodel_distribution(topic_word_distrib, row_labels=row_labels, val_labels=vocab, top_n=n_top) |
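A quick toy call, assuming the function above is in scope alongside `print_ldamodel_distribution`; the distribution is fabricated:

import numpy as np

topic_word = np.array([[0.5, 0.3, 0.1, 0.1],    # rows: topics, columns: words
                       [0.1, 0.1, 0.4, 0.4]])   # each row sums to 1
vocab = np.array(['alpha', 'beta', 'gamma', 'delta'])

print_ldamodel_topic_words(topic_word, vocab, n_top=2)
# prints, per topic, the two highest-probability words with their values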
def sec(x, context=None):
"""
Return the secant of ``x``.
"""
return _apply_function_in_current_context(
BigFloat,
mpfr.mpfr_sec,
(BigFloat._implicit_convert(x),),
context,
) | def function[sec, parameter[x, context]]:
constant[
Return the secant of ``x``.
]
return[call[name[_apply_function_in_current_context], parameter[name[BigFloat], name[mpfr].mpfr_sec, tuple[[<ast.Call object at 0x7da18ede7760>]], name[context]]]] | keyword[def] identifier[sec] ( identifier[x] , identifier[context] = keyword[None] ):
literal[string]
keyword[return] identifier[_apply_function_in_current_context] (
identifier[BigFloat] ,
identifier[mpfr] . identifier[mpfr_sec] ,
( identifier[BigFloat] . identifier[_implicit_convert] ( identifier[x] ),),
identifier[context] ,
) | def sec(x, context=None):
"""
Return the secant of ``x``.
"""
return _apply_function_in_current_context(BigFloat, mpfr.mpfr_sec, (BigFloat._implicit_convert(x),), context) |
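A small usage sketch, assuming this is the `bigfloat` package's mpfr wrapper (which exposes a `precision` context manager):

from bigfloat import precision, sec

with precision(100):        # 100 bits of working precision
    s = sec(1)              # secant of 1 radian, i.e. 1/cos(1)
print(s)                    # ~1.8508157176809255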
def sample(self, n):
""" Samples data into a Pandas DataFrame.
Args:
      n: number of rows to sample.
Returns:
A dataframe containing sampled data.
Raises:
Exception if n is larger than number of rows.
"""
row_total_count = 0
row_counts = []
for file in self.files:
with _util.open_local_or_gcs(file, 'r') as f:
num_lines = sum(1 for line in f)
row_total_count += num_lines
row_counts.append(num_lines)
names = None
dtype = None
if self._schema:
_MAPPINGS = {
'FLOAT': np.float64,
'INTEGER': np.int64,
'TIMESTAMP': np.datetime64,
'BOOLEAN': np.bool,
}
names = [x['name'] for x in self._schema]
dtype = {x['name']: _MAPPINGS.get(x['type'], object) for x in self._schema}
skip_count = row_total_count - n
# Get all skipped indexes. These will be distributed into each file.
# Note that random.sample will raise Exception if skip_count is greater than rows count.
skip_all = sorted(random.sample(range(0, row_total_count), skip_count))
dfs = []
for file, row_count in zip(self.files, row_counts):
skip = [x for x in skip_all if x < row_count]
skip_all = [x - row_count for x in skip_all if x >= row_count]
with _util.open_local_or_gcs(file, 'r') as f:
dfs.append(pd.read_csv(f, skiprows=skip, names=names, dtype=dtype, header=None))
return pd.concat(dfs, axis=0, ignore_index=True) | def function[sample, parameter[self, n]]:
constant[ Samples data into a Pandas DataFrame.
Args:
      n: number of rows to sample.
Returns:
A dataframe containing sampled data.
Raises:
Exception if n is larger than number of rows.
]
variable[row_total_count] assign[=] constant[0]
variable[row_counts] assign[=] list[[]]
for taget[name[file]] in starred[name[self].files] begin[:]
with call[name[_util].open_local_or_gcs, parameter[name[file], constant[r]]] begin[:]
variable[num_lines] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b1121ea0>]]
<ast.AugAssign object at 0x7da1b1120df0>
call[name[row_counts].append, parameter[name[num_lines]]]
variable[names] assign[=] constant[None]
variable[dtype] assign[=] constant[None]
if name[self]._schema begin[:]
variable[_MAPPINGS] assign[=] dictionary[[<ast.Constant object at 0x7da1b11215a0>, <ast.Constant object at 0x7da1b1120c70>, <ast.Constant object at 0x7da1b1121030>, <ast.Constant object at 0x7da1b1121360>], [<ast.Attribute object at 0x7da1b1120a30>, <ast.Attribute object at 0x7da1b1120b80>, <ast.Attribute object at 0x7da1b11224a0>, <ast.Attribute object at 0x7da1b1121330>]]
variable[names] assign[=] <ast.ListComp object at 0x7da1b11221d0>
variable[dtype] assign[=] <ast.DictComp object at 0x7da1b1122860>
variable[skip_count] assign[=] binary_operation[name[row_total_count] - name[n]]
variable[skip_all] assign[=] call[name[sorted], parameter[call[name[random].sample, parameter[call[name[range], parameter[constant[0], name[row_total_count]]], name[skip_count]]]]]
variable[dfs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1122fb0>, <ast.Name object at 0x7da1b1122ce0>]]] in starred[call[name[zip], parameter[name[self].files, name[row_counts]]]] begin[:]
variable[skip] assign[=] <ast.ListComp object at 0x7da1b11229b0>
variable[skip_all] assign[=] <ast.ListComp object at 0x7da1b1123010>
with call[name[_util].open_local_or_gcs, parameter[name[file], constant[r]]] begin[:]
call[name[dfs].append, parameter[call[name[pd].read_csv, parameter[name[f]]]]]
return[call[name[pd].concat, parameter[name[dfs]]]] | keyword[def] identifier[sample] ( identifier[self] , identifier[n] ):
literal[string]
identifier[row_total_count] = literal[int]
identifier[row_counts] =[]
keyword[for] identifier[file] keyword[in] identifier[self] . identifier[files] :
keyword[with] identifier[_util] . identifier[open_local_or_gcs] ( identifier[file] , literal[string] ) keyword[as] identifier[f] :
identifier[num_lines] = identifier[sum] ( literal[int] keyword[for] identifier[line] keyword[in] identifier[f] )
identifier[row_total_count] += identifier[num_lines]
identifier[row_counts] . identifier[append] ( identifier[num_lines] )
identifier[names] = keyword[None]
identifier[dtype] = keyword[None]
keyword[if] identifier[self] . identifier[_schema] :
identifier[_MAPPINGS] ={
literal[string] : identifier[np] . identifier[float64] ,
literal[string] : identifier[np] . identifier[int64] ,
literal[string] : identifier[np] . identifier[datetime64] ,
literal[string] : identifier[np] . identifier[bool] ,
}
identifier[names] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[_schema] ]
identifier[dtype] ={ identifier[x] [ literal[string] ]: identifier[_MAPPINGS] . identifier[get] ( identifier[x] [ literal[string] ], identifier[object] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[_schema] }
identifier[skip_count] = identifier[row_total_count] - identifier[n]
identifier[skip_all] = identifier[sorted] ( identifier[random] . identifier[sample] ( identifier[range] ( literal[int] , identifier[row_total_count] ), identifier[skip_count] ))
identifier[dfs] =[]
keyword[for] identifier[file] , identifier[row_count] keyword[in] identifier[zip] ( identifier[self] . identifier[files] , identifier[row_counts] ):
identifier[skip] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[skip_all] keyword[if] identifier[x] < identifier[row_count] ]
identifier[skip_all] =[ identifier[x] - identifier[row_count] keyword[for] identifier[x] keyword[in] identifier[skip_all] keyword[if] identifier[x] >= identifier[row_count] ]
keyword[with] identifier[_util] . identifier[open_local_or_gcs] ( identifier[file] , literal[string] ) keyword[as] identifier[f] :
identifier[dfs] . identifier[append] ( identifier[pd] . identifier[read_csv] ( identifier[f] , identifier[skiprows] = identifier[skip] , identifier[names] = identifier[names] , identifier[dtype] = identifier[dtype] , identifier[header] = keyword[None] ))
keyword[return] identifier[pd] . identifier[concat] ( identifier[dfs] , identifier[axis] = literal[int] , identifier[ignore_index] = keyword[True] ) | def sample(self, n):
""" Samples data into a Pandas DataFrame.
Args:
      n: number of rows to sample.
Returns:
A dataframe containing sampled data.
Raises:
Exception if n is larger than number of rows.
"""
row_total_count = 0
row_counts = []
for file in self.files:
with _util.open_local_or_gcs(file, 'r') as f:
num_lines = sum((1 for line in f))
row_total_count += num_lines
row_counts.append(num_lines) # depends on [control=['with'], data=['f']] # depends on [control=['for'], data=['file']]
names = None
dtype = None
if self._schema:
_MAPPINGS = {'FLOAT': np.float64, 'INTEGER': np.int64, 'TIMESTAMP': np.datetime64, 'BOOLEAN': np.bool}
names = [x['name'] for x in self._schema]
dtype = {x['name']: _MAPPINGS.get(x['type'], object) for x in self._schema} # depends on [control=['if'], data=[]]
skip_count = row_total_count - n
# Get all skipped indexes. These will be distributed into each file.
# Note that random.sample will raise Exception if skip_count is greater than rows count.
skip_all = sorted(random.sample(range(0, row_total_count), skip_count))
dfs = []
for (file, row_count) in zip(self.files, row_counts):
skip = [x for x in skip_all if x < row_count]
skip_all = [x - row_count for x in skip_all if x >= row_count]
with _util.open_local_or_gcs(file, 'r') as f:
dfs.append(pd.read_csv(f, skiprows=skip, names=names, dtype=dtype, header=None)) # depends on [control=['with'], data=['f']] # depends on [control=['for'], data=[]]
return pd.concat(dfs, axis=0, ignore_index=True) |
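A hypothetical use of the sampler; `CsvDataSet` is a stand-in for the owning class, which must expose `.files` (local or gs:// CSV paths) and an optional BigQuery-style `_schema`:

ds = CsvDataSet(['gs://my-bucket/train-00000.csv',    # stand-in class and paths
                 'gs://my-bucket/train-00001.csv'],
                schema=[{'name': 'id', 'type': 'INTEGER'},
                        {'name': 'score', 'type': 'FLOAT'}])
df = ds.sample(1000)   # random.sample raises ValueError if 1000 > total rows
print(len(df), df.dtypes.to_dict())

Skipped row indices are drawn once over the global row count and then partitioned per file, which keeps the sample uniform across unevenly sized files.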
def fixed_terms(self):
'''Return dict of all and only fixed effects in model.'''
return {k: v for (k, v) in self.terms.items() if not v.random} | def function[fixed_terms, parameter[self]]:
constant[Return dict of all and only fixed effects in model.]
return[<ast.DictComp object at 0x7da1b1660c10>] | keyword[def] identifier[fixed_terms] ( identifier[self] ):
literal[string]
keyword[return] { identifier[k] : identifier[v] keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[self] . identifier[terms] . identifier[items] () keyword[if] keyword[not] identifier[v] . identifier[random] } | def fixed_terms(self):
"""Return dict of all and only fixed effects in model."""
return {k: v for (k, v) in self.terms.items() if not v.random} |
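A self-contained toy of the same filter; `Term` is a stand-in for the real term class, which only needs a boolean `.random` attribute here:

class Term:
    def __init__(self, random):
        self.random = random

terms = {'x': Term(random=False), '1|group': Term(random=True)}
fixed = {k: v for k, v in terms.items() if not v.random}
assert list(fixed) == ['x']   # only the non-random (fixed) effect survives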
def pop(self, i):
"""
Pop a column from the H2OFrame at index i.
:param i: The index (int) or name (str) of the column to pop.
:returns: an H2OFrame containing the column dropped from the current frame; the current frame is modified
in-place and loses the column.
"""
if is_type(i, str): i = self.names.index(i)
col = H2OFrame._expr(expr=ExprNode("cols", self, i))
old_cache = self._ex._cache
self._ex = ExprNode("cols", self, -(i + 1))
self._ex._cache.ncols -= 1
self._ex._cache.names = old_cache.names[:i] + old_cache.names[i + 1:]
self._ex._cache.types = {name: old_cache.types[name] for name in self._ex._cache.names}
self._ex._cache._data = None
col._ex._cache.ncols = 1
col._ex._cache.names = [old_cache.names[i]]
return col | def function[pop, parameter[self, i]]:
constant[
Pop a column from the H2OFrame at index i.
:param i: The index (int) or name (str) of the column to pop.
:returns: an H2OFrame containing the column dropped from the current frame; the current frame is modified
in-place and loses the column.
]
if call[name[is_type], parameter[name[i], name[str]]] begin[:]
variable[i] assign[=] call[name[self].names.index, parameter[name[i]]]
variable[col] assign[=] call[name[H2OFrame]._expr, parameter[]]
variable[old_cache] assign[=] name[self]._ex._cache
name[self]._ex assign[=] call[name[ExprNode], parameter[constant[cols], name[self], <ast.UnaryOp object at 0x7da20e9b3760>]]
<ast.AugAssign object at 0x7da20e9b25f0>
name[self]._ex._cache.names assign[=] binary_operation[call[name[old_cache].names][<ast.Slice object at 0x7da20e9b2080>] + call[name[old_cache].names][<ast.Slice object at 0x7da20e9b3b20>]]
name[self]._ex._cache.types assign[=] <ast.DictComp object at 0x7da20e9b2b60>
name[self]._ex._cache._data assign[=] constant[None]
name[col]._ex._cache.ncols assign[=] constant[1]
name[col]._ex._cache.names assign[=] list[[<ast.Subscript object at 0x7da1b0370e50>]]
return[name[col]] | keyword[def] identifier[pop] ( identifier[self] , identifier[i] ):
literal[string]
keyword[if] identifier[is_type] ( identifier[i] , identifier[str] ): identifier[i] = identifier[self] . identifier[names] . identifier[index] ( identifier[i] )
identifier[col] = identifier[H2OFrame] . identifier[_expr] ( identifier[expr] = identifier[ExprNode] ( literal[string] , identifier[self] , identifier[i] ))
identifier[old_cache] = identifier[self] . identifier[_ex] . identifier[_cache]
identifier[self] . identifier[_ex] = identifier[ExprNode] ( literal[string] , identifier[self] ,-( identifier[i] + literal[int] ))
identifier[self] . identifier[_ex] . identifier[_cache] . identifier[ncols] -= literal[int]
identifier[self] . identifier[_ex] . identifier[_cache] . identifier[names] = identifier[old_cache] . identifier[names] [: identifier[i] ]+ identifier[old_cache] . identifier[names] [ identifier[i] + literal[int] :]
identifier[self] . identifier[_ex] . identifier[_cache] . identifier[types] ={ identifier[name] : identifier[old_cache] . identifier[types] [ identifier[name] ] keyword[for] identifier[name] keyword[in] identifier[self] . identifier[_ex] . identifier[_cache] . identifier[names] }
identifier[self] . identifier[_ex] . identifier[_cache] . identifier[_data] = keyword[None]
identifier[col] . identifier[_ex] . identifier[_cache] . identifier[ncols] = literal[int]
identifier[col] . identifier[_ex] . identifier[_cache] . identifier[names] =[ identifier[old_cache] . identifier[names] [ identifier[i] ]]
keyword[return] identifier[col] | def pop(self, i):
"""
Pop a column from the H2OFrame at index i.
:param i: The index (int) or name (str) of the column to pop.
:returns: an H2OFrame containing the column dropped from the current frame; the current frame is modified
in-place and loses the column.
"""
if is_type(i, str):
i = self.names.index(i) # depends on [control=['if'], data=[]]
col = H2OFrame._expr(expr=ExprNode('cols', self, i))
old_cache = self._ex._cache
self._ex = ExprNode('cols', self, -(i + 1))
self._ex._cache.ncols -= 1
self._ex._cache.names = old_cache.names[:i] + old_cache.names[i + 1:]
self._ex._cache.types = {name: old_cache.types[name] for name in self._ex._cache.names}
self._ex._cache._data = None
col._ex._cache.ncols = 1
col._ex._cache.names = [old_cache.names[i]]
return col |
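A minimal sketch against a live H2O cluster (assumes `h2o` is installed and a local cluster can be started):

import h2o
from h2o.frame import H2OFrame

h2o.init()
fr = H2OFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
col = fr.pop('b')          # the popped column comes back as a 1-column frame
assert fr.ncols == 1 and col.names == ['b']

Note the in-place bookkeeping above: the cached names, types, and column count of the current frame are patched directly rather than re-fetched from the backend.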
def gffselect(args):
"""
%prog gffselect gmaplocation.bed expectedlocation.bed translated.ids tag
Try to match up the expected location and gmap locations for particular
genes. translated.ids was generated by fasta.translate --ids. tag must be
one of "complete|pseudogene|partial".
"""
from jcvi.formats.bed import intersectBed_wao
p = OptionParser(gffselect.__doc__)
opts, args = p.parse_args(args)
if len(args) != 4:
sys.exit(not p.print_help())
gmapped, expected, idsfile, tag = args
data = get_tags(idsfile)
completeness = dict((a.replace("mrna", "path"), c) \
for (a, b, c) in data)
seen = set()
idsfile = expected.rsplit(".", 1)[0] + ".ids"
fw = open(idsfile, "w")
cnt = 0
for a, b in intersectBed_wao(expected, gmapped):
if b is None:
continue
aname, bbname = a.accn, b.accn
bname = bbname.split(".")[0]
if completeness[bbname] != tag:
continue
if aname == bname:
if bname in seen:
continue
seen.add(bname)
print(bbname, file=fw)
cnt += 1
fw.close()
logging.debug("Total {0} records written to `{1}`.".format(cnt, idsfile)) | def function[gffselect, parameter[args]]:
constant[
%prog gffselect gmaplocation.bed expectedlocation.bed translated.ids tag
Try to match up the expected location and gmap locations for particular
genes. translated.ids was generated by fasta.translate --ids. tag must be
one of "complete|pseudogene|partial".
]
from relative_module[jcvi.formats.bed] import module[intersectBed_wao]
variable[p] assign[=] call[name[OptionParser], parameter[name[gffselect].__doc__]]
<ast.Tuple object at 0x7da207f01fc0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[4]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18ede41f0>]]
<ast.Tuple object at 0x7da18ede44f0> assign[=] name[args]
variable[data] assign[=] call[name[get_tags], parameter[name[idsfile]]]
variable[completeness] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18ede7730>]]
variable[seen] assign[=] call[name[set], parameter[]]
variable[idsfile] assign[=] binary_operation[call[call[name[expected].rsplit, parameter[constant[.], constant[1]]]][constant[0]] + constant[.ids]]
variable[fw] assign[=] call[name[open], parameter[name[idsfile], constant[w]]]
variable[cnt] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18ede62f0>, <ast.Name object at 0x7da18ede7520>]]] in starred[call[name[intersectBed_wao], parameter[name[expected], name[gmapped]]]] begin[:]
if compare[name[b] is constant[None]] begin[:]
continue
<ast.Tuple object at 0x7da18ede7e50> assign[=] tuple[[<ast.Attribute object at 0x7da18ede61a0>, <ast.Attribute object at 0x7da18ede43a0>]]
variable[bname] assign[=] call[call[name[bbname].split, parameter[constant[.]]]][constant[0]]
if compare[call[name[completeness]][name[bbname]] not_equal[!=] name[tag]] begin[:]
continue
if compare[name[aname] equal[==] name[bname]] begin[:]
if compare[name[bname] in name[seen]] begin[:]
continue
call[name[seen].add, parameter[name[bname]]]
call[name[print], parameter[name[bbname]]]
<ast.AugAssign object at 0x7da18ede4700>
call[name[fw].close, parameter[]]
call[name[logging].debug, parameter[call[constant[Total {0} records written to `{1}`.].format, parameter[name[cnt], name[idsfile]]]]] | keyword[def] identifier[gffselect] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[bed] keyword[import] identifier[intersectBed_wao]
identifier[p] = identifier[OptionParser] ( identifier[gffselect] . identifier[__doc__] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[gmapped] , identifier[expected] , identifier[idsfile] , identifier[tag] = identifier[args]
identifier[data] = identifier[get_tags] ( identifier[idsfile] )
identifier[completeness] = identifier[dict] (( identifier[a] . identifier[replace] ( literal[string] , literal[string] ), identifier[c] ) keyword[for] ( identifier[a] , identifier[b] , identifier[c] ) keyword[in] identifier[data] )
identifier[seen] = identifier[set] ()
identifier[idsfile] = identifier[expected] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]+ literal[string]
identifier[fw] = identifier[open] ( identifier[idsfile] , literal[string] )
identifier[cnt] = literal[int]
keyword[for] identifier[a] , identifier[b] keyword[in] identifier[intersectBed_wao] ( identifier[expected] , identifier[gmapped] ):
keyword[if] identifier[b] keyword[is] keyword[None] :
keyword[continue]
identifier[aname] , identifier[bbname] = identifier[a] . identifier[accn] , identifier[b] . identifier[accn]
identifier[bname] = identifier[bbname] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[completeness] [ identifier[bbname] ]!= identifier[tag] :
keyword[continue]
keyword[if] identifier[aname] == identifier[bname] :
keyword[if] identifier[bname] keyword[in] identifier[seen] :
keyword[continue]
identifier[seen] . identifier[add] ( identifier[bname] )
identifier[print] ( identifier[bbname] , identifier[file] = identifier[fw] )
identifier[cnt] += literal[int]
identifier[fw] . identifier[close] ()
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cnt] , identifier[idsfile] )) | def gffselect(args):
"""
%prog gffselect gmaplocation.bed expectedlocation.bed translated.ids tag
Try to match up the expected location and gmap locations for particular
genes. translated.ids was generated by fasta.translate --ids. tag must be
one of "complete|pseudogene|partial".
"""
from jcvi.formats.bed import intersectBed_wao
p = OptionParser(gffselect.__doc__)
(opts, args) = p.parse_args(args)
if len(args) != 4:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(gmapped, expected, idsfile, tag) = args
data = get_tags(idsfile)
completeness = dict(((a.replace('mrna', 'path'), c) for (a, b, c) in data))
seen = set()
idsfile = expected.rsplit('.', 1)[0] + '.ids'
fw = open(idsfile, 'w')
cnt = 0
for (a, b) in intersectBed_wao(expected, gmapped):
if b is None:
continue # depends on [control=['if'], data=[]]
(aname, bbname) = (a.accn, b.accn)
bname = bbname.split('.')[0]
if completeness[bbname] != tag:
continue # depends on [control=['if'], data=[]]
if aname == bname:
if bname in seen:
continue # depends on [control=['if'], data=[]]
seen.add(bname)
print(bbname, file=fw)
cnt += 1 # depends on [control=['if'], data=['bname']] # depends on [control=['for'], data=[]]
fw.close()
logging.debug('Total {0} records written to `{1}`.'.format(cnt, idsfile)) |
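An illustrative call; the file names are placeholders, and the tag must be one of "complete", "pseudogene", or "partial" (matched against the per-mRNA tags parsed from translated.ids):

gffselect(['gmaplocation.bed', 'expectedlocation.bed',
           'translated.ids', 'complete'])
# writes `expectedlocation.ids` containing the gmap path IDs whose mapped
# location intersects the expected location for the same gene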