code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def GetSyncMoConfigFilePath():
    """Return the path of the SyncMoConfig.xml file.

    The file lives in the ``resources`` directory next to this module.

    Returns:
        str: Path to ``resources/SyncMoConfig.xml`` relative to this
        module's location.
    """
    # os.path.join is variadic, so the components can be joined in one
    # call instead of nesting two joins.
    return os.path.join(os.path.dirname(__file__), "resources", "SyncMoConfig.xml")
def _gsa_update_velocity(velocity, acceleration):
"""Stochastically update velocity given acceleration.
In GSA paper, velocity is v_i, acceleration is a_i
"""
# The GSA algorithm specifies that the new velocity for each dimension
# is a sum of a random fraction of its current velocity in that dimension,
# and its acceleration in that dimension
# For this reason we sum the dimensions individually instead of simply
# using vec_a+vec_b
new_velocity = []
for vel, acc in zip(velocity, acceleration):
new_velocity.append(random.uniform(0.0, 1.0) * vel + acc)
return new_velocity | def function[_gsa_update_velocity, parameter[velocity, acceleration]]:
constant[Stochastically update velocity given acceleration.
In GSA paper, velocity is v_i, acceleration is a_i
]
variable[new_velocity] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b04ca650>, <ast.Name object at 0x7da1b04c8fd0>]]] in starred[call[name[zip], parameter[name[velocity], name[acceleration]]]] begin[:]
call[name[new_velocity].append, parameter[binary_operation[binary_operation[call[name[random].uniform, parameter[constant[0.0], constant[1.0]]] * name[vel]] + name[acc]]]]
return[name[new_velocity]] | keyword[def] identifier[_gsa_update_velocity] ( identifier[velocity] , identifier[acceleration] ):
literal[string]
identifier[new_velocity] =[]
keyword[for] identifier[vel] , identifier[acc] keyword[in] identifier[zip] ( identifier[velocity] , identifier[acceleration] ):
identifier[new_velocity] . identifier[append] ( identifier[random] . identifier[uniform] ( literal[int] , literal[int] )* identifier[vel] + identifier[acc] )
keyword[return] identifier[new_velocity] | def _gsa_update_velocity(velocity, acceleration):
"""Stochastically update velocity given acceleration.
In GSA paper, velocity is v_i, acceleration is a_i
"""
# The GSA algorithm specifies that the new velocity for each dimension
# is a sum of a random fraction of its current velocity in that dimension,
# and its acceleration in that dimension
# For this reason we sum the dimensions individually instead of simply
# using vec_a+vec_b
new_velocity = []
for (vel, acc) in zip(velocity, acceleration):
new_velocity.append(random.uniform(0.0, 1.0) * vel + acc) # depends on [control=['for'], data=[]]
return new_velocity |
def array_info(self, dump=None, paths=None, attrs=True,
               standardize_dims=True, pwd=None, use_rel_paths=True,
               alternative_paths={}, ds_description={'fname', 'store'},
               full_ds=True, copy=False, **kwargs):
    """
    Get dimension information on your arrays

    This method returns a dictionary containing information on the
    array in this instance

    Parameters
    ----------
    dump: bool
        If True and the dataset has not been dumped so far, it is dumped to
        a temporary file or the one generated by `paths` is used. If it is
        False, or if both `dump` and `paths` are None, no data will be
        stored. If it is None and `paths` is not None, `dump` is set to
        True.
    %(get_filename_ds.parameters.no_ds|dump)s
    attrs: bool, optional
        If True (default), the :attr:`ArrayList.attrs` and
        :attr:`xarray.DataArray.attrs` attributes are included in the
        returning dictionary
    standardize_dims: bool, optional
        If True (default), the real dimension names in the dataset are
        replaced by x, y, z and t to be more general.
    pwd: str
        Path to the working directory from where the data can be imported.
        If None, use the current working directory.
    use_rel_paths: bool, optional
        If True (default), paths relative to the current working directory
        are used. Otherwise absolute paths to `pwd` are used
    ds_description: 'all' or set of {'fname', 'ds', 'num', 'arr', 'store'}
        Keys to describe the datasets of the arrays. If all, all keys
        are used. The key descriptions are

        fname
            the file name is inserted in the ``'fname'`` key
        store
            the data store class and module is inserted in the ``'store'``
            key
        ds
            the dataset is inserted in the ``'ds'`` key
        num
            The unique number assigned to the dataset is inserted in the
            ``'num'`` key
        arr
            The array itself is inserted in the ``'arr'`` key
    full_ds: bool
        If True and ``'ds'`` is in `ds_description`, the entire dataset is
        included. Otherwise, only the DataArray converted to a dataset is
        included
    copy: bool
        If True, the arrays and datasets are deep copied

    Other Parameters
    ----------------
    %(get_filename_ds.other_parameters)s

    Returns
    -------
    OrderedDict
        An ordered mapping from array names to dimensions and filename
        corresponding to the array

    See Also
    --------
    from_dict"""
    # NOTE(review): `alternative_paths={}` and `ds_description={...}` are
    # mutable default arguments.  Harmless here since neither default is
    # mutated in place (`ds_description` is only rebound), but consider
    # the `None` sentinel idiom.
    # Internal cache of deep-copied datasets, shared with recursive calls
    # through the private `_saved_ds` keyword so each dataset is copied
    # only once.
    saved_ds = kwargs.pop('_saved_ds', {})

    def get_alternative(f):
        # Return [True, replacement] for the first entry in
        # `alternative_paths` whose key names the same file as `f`,
        # otherwise [False, f] unchanged.
        return next(filter(lambda t: osp.samefile(f, t[0]),
                           six.iteritems(alternative_paths)), [False, f])

    if copy:
        def copy_obj(obj):
            # try to get the number of the dataset and create only one copy
            # copy for each dataset
            try:
                num = obj.psy.num
            except AttributeError:
                pass
            else:
                try:
                    return saved_ds[num]
                except KeyError:
                    saved_ds[num] = obj.psy.copy(True)
                    return saved_ds[num]
            return obj.psy.copy(True)
    else:
        def copy_obj(obj):
            # No copying requested: hand back the object itself.
            return obj

    ret = OrderedDict()

    # Expand the 'all' shorthand into the full set of description keys.
    if ds_description == 'all':
        ds_description = {'fname', 'ds', 'num', 'arr', 'store'}

    # A given `paths` implies dumping; consume it as an iterator so each
    # array draws the next target path.
    if paths is not None:
        if dump is None:
            dump = True
        paths = iter(paths)
    elif dump is None:
        dump = False

    if pwd is None:
        pwd = getcwd()

    for arr in self:
        if isinstance(arr, InteractiveList):
            # Nested lists are described recursively, forwarding the
            # shared copy cache via `_saved_ds`.
            ret[arr.arr_name] = arr.array_info(
                dump, paths, pwd=pwd, attrs=attrs,
                standardize_dims=standardize_dims,
                use_rel_paths=use_rel_paths, ds_description=ds_description,
                alternative_paths=alternative_paths, copy=copy,
                _saved_ds=saved_ds, **kwargs)
        else:
            if standardize_dims:
                idims = arr.psy.decoder.standardize_dims(
                    next(arr.psy.iter_base_variables), arr.psy.idims)
            else:
                idims = arr.psy.idims
            ret[arr.psy.arr_name] = d = {'dims': idims}
            if 'variable' in arr.coords:
                d['name'] = [list(arr.coords['variable'].values)]
            else:
                d['name'] = arr.name
            if 'fname' in ds_description or 'store' in ds_description:
                fname, store_mod, store_cls = get_filename_ds(
                    arr.psy.base, dump=dump, paths=paths, **kwargs)
                if 'store' in ds_description:
                    d['store'] = (store_mod, store_cls)
                if 'fname' in ds_description:
                    d['fname'] = []
                    for i, f in enumerate(safe_list(fname)):
                        if (f is None or utils.is_remote_url(f)):
                            # Remote or unknown files are kept verbatim.
                            d['fname'].append(f)
                        else:
                            found, f = get_alternative(f)
                            if use_rel_paths:
                                f = osp.relpath(f, pwd)
                            else:
                                f = osp.abspath(f)
                            d['fname'].append(f)
                    # Collapse to a scalar for single-file datasets,
                    # otherwise freeze the list into a tuple.
                    if fname is None or isinstance(fname,
                                                   six.string_types):
                        d['fname'] = d['fname'][0]
                    else:
                        d['fname'] = tuple(safe_list(fname))
                    if arr.psy.base.psy._concat_dim is not None:
                        d['concat_dim'] = arr.psy.base.psy._concat_dim
            if 'ds' in ds_description:
                if full_ds:
                    d['ds'] = copy_obj(arr.psy.base)
                else:
                    d['ds'] = copy_obj(arr.to_dataset())
            if 'num' in ds_description:
                d['num'] = arr.psy.base.psy.num
            if 'arr' in ds_description:
                d['arr'] = copy_obj(arr)
            if attrs:
                d['attrs'] = arr.attrs
    # List-level attributes are stored beside the per-array entries.
    ret['attrs'] = self.attrs
    return ret
def _colorify(self, data):
    """
    Return a colored version of the given string.

    :param data: The string to colorify.
    :type data: str

    :return: A colored string.
    :rtype: str
    """
    # Only the templates listed here need coloration; everything else
    # is returned untouched.
    if self.template not in ["Generic", "Less"]:
        return data

    status = self.data_to_print[1].lower()
    status_list = PyFunceble.STATUS["list"]

    if status in status_list["up"] or status in status_list["valid"]:
        # Up/valid statuses get a green background.
        background = PyFunceble.Back.GREEN
    elif status in status_list["down"]:
        # Down statuses get a red background.
        background = PyFunceble.Back.RED
    else:
        # Any other status gets a cyan background.
        background = PyFunceble.Back.CYAN

    return PyFunceble.Fore.BLACK + background + data
def _convenienceMatch(self, role, attr, match):
"""Method used by role based convenience functions to find a match"""
kwargs = {}
# If the user supplied some text to search for,
# supply that in the kwargs
if match:
kwargs[attr] = match
return self.findAll(AXRole=role, **kwargs) | def function[_convenienceMatch, parameter[self, role, attr, match]]:
constant[Method used by role based convenience functions to find a match]
variable[kwargs] assign[=] dictionary[[], []]
if name[match] begin[:]
call[name[kwargs]][name[attr]] assign[=] name[match]
return[call[name[self].findAll, parameter[]]] | keyword[def] identifier[_convenienceMatch] ( identifier[self] , identifier[role] , identifier[attr] , identifier[match] ):
literal[string]
identifier[kwargs] ={}
keyword[if] identifier[match] :
identifier[kwargs] [ identifier[attr] ]= identifier[match]
keyword[return] identifier[self] . identifier[findAll] ( identifier[AXRole] = identifier[role] ,** identifier[kwargs] ) | def _convenienceMatch(self, role, attr, match):
"""Method used by role based convenience functions to find a match"""
kwargs = {}
# If the user supplied some text to search for,
# supply that in the kwargs
if match:
kwargs[attr] = match # depends on [control=['if'], data=[]]
return self.findAll(AXRole=role, **kwargs) |
def apply_t0(self, hits):
    """Apply only t0s.

    Adds the per-(DOM, channel) t0 calibration offset to the hit times
    and returns the same ``hits`` object; ``hits.time`` is modified in
    place in the Python fallback (and presumably by the numba kernel as
    well — confirm against ``apply_t0_nb``).

    Parameters
    ----------
    hits:
        A hit table exposing ``time``, ``dom_id`` and ``channel_id``
        both as attributes and via item access (e.g. a structured
        array) — assumption based on the mixed access below; verify.
    """
    if HAVE_NUMBA:
        # Fast path: hand the raw columns and the precomputed lookup
        # tables to the numba-compiled kernel.
        apply_t0_nb(
            hits.time, hits.dom_id, hits.channel_id, self._lookup_tables
        )
    else:
        # Pure-Python fallback: gather the t0 for every hit into a
        # vector, then add it to all times at once.
        n = len(hits)
        cal = np.empty(n)
        lookup = self._calib_by_dom_and_channel
        for i in range(n):
            calib = lookup[hits['dom_id'][i]][hits['channel_id'][i]]
            # Index 6 of the calibration record is taken as the t0
            # offset (NOTE(review): inferred from this function's
            # purpose — confirm against the calibration table layout).
            cal[i] = calib[6]
        hits.time += cal
    return hits
def deprecated(func, solution):
    """
    Mark a parser or combiner as deprecated, and give a message of how to fix
    this.  This will emit a warning in the logs when the function is used.
    When combined with modifications to conftest, this causes deprecations to
    become fatal errors when testing, so they get fixed.

    Arguments:
        func (function): the function or method being deprecated.
        solution (str): a string describing the replacement class, method or
            function that replaces the thing being deprecated. For example,
            "use the `fnord()` function" or "use the `search()` method with
            the parameter `name='(value)'`".
    """
    def _definition_line(source_lines):
        # Skip decorator lines; the first line without an "@" is the
        # def/class statement itself.
        for candidate in source_lines:
            if "@" not in candidate:
                return candidate.strip()

    source_path = inspect.getsourcefile(func)
    source_lines, line_no = inspect.getsourcelines(func)
    name = _definition_line(source_lines) or "Unknown"
    the_msg = "<{c}> at {p}:{l} is deprecated: {s}".format(
        c=name, p=source_path, l=line_no, s=solution
    )
    warnings.warn(the_msg, DeprecationWarning)
When combined with modifications to conftest, this causes deprecations to
become fatal errors when testing, so they get fixed.
Arguments:
func (function): the function or method being deprecated.
solution (str): a string describing the replacement class, method or
function that replaces the thing being deprecated. For example,
"use the `fnord()` function" or "use the `search()` method with
the parameter `name='(value)'`".
"""
def get_name_line(src):
for line in src:
if '@' not in line:
return line.strip() # depends on [control=['if'], data=['line']] # depends on [control=['for'], data=['line']]
path = inspect.getsourcefile(func)
(src, line_no) = inspect.getsourcelines(func)
name = get_name_line(src) or 'Unknown'
the_msg = '<{c}> at {p}:{l} is deprecated: {s}'.format(c=name, p=path, l=line_no, s=solution)
warnings.warn(the_msg, DeprecationWarning) |
def resize_by_area(img, size):
  """Shared image-resize helper: area-resample ``img`` to ``size`` x ``size``
  and cast the result to int64."""
  resized = tf.image.resize_images(img, [size, size], tf.image.ResizeMethod.AREA)
  return tf.to_int64(resized)
constant[image resize function used by quite a few image problems.]
return[call[name[tf].to_int64, parameter[call[name[tf].image.resize_images, parameter[name[img], list[[<ast.Name object at 0x7da18f00f220>, <ast.Name object at 0x7da18f00f040>]], name[tf].image.ResizeMethod.AREA]]]]] | keyword[def] identifier[resize_by_area] ( identifier[img] , identifier[size] ):
literal[string]
keyword[return] identifier[tf] . identifier[to_int64] (
identifier[tf] . identifier[image] . identifier[resize_images] ( identifier[img] ,[ identifier[size] , identifier[size] ], identifier[tf] . identifier[image] . identifier[ResizeMethod] . identifier[AREA] )) | def resize_by_area(img, size):
"""image resize function used by quite a few image problems."""
return tf.to_int64(tf.image.resize_images(img, [size, size], tf.image.ResizeMethod.AREA)) |
def keep_vertices(self, indices_to_keep, ret_kept_edges=False):
    '''
    Keep the given vertices and discard the others, and any edges to which
    they may belong.
    If `ret_kept_edges` is `True`, return the original indices of the kept
    edges. Otherwise return `self` for chaining.
    '''
    # Nothing to filter (and nothing to return) without vertex data.
    if self.v is None:
        return
    initial_num_verts = self.v.shape[0]
    if self.e is not None:
        # Boolean mask over the original edges: True where every endpoint
        # of the edge survives the vertex filtering. Must be computed
        # before self.v is overwritten below.
        e_indices_to_keep = self.all_edges_with_verts(indices_to_keep, as_boolean=True)
    self.v = self.v[indices_to_keep]
    if self.e is not None:
        # Map each kept vertex's old index to its new, compacted index,
        # then rewrite the surviving edges in terms of the new indices.
        # (The previous e_old_to_new bookkeeping was dead code and has
        # been removed.)
        v_old_to_new = np.zeros(initial_num_verts, dtype=int)
        v_old_to_new[indices_to_keep] = np.arange(len(indices_to_keep), dtype=int)
        self.e = v_old_to_new[self.e[e_indices_to_keep]]
    else:
        e_indices_to_keep = []
    return np.nonzero(e_indices_to_keep)[0] if ret_kept_edges else self
constant[
Keep the given vertices and discard the others, and any edges to which
they may belong.
If `ret_kept_edges` is `True`, return the original indices of the kept
edges. Otherwise return `self` for chaining.
]
if compare[name[self].v is constant[None]] begin[:]
return[None]
variable[initial_num_verts] assign[=] call[name[self].v.shape][constant[0]]
if compare[name[self].e is_not constant[None]] begin[:]
variable[initial_num_edges] assign[=] call[name[self].e.shape][constant[0]]
variable[e_indices_to_keep] assign[=] call[name[self].all_edges_with_verts, parameter[name[indices_to_keep]]]
name[self].v assign[=] call[name[self].v][name[indices_to_keep]]
if compare[name[self].e is_not constant[None]] begin[:]
variable[v_old_to_new] assign[=] call[name[np].zeros, parameter[name[initial_num_verts]]]
variable[e_old_to_new] assign[=] call[name[np].zeros, parameter[name[initial_num_edges]]]
call[name[v_old_to_new]][name[indices_to_keep]] assign[=] call[name[np].arange, parameter[call[name[len], parameter[name[indices_to_keep]]]]]
name[self].e assign[=] call[name[v_old_to_new]][call[name[self].e][name[e_indices_to_keep]]]
call[name[e_old_to_new]][name[e_indices_to_keep]] assign[=] call[name[np].arange, parameter[call[name[self].e.shape][constant[0]]]]
return[<ast.IfExp object at 0x7da20c7c9060>] | keyword[def] identifier[keep_vertices] ( identifier[self] , identifier[indices_to_keep] , identifier[ret_kept_edges] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[v] keyword[is] keyword[None] :
keyword[return]
identifier[initial_num_verts] = identifier[self] . identifier[v] . identifier[shape] [ literal[int] ]
keyword[if] identifier[self] . identifier[e] keyword[is] keyword[not] keyword[None] :
identifier[initial_num_edges] = identifier[self] . identifier[e] . identifier[shape] [ literal[int] ]
identifier[e_indices_to_keep] = identifier[self] . identifier[all_edges_with_verts] ( identifier[indices_to_keep] , identifier[as_boolean] = keyword[True] )
identifier[self] . identifier[v] = identifier[self] . identifier[v] [ identifier[indices_to_keep] ]
keyword[if] identifier[self] . identifier[e] keyword[is] keyword[not] keyword[None] :
identifier[v_old_to_new] = identifier[np] . identifier[zeros] ( identifier[initial_num_verts] , identifier[dtype] = identifier[int] )
identifier[e_old_to_new] = identifier[np] . identifier[zeros] ( identifier[initial_num_edges] , identifier[dtype] = identifier[int] )
identifier[v_old_to_new] [ identifier[indices_to_keep] ]= identifier[np] . identifier[arange] ( identifier[len] ( identifier[indices_to_keep] ), identifier[dtype] = identifier[int] )
identifier[self] . identifier[e] = identifier[v_old_to_new] [ identifier[self] . identifier[e] [ identifier[e_indices_to_keep] ]]
identifier[e_old_to_new] [ identifier[e_indices_to_keep] ]= identifier[np] . identifier[arange] ( identifier[self] . identifier[e] . identifier[shape] [ literal[int] ], identifier[dtype] = identifier[int] )
keyword[else] :
identifier[e_indices_to_keep] =[]
keyword[return] identifier[np] . identifier[nonzero] ( identifier[e_indices_to_keep] )[ literal[int] ] keyword[if] identifier[ret_kept_edges] keyword[else] identifier[self] | def keep_vertices(self, indices_to_keep, ret_kept_edges=False):
"""
Keep the given vertices and discard the others, and any edges to which
they may belong.
If `ret_kept_edges` is `True`, return the original indices of the kept
edges. Otherwise return `self` for chaining.
"""
if self.v is None:
return # depends on [control=['if'], data=[]]
initial_num_verts = self.v.shape[0]
if self.e is not None:
initial_num_edges = self.e.shape[0]
e_indices_to_keep = self.all_edges_with_verts(indices_to_keep, as_boolean=True) # depends on [control=['if'], data=[]]
self.v = self.v[indices_to_keep]
if self.e is not None:
v_old_to_new = np.zeros(initial_num_verts, dtype=int)
e_old_to_new = np.zeros(initial_num_edges, dtype=int)
v_old_to_new[indices_to_keep] = np.arange(len(indices_to_keep), dtype=int)
self.e = v_old_to_new[self.e[e_indices_to_keep]]
e_old_to_new[e_indices_to_keep] = np.arange(self.e.shape[0], dtype=int) # depends on [control=['if'], data=[]]
else:
e_indices_to_keep = []
return np.nonzero(e_indices_to_keep)[0] if ret_kept_edges else self |
def decipher(self, string):
    """Decipher string using Delastelle cipher according to initialised key.
    Example::
        plaintext = Delastelle('APCZ WRLFBDKOTYUQGENHXMIVS').decipher(ciphertext)
    :param string: The string to decipher.
    :returns: The deciphered string, 1/3 the length of the ciphertext.
    """
    string = self.remove_punctuation(string, filter='[^' + self.chars + ']')
    decoded = []
    # Each plaintext character is encoded as a trigram of digits; walk the
    # ciphertext three digits at a time and look each trigram up.
    for start in range(0, len(string), 3):
        trigram = (int(string[start]), int(string[start + 1]), int(string[start + 2]))
        decoded.append(IND2L[trigram])
    return ''.join(decoded)
return ret | def function[decipher, parameter[self, string]]:
constant[Decipher string using Delastelle cipher according to initialised key.
Example::
plaintext = Delastelle('APCZ WRLFBDKOTYUQGENHXMIVS').decipher(ciphertext)
:param string: The string to decipher.
:returns: The deciphered string. The plaintext will be 1/3 the length of the ciphertext.
]
variable[string] assign[=] call[name[self].remove_punctuation, parameter[name[string]]]
variable[ret] assign[=] constant[]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[string]]], constant[3]]]] begin[:]
variable[ind] assign[=] call[name[tuple], parameter[<ast.ListComp object at 0x7da1b06ccd60>]]
<ast.AugAssign object at 0x7da1b06ccc70>
return[name[ret]] | keyword[def] identifier[decipher] ( identifier[self] , identifier[string] ):
literal[string]
identifier[string] = identifier[self] . identifier[remove_punctuation] ( identifier[string] , identifier[filter] = literal[string] + identifier[self] . identifier[chars] + literal[string] )
identifier[ret] = literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[string] ), literal[int] ):
identifier[ind] = identifier[tuple] ([ identifier[int] ( identifier[string] [ identifier[i] + identifier[k] ]) keyword[for] identifier[k] keyword[in] [ literal[int] , literal[int] , literal[int] ]])
identifier[ret] += identifier[IND2L] [ identifier[ind] ]
keyword[return] identifier[ret] | def decipher(self, string):
"""Decipher string using Delastelle cipher according to initialised key.
Example::
plaintext = Delastelle('APCZ WRLFBDKOTYUQGENHXMIVS').decipher(ciphertext)
:param string: The string to decipher.
:returns: The deciphered string. The plaintext will be 1/3 the length of the ciphertext.
"""
string = self.remove_punctuation(string, filter='[^' + self.chars + ']')
ret = ''
for i in range(0, len(string), 3):
ind = tuple([int(string[i + k]) for k in [0, 1, 2]])
ret += IND2L[ind] # depends on [control=['for'], data=['i']]
return ret |
def make_ttv_yaml(corpora, path_to_ttv_file, ttv_ratio=DEFAULT_TTV_RATIO, deterministic=False):
    """ Build a subject-independent test/train/validation split from the given
    corpora and save it as a YAML file. No subject contributes data to more
    than one of the three sets.
    # Arguments;
        corpora: list of paths to corpora (formatted according to notes.md)
        path_to_ttv_file: where the YAML file will be saved
        ttv_ratio: tuple (e.g. (1,4,4)) giving the relative size of each set
        deterministic: whether or not to shuffle the resources around when
            making the set.
    """
    dataset = get_dataset(corpora)
    data_sets = make_ttv(dataset, ttv_ratio=ttv_ratio, deterministic=deterministic)

    def values_for(key):
        # Pull one field out of each of the three sets, in a fixed order.
        return tuple(data_sets[set_name][key] for set_name in ('test', 'train', 'validation'))

    test, train, validation = values_for('paths')
    file_counts = list(values_for('number_of_files'))
    subject_counts = [len(subjects) for subjects in values_for('subjects')]

    dict_for_yaml = {
        'split': file_counts,
        'subject_split': subject_counts,
        "test": test,
        "train": train,
        "validation": validation,
    }

    with open(path_to_ttv_file, 'w') as ttv_file:
        yaml.dump(dict_for_yaml, ttv_file, default_flow_style=False)
constant[ Create a test, train, validation from the corpora given and saves it as a YAML filename.
Each set will be subject independent, meaning that no one subject can have data in more than one
set
# Arguments;
corpora: a list of the paths to corpora used (these have to be formatted accoring to notes.md)
path_to_ttv_file: the path to where the YAML file be be saved
ttv_ratio: a tuple (e.g. (1,4,4) of the relative sizoe of each set)
deterministic: whether or not to shuffle the resources around when making the set.
]
variable[dataset] assign[=] call[name[get_dataset], parameter[name[corpora]]]
variable[data_sets] assign[=] call[name[make_ttv], parameter[name[dataset]]]
def function[get_for_ttv, parameter[key]]:
return[tuple[[<ast.Subscript object at 0x7da204565510>, <ast.Subscript object at 0x7da204567a00>, <ast.Subscript object at 0x7da2045658a0>]]]
<ast.Tuple object at 0x7da204566410> assign[=] call[name[get_for_ttv], parameter[constant[paths]]]
variable[number_of_files_for_each_set] assign[=] call[name[list], parameter[call[name[get_for_ttv], parameter[constant[number_of_files]]]]]
variable[number_of_subjects_for_each_set] assign[=] <ast.ListComp object at 0x7da204567c10>
variable[dict_for_yaml] assign[=] dictionary[[<ast.Constant object at 0x7da204567790>, <ast.Constant object at 0x7da2045677c0>, <ast.Constant object at 0x7da204565870>, <ast.Constant object at 0x7da204565240>, <ast.Constant object at 0x7da204566aa0>], [<ast.Name object at 0x7da204564d30>, <ast.Name object at 0x7da204565ea0>, <ast.Name object at 0x7da204567f40>, <ast.Name object at 0x7da2045664d0>, <ast.Name object at 0x7da204567d60>]]
with call[name[open], parameter[name[path_to_ttv_file], constant[w]]] begin[:]
call[name[yaml].dump, parameter[name[dict_for_yaml], name[f]]] | keyword[def] identifier[make_ttv_yaml] ( identifier[corpora] , identifier[path_to_ttv_file] , identifier[ttv_ratio] = identifier[DEFAULT_TTV_RATIO] , identifier[deterministic] = keyword[False] ):
literal[string]
identifier[dataset] = identifier[get_dataset] ( identifier[corpora] )
identifier[data_sets] = identifier[make_ttv] ( identifier[dataset] , identifier[ttv_ratio] = identifier[ttv_ratio] , identifier[deterministic] = identifier[deterministic] )
keyword[def] identifier[get_for_ttv] ( identifier[key] ):
keyword[return] (
identifier[data_sets] [ literal[string] ][ identifier[key] ],
identifier[data_sets] [ literal[string] ][ identifier[key] ],
identifier[data_sets] [ literal[string] ][ identifier[key] ]
)
identifier[test] , identifier[train] , identifier[validation] = identifier[get_for_ttv] ( literal[string] )
identifier[number_of_files_for_each_set] = identifier[list] ( identifier[get_for_ttv] ( literal[string] ))
identifier[number_of_subjects_for_each_set] =[ identifier[len] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[get_for_ttv] ( literal[string] )]
identifier[dict_for_yaml] ={
literal[string] : identifier[number_of_files_for_each_set] ,
literal[string] : identifier[number_of_subjects_for_each_set] ,
literal[string] : identifier[test] ,
literal[string] : identifier[train] ,
literal[string] : identifier[validation]
}
keyword[with] identifier[open] ( identifier[path_to_ttv_file] , literal[string] ) keyword[as] identifier[f] :
identifier[yaml] . identifier[dump] ( identifier[dict_for_yaml] , identifier[f] , identifier[default_flow_style] = keyword[False] ) | def make_ttv_yaml(corpora, path_to_ttv_file, ttv_ratio=DEFAULT_TTV_RATIO, deterministic=False):
""" Create a test, train, validation from the corpora given and saves it as a YAML filename.
Each set will be subject independent, meaning that no one subject can have data in more than one
set
# Arguments;
corpora: a list of the paths to corpora used (these have to be formatted accoring to notes.md)
path_to_ttv_file: the path to where the YAML file be be saved
ttv_ratio: a tuple (e.g. (1,4,4) of the relative sizoe of each set)
deterministic: whether or not to shuffle the resources around when making the set.
"""
dataset = get_dataset(corpora)
data_sets = make_ttv(dataset, ttv_ratio=ttv_ratio, deterministic=deterministic)
def get_for_ttv(key):
return (data_sets['test'][key], data_sets['train'][key], data_sets['validation'][key])
(test, train, validation) = get_for_ttv('paths')
number_of_files_for_each_set = list(get_for_ttv('number_of_files'))
number_of_subjects_for_each_set = [len(x) for x in get_for_ttv('subjects')]
dict_for_yaml = {'split': number_of_files_for_each_set, 'subject_split': number_of_subjects_for_each_set, 'test': test, 'train': train, 'validation': validation}
with open(path_to_ttv_file, 'w') as f:
yaml.dump(dict_for_yaml, f, default_flow_style=False) # depends on [control=['with'], data=['f']] |
def check_resize(resize):
    """Validate a resize parameter; raise on an illegal value.

    Accepted forms (case-insensitive, surrounding whitespace ignored):
      * ``None`` -- no resizing requested, accepted as-is.
      * ``"{width}x{height}"`` -- both parts must be unsigned integers.
      * ``"{percentage}%"`` -- the percentage must be between 1 and 1000.

    Raises:
        PercentageOutOfRange: if a numeric percentage falls outside [1, 1000].
        MallformedResize: for any other malformed value.
    """
    if resize is None:
        return
    resize = resize.lower().strip()
    if 'x' in resize:
        # (The previous unused "resize.lower().split('x')" assignment was
        # dead code and has been removed.)
        dimensions = [part.strip() for part in resize.split('x')]
        if len(dimensions) == 2 and dimensions[0].isdigit() and dimensions[1].isdigit():
            return
    elif '%' in resize:
        percentage = resize.split('%')[0]
        if percentage.isnumeric():
            percentage = int(percentage)
            if 1 <= percentage <= 1000:
                return
            else:
                raise PercentageOutOfRange("percentage must be between 1 and 1000")
    raise MallformedResize('Resize value "%s" is mallformed. '
                           'Desired format is: {width}x{height} or {percentage}%%' % resize)
constant[checks resize parameter if illegal value raises exception]
if compare[name[resize] is constant[None]] begin[:]
return[None]
variable[resize] assign[=] call[call[name[resize].lower, parameter[]].strip, parameter[]]
if compare[constant[x] in name[resize]] begin[:]
variable[tmp] assign[=] call[call[name[resize].lower, parameter[]].split, parameter[constant[x]]]
variable[tmp] assign[=] <ast.ListComp object at 0x7da20c6ab130>
if <ast.BoolOp object at 0x7da20c6aa7a0> begin[:]
return[None]
<ast.Raise object at 0x7da20c6a8d60> | keyword[def] identifier[check_resize] ( identifier[resize] ):
literal[string]
keyword[if] identifier[resize] keyword[is] keyword[None] :
keyword[return]
identifier[resize] = identifier[resize] . identifier[lower] (). identifier[strip] ()
keyword[if] literal[string] keyword[in] identifier[resize] :
identifier[tmp] = identifier[resize] . identifier[lower] (). identifier[split] ( literal[string] )
identifier[tmp] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[resize] . identifier[split] ( literal[string] )]
keyword[if] identifier[len] ( identifier[tmp] )== literal[int] keyword[and] identifier[tmp] [ literal[int] ]. identifier[isdigit] () keyword[and] identifier[tmp] [ literal[int] ]. identifier[isdigit] ():
keyword[return]
keyword[elif] literal[string] keyword[in] identifier[resize] :
identifier[tmp] = identifier[resize] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[tmp] . identifier[isnumeric] ():
identifier[tmp] = identifier[int] ( identifier[tmp] )
keyword[if] literal[int] <= identifier[tmp] <= literal[int] :
keyword[return]
keyword[else] :
keyword[raise] identifier[PercentageOutOfRange] ( literal[string] )
keyword[raise] identifier[MallformedResize] ( literal[string]
literal[string] % identifier[resize] ) | def check_resize(resize):
"""checks resize parameter if illegal value raises exception"""
if resize is None:
return # depends on [control=['if'], data=[]]
resize = resize.lower().strip()
if 'x' in resize:
tmp = resize.lower().split('x')
tmp = [x.strip() for x in resize.split('x')]
if len(tmp) == 2 and tmp[0].isdigit() and tmp[1].isdigit():
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['resize']]
elif '%' in resize:
tmp = resize.split('%')[0]
if tmp.isnumeric():
tmp = int(tmp)
if 1 <= tmp <= 1000:
return # depends on [control=['if'], data=[]]
else:
raise PercentageOutOfRange('percentage must be between 1 and 1000') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['resize']]
raise MallformedResize('Resize value "%s" is mallformed. Desired format is: {width}x{height} or {percentage}%%' % resize) |
def order_enum(field, members):
    """
    Build an annotation value for sorting a queryset by an enum field.

    ``field``
        The name of an EnumChoiceField.
    ``members``
        An iterable of Enum members in the order to sort by. Since Enums are
        iterable, this can be the Enum itself when its default order is
        wanted:

    .. code-block:: python

        desired_order = [MyEnum.bar, MyEnum.baz, MyEnum.foo]
        ChoiceModel.objects\\
            .annotate(my_order=order_enum('choice', desired_order))\\
            .order_by('my_order')

    .. warning:: On Python 2, Enums may not have a consistent order,
        depending upon how they were defined.
        You can set an explicit order using ``__order__`` to fix this.
        See the ``enum34`` docs for more information.

    Any enum members not present in the list of members
    will be sorted to the end of the results.
    """
    members = list(members)
    whens = [
        When(**{field: member, 'then': position})
        for position, member in enumerate(members)
    ]
    # Members absent from ``members`` fall through to the default, which
    # sorts them after everything that was listed.
    return Case(*whens, default=len(members), output_field=IntegerField())
constant[
Make an annotation value that can be used to sort by an enum field.
``field``
The name of an EnumChoiceField.
``members``
An iterable of Enum members in the order to sort by.
Use like:
.. code-block:: python
desired_order = [MyEnum.bar, MyEnum.baz, MyEnum.foo]
ChoiceModel.objects\
.annotate(my_order=order_enum('choice', desired_order))\
.order_by('my_order')
As Enums are iterable, ``members`` can be the Enum itself
if the default ordering is desired:
.. code-block:: python
ChoiceModel.objects\
.annotate(my_order=order_enum('choice', MyEnum))\
.order_by('my_order')
.. warning:: On Python 2, Enums may not have a consistent order,
depending upon how they were defined.
You can set an explicit order using ``__order__`` to fix this.
See the ``enum34`` docs for more information.
Any enum members not present in the list of members
will be sorted to the end of the results.
]
variable[members] assign[=] call[name[list], parameter[name[members]]]
return[call[name[Case], parameter[<ast.Starred object at 0x7da1b0380280>]]] | keyword[def] identifier[order_enum] ( identifier[field] , identifier[members] ):
literal[string]
identifier[members] = identifier[list] ( identifier[members] )
keyword[return] identifier[Case] (
*( identifier[When] (**{ identifier[field] : identifier[member] , literal[string] : identifier[i] })
keyword[for] identifier[i] , identifier[member] keyword[in] identifier[enumerate] ( identifier[members] )),
identifier[default] = identifier[len] ( identifier[members] ),
identifier[output_field] = identifier[IntegerField] ()) | def order_enum(field, members):
"""
Make an annotation value that can be used to sort by an enum field.
``field``
The name of an EnumChoiceField.
``members``
An iterable of Enum members in the order to sort by.
Use like:
.. code-block:: python
desired_order = [MyEnum.bar, MyEnum.baz, MyEnum.foo]
ChoiceModel.objects\\
.annotate(my_order=order_enum('choice', desired_order))\\
.order_by('my_order')
As Enums are iterable, ``members`` can be the Enum itself
if the default ordering is desired:
.. code-block:: python
ChoiceModel.objects\\
.annotate(my_order=order_enum('choice', MyEnum))\\
.order_by('my_order')
.. warning:: On Python 2, Enums may not have a consistent order,
depending upon how they were defined.
You can set an explicit order using ``__order__`` to fix this.
See the ``enum34`` docs for more information.
Any enum members not present in the list of members
will be sorted to the end of the results.
"""
members = list(members)
return Case(*(When(**{field: member, 'then': i}) for (i, member) in enumerate(members)), default=len(members), output_field=IntegerField()) |
def final_spin_from_f0_tau(f0, tau, l=2, m=2):
    """Returns the final spin based on the given frequency and damping time.

    .. note::
        Currently, only l = m = 2 is supported. Any other indices will raise
        a ``KeyError``.

    Parameters
    ----------
    f0 : float or array
        Frequency of the QNM (in Hz).
    tau : float or array
        Damping time of the QNM (in seconds).
    l : int, optional
        l-index of the harmonic. Default is 2.
    m : int, optional
        m-index of the harmonic. Default is 2.

    Returns
    -------
    float or array
        The spin of the final black hole. ``numpy.nan`` is returned for any
        frequency/damping-time combination that is unphysical.
    """
    f0, tau, input_is_array = ensurearray(f0, tau)
    # Fitting constants from Berti et al. 2006.
    a, b, c = _berti_spin_constants[l, m]
    origshape = f0.shape
    # Work on flat views so each element can be handled independently.
    f0 = f0.ravel()
    tau = tau.ravel()
    spins = numpy.zeros(f0.size)
    for idx in range(spins.size):
        quality = f0[idx] * tau[idx] * numpy.pi
        try:
            spin = 1. - ((quality - a) / b)**(1. / c)
        except ValueError:
            # Unphysical combination of frequency and damping time.
            spin = numpy.nan
        spins[idx] = spin
    return formatreturn(spins.reshape(origshape), input_is_array)
constant[Returns the final spin based on the given frequency and damping time.
.. note::
Currently, only l = m = 2 is supported. Any other indices will raise
a ``KeyError``.
Parameters
----------
f0 : float or array
Frequency of the QNM (in Hz).
tau : float or array
Damping time of the QNM (in seconds).
l : int, optional
l-index of the harmonic. Default is 2.
m : int, optional
m-index of the harmonic. Default is 2.
Returns
-------
float or array
The spin of the final black hole. If the combination of frequency
and damping times give an unphysical result, ``numpy.nan`` will be
returned.
]
<ast.Tuple object at 0x7da18bccac80> assign[=] call[name[ensurearray], parameter[name[f0], name[tau]]]
<ast.Tuple object at 0x7da18bcc8220> assign[=] call[name[_berti_spin_constants]][tuple[[<ast.Name object at 0x7da18bccabf0>, <ast.Name object at 0x7da18bcca2f0>]]]
variable[origshape] assign[=] name[f0].shape
variable[f0] assign[=] call[name[f0].ravel, parameter[]]
variable[tau] assign[=] call[name[tau].ravel, parameter[]]
variable[spins] assign[=] call[name[numpy].zeros, parameter[name[f0].size]]
for taget[name[ii]] in starred[call[name[range], parameter[name[spins].size]]] begin[:]
variable[Q] assign[=] binary_operation[binary_operation[call[name[f0]][name[ii]] * call[name[tau]][name[ii]]] * name[numpy].pi]
<ast.Try object at 0x7da18bcc9ab0>
call[name[spins]][name[ii]] assign[=] name[s]
variable[spins] assign[=] call[name[spins].reshape, parameter[name[origshape]]]
return[call[name[formatreturn], parameter[name[spins], name[input_is_array]]]] | keyword[def] identifier[final_spin_from_f0_tau] ( identifier[f0] , identifier[tau] , identifier[l] = literal[int] , identifier[m] = literal[int] ):
literal[string]
identifier[f0] , identifier[tau] , identifier[input_is_array] = identifier[ensurearray] ( identifier[f0] , identifier[tau] )
identifier[a] , identifier[b] , identifier[c] = identifier[_berti_spin_constants] [ identifier[l] , identifier[m] ]
identifier[origshape] = identifier[f0] . identifier[shape]
identifier[f0] = identifier[f0] . identifier[ravel] ()
identifier[tau] = identifier[tau] . identifier[ravel] ()
identifier[spins] = identifier[numpy] . identifier[zeros] ( identifier[f0] . identifier[size] )
keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[spins] . identifier[size] ):
identifier[Q] = identifier[f0] [ identifier[ii] ]* identifier[tau] [ identifier[ii] ]* identifier[numpy] . identifier[pi]
keyword[try] :
identifier[s] = literal[int] -(( identifier[Q] - identifier[a] )/ identifier[b] )**( literal[int] / identifier[c] )
keyword[except] identifier[ValueError] :
identifier[s] = identifier[numpy] . identifier[nan]
identifier[spins] [ identifier[ii] ]= identifier[s]
identifier[spins] = identifier[spins] . identifier[reshape] ( identifier[origshape] )
keyword[return] identifier[formatreturn] ( identifier[spins] , identifier[input_is_array] ) | def final_spin_from_f0_tau(f0, tau, l=2, m=2):
"""Returns the final spin based on the given frequency and damping time.
.. note::
Currently, only l = m = 2 is supported. Any other indices will raise
a ``KeyError``.
Parameters
----------
f0 : float or array
Frequency of the QNM (in Hz).
tau : float or array
Damping time of the QNM (in seconds).
l : int, optional
l-index of the harmonic. Default is 2.
m : int, optional
m-index of the harmonic. Default is 2.
Returns
-------
float or array
The spin of the final black hole. If the combination of frequency
and damping times give an unphysical result, ``numpy.nan`` will be
returned.
"""
(f0, tau, input_is_array) = ensurearray(f0, tau)
# from Berti et al. 2006
(a, b, c) = _berti_spin_constants[l, m]
origshape = f0.shape
# flatten inputs for storing results
f0 = f0.ravel()
tau = tau.ravel()
spins = numpy.zeros(f0.size)
for ii in range(spins.size):
Q = f0[ii] * tau[ii] * numpy.pi
try:
s = 1.0 - ((Q - a) / b) ** (1.0 / c) # depends on [control=['try'], data=[]]
except ValueError:
s = numpy.nan # depends on [control=['except'], data=[]]
spins[ii] = s # depends on [control=['for'], data=['ii']]
spins = spins.reshape(origshape)
return formatreturn(spins, input_is_array) |
def calc_pvalue(self, study_count, study_n, pop_count, pop_n):
    """pvalues are calculated in derived classes.

    This base implementation always raises: subclasses are expected to
    override it with an actual p-value computation. The four count/size
    arguments are only interpolated into the error message here.

    Raises:
        NotImplementedError: always; the message identifies the attempted
            call and ``self.pval_fnc`` for easier debugging.
    """
    fnc_call = "calc_pvalue({SCNT}, {STOT}, {PCNT} {PTOT})".format(
        SCNT=study_count, STOT=study_n, PCNT=pop_count, PTOT=pop_n)
    # NotImplementedError is the idiomatic signal for an abstract method.
    # It is still an Exception subclass, so existing broad handlers that
    # caught the previous generic Exception keep working.
    raise NotImplementedError("NOT IMPLEMENTED: {FNC_CALL} using {FNC}.".format(
        FNC_CALL=fnc_call, FNC=self.pval_fnc))
constant[pvalues are calculated in derived classes.]
variable[fnc_call] assign[=] call[constant[calc_pvalue({SCNT}, {STOT}, {PCNT} {PTOT})].format, parameter[]]
<ast.Raise object at 0x7da18f811360> | keyword[def] identifier[calc_pvalue] ( identifier[self] , identifier[study_count] , identifier[study_n] , identifier[pop_count] , identifier[pop_n] ):
literal[string]
identifier[fnc_call] = literal[string] . identifier[format] (
identifier[SCNT] = identifier[study_count] , identifier[STOT] = identifier[study_n] , identifier[PCNT] = identifier[pop_count] , identifier[PTOT] = identifier[pop_n] )
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] (
identifier[FNC_CALL] = identifier[fnc_call] , identifier[FNC] = identifier[self] . identifier[pval_fnc] )) | def calc_pvalue(self, study_count, study_n, pop_count, pop_n):
"""pvalues are calculated in derived classes."""
fnc_call = 'calc_pvalue({SCNT}, {STOT}, {PCNT} {PTOT})'.format(SCNT=study_count, STOT=study_n, PCNT=pop_count, PTOT=pop_n)
raise Exception('NOT IMPLEMENTED: {FNC_CALL} using {FNC}.'.format(FNC_CALL=fnc_call, FNC=self.pval_fnc)) |
def get(self, sid):
"""
Constructs a InstalledAddOnExtensionContext
:param sid: The unique Extension Sid
:returns: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
:rtype: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
"""
return InstalledAddOnExtensionContext(
self._version,
installed_add_on_sid=self._solution['installed_add_on_sid'],
sid=sid,
) | def function[get, parameter[self, sid]]:
constant[
Constructs a InstalledAddOnExtensionContext
:param sid: The unique Extension Sid
:returns: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
:rtype: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
]
return[call[name[InstalledAddOnExtensionContext], parameter[name[self]._version]]] | keyword[def] identifier[get] ( identifier[self] , identifier[sid] ):
literal[string]
keyword[return] identifier[InstalledAddOnExtensionContext] (
identifier[self] . identifier[_version] ,
identifier[installed_add_on_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[sid] ,
) | def get(self, sid):
"""
Constructs a InstalledAddOnExtensionContext
:param sid: The unique Extension Sid
:returns: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
:rtype: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
"""
return InstalledAddOnExtensionContext(self._version, installed_add_on_sid=self._solution['installed_add_on_sid'], sid=sid) |
def nmf_ensemble(data, k, n_runs=10, W_list=[], **nmf_params):
"""
Runs an ensemble method on the list of NMF W matrices...
Args:
data: genes x cells array (should be log + cell-normalized)
k: number of classes
n_runs (optional): number of random initializations of state estimation
M_list (optional): list of M arrays from state estimation
se_params (optional): optional poisson_estimate_state params
Returns:
W_new
H_new
"""
nmf = NMF(k)
if len(W_list)==0:
W_list = []
for i in range(n_runs):
W = nmf.fit_transform(data)
W_list.append(W)
W_stacked = np.hstack(W_list)
nmf_w = nmf.fit_transform(W_stacked)
nmf_h = nmf.components_
H_new = data.T.dot(nmf_w).T
nmf2 = NMF(k, init='custom')
nmf_w = nmf2.fit_transform(data, W=nmf_w, H=H_new)
H_new = nmf2.components_
#W_new = W_new/W_new.sum(0)
# alternatively, use nmf_w and h_new as initializations for another NMF round?
return nmf_w, H_new | def function[nmf_ensemble, parameter[data, k, n_runs, W_list]]:
constant[
Runs an ensemble method on the list of NMF W matrices...
Args:
data: genes x cells array (should be log + cell-normalized)
k: number of classes
n_runs (optional): number of random initializations of state estimation
M_list (optional): list of M arrays from state estimation
se_params (optional): optional poisson_estimate_state params
Returns:
W_new
H_new
]
variable[nmf] assign[=] call[name[NMF], parameter[name[k]]]
if compare[call[name[len], parameter[name[W_list]]] equal[==] constant[0]] begin[:]
variable[W_list] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_runs]]]] begin[:]
variable[W] assign[=] call[name[nmf].fit_transform, parameter[name[data]]]
call[name[W_list].append, parameter[name[W]]]
variable[W_stacked] assign[=] call[name[np].hstack, parameter[name[W_list]]]
variable[nmf_w] assign[=] call[name[nmf].fit_transform, parameter[name[W_stacked]]]
variable[nmf_h] assign[=] name[nmf].components_
variable[H_new] assign[=] call[name[data].T.dot, parameter[name[nmf_w]]].T
variable[nmf2] assign[=] call[name[NMF], parameter[name[k]]]
variable[nmf_w] assign[=] call[name[nmf2].fit_transform, parameter[name[data]]]
variable[H_new] assign[=] name[nmf2].components_
return[tuple[[<ast.Name object at 0x7da2044c19f0>, <ast.Name object at 0x7da2044c17b0>]]] | keyword[def] identifier[nmf_ensemble] ( identifier[data] , identifier[k] , identifier[n_runs] = literal[int] , identifier[W_list] =[],** identifier[nmf_params] ):
literal[string]
identifier[nmf] = identifier[NMF] ( identifier[k] )
keyword[if] identifier[len] ( identifier[W_list] )== literal[int] :
identifier[W_list] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_runs] ):
identifier[W] = identifier[nmf] . identifier[fit_transform] ( identifier[data] )
identifier[W_list] . identifier[append] ( identifier[W] )
identifier[W_stacked] = identifier[np] . identifier[hstack] ( identifier[W_list] )
identifier[nmf_w] = identifier[nmf] . identifier[fit_transform] ( identifier[W_stacked] )
identifier[nmf_h] = identifier[nmf] . identifier[components_]
identifier[H_new] = identifier[data] . identifier[T] . identifier[dot] ( identifier[nmf_w] ). identifier[T]
identifier[nmf2] = identifier[NMF] ( identifier[k] , identifier[init] = literal[string] )
identifier[nmf_w] = identifier[nmf2] . identifier[fit_transform] ( identifier[data] , identifier[W] = identifier[nmf_w] , identifier[H] = identifier[H_new] )
identifier[H_new] = identifier[nmf2] . identifier[components_]
keyword[return] identifier[nmf_w] , identifier[H_new] | def nmf_ensemble(data, k, n_runs=10, W_list=[], **nmf_params):
"""
Runs an ensemble method on the list of NMF W matrices...
Args:
data: genes x cells array (should be log + cell-normalized)
k: number of classes
n_runs (optional): number of random initializations of state estimation
M_list (optional): list of M arrays from state estimation
se_params (optional): optional poisson_estimate_state params
Returns:
W_new
H_new
"""
nmf = NMF(k)
if len(W_list) == 0:
W_list = []
for i in range(n_runs):
W = nmf.fit_transform(data)
W_list.append(W) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
W_stacked = np.hstack(W_list)
nmf_w = nmf.fit_transform(W_stacked)
nmf_h = nmf.components_
H_new = data.T.dot(nmf_w).T
nmf2 = NMF(k, init='custom')
nmf_w = nmf2.fit_transform(data, W=nmf_w, H=H_new)
H_new = nmf2.components_
#W_new = W_new/W_new.sum(0)
# alternatively, use nmf_w and h_new as initializations for another NMF round?
return (nmf_w, H_new) |
def refreshMinMax(self):
""" Refreshes the min max config values from the axes' state.
Does nothing when self.getRefreshBlocked() returns True.
"""
if self.getRefreshBlocked():
logger.debug("refreshMinMax blocked for {}".format(self.nodeName))
return
self._forceRefreshMinMax() | def function[refreshMinMax, parameter[self]]:
constant[ Refreshes the min max config values from the axes' state.
Does nothing when self.getRefreshBlocked() returns True.
]
if call[name[self].getRefreshBlocked, parameter[]] begin[:]
call[name[logger].debug, parameter[call[constant[refreshMinMax blocked for {}].format, parameter[name[self].nodeName]]]]
return[None]
call[name[self]._forceRefreshMinMax, parameter[]] | keyword[def] identifier[refreshMinMax] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[getRefreshBlocked] ():
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[nodeName] ))
keyword[return]
identifier[self] . identifier[_forceRefreshMinMax] () | def refreshMinMax(self):
""" Refreshes the min max config values from the axes' state.
Does nothing when self.getRefreshBlocked() returns True.
"""
if self.getRefreshBlocked():
logger.debug('refreshMinMax blocked for {}'.format(self.nodeName))
return # depends on [control=['if'], data=[]]
self._forceRefreshMinMax() |
def serial(self, may_block=True):
""" Get the YubiKey serial number (requires YubiKey 2.2). """
if not self.capabilities.have_serial_number():
raise yubikey_base.YubiKeyVersionError("Serial number unsupported in YubiKey %s" % self.version() )
return self._read_serial(may_block) | def function[serial, parameter[self, may_block]]:
constant[ Get the YubiKey serial number (requires YubiKey 2.2). ]
if <ast.UnaryOp object at 0x7da1b0889f00> begin[:]
<ast.Raise object at 0x7da1b088b130>
return[call[name[self]._read_serial, parameter[name[may_block]]]] | keyword[def] identifier[serial] ( identifier[self] , identifier[may_block] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[capabilities] . identifier[have_serial_number] ():
keyword[raise] identifier[yubikey_base] . identifier[YubiKeyVersionError] ( literal[string] % identifier[self] . identifier[version] ())
keyword[return] identifier[self] . identifier[_read_serial] ( identifier[may_block] ) | def serial(self, may_block=True):
""" Get the YubiKey serial number (requires YubiKey 2.2). """
if not self.capabilities.have_serial_number():
raise yubikey_base.YubiKeyVersionError('Serial number unsupported in YubiKey %s' % self.version()) # depends on [control=['if'], data=[]]
return self._read_serial(may_block) |
def write_history(self, history):
"""
Write history text into the header
"""
self._FITS.write_history(self._ext+1, str(history)) | def function[write_history, parameter[self, history]]:
constant[
Write history text into the header
]
call[name[self]._FITS.write_history, parameter[binary_operation[name[self]._ext + constant[1]], call[name[str], parameter[name[history]]]]] | keyword[def] identifier[write_history] ( identifier[self] , identifier[history] ):
literal[string]
identifier[self] . identifier[_FITS] . identifier[write_history] ( identifier[self] . identifier[_ext] + literal[int] , identifier[str] ( identifier[history] )) | def write_history(self, history):
"""
Write history text into the header
"""
self._FITS.write_history(self._ext + 1, str(history)) |
def fit(self, **kwargs):
"""Call the fit method of the primitive.
The given keyword arguments will be passed directly to the `fit`
method of the primitive instance specified in the JSON annotation.
If any of the arguments expected by the produce method had been
given during the MLBlock initialization, they will be passed as well.
If the fit method was not specified in the JSON annotation, or if
the primitive is a simple function, this will be a noop.
Args:
**kwargs: Any given keyword argument will be directly passed
to the primitive fit method.
Raises:
TypeError: A `TypeError` might be raised if any argument not
expected by the primitive fit method is given.
"""
if self.fit_method is not None:
fit_args = self._fit_params.copy()
fit_args.update(kwargs)
getattr(self.instance, self.fit_method)(**fit_args) | def function[fit, parameter[self]]:
constant[Call the fit method of the primitive.
The given keyword arguments will be passed directly to the `fit`
method of the primitive instance specified in the JSON annotation.
If any of the arguments expected by the produce method had been
given during the MLBlock initialization, they will be passed as well.
If the fit method was not specified in the JSON annotation, or if
the primitive is a simple function, this will be a noop.
Args:
**kwargs: Any given keyword argument will be directly passed
to the primitive fit method.
Raises:
TypeError: A `TypeError` might be raised if any argument not
expected by the primitive fit method is given.
]
if compare[name[self].fit_method is_not constant[None]] begin[:]
variable[fit_args] assign[=] call[name[self]._fit_params.copy, parameter[]]
call[name[fit_args].update, parameter[name[kwargs]]]
call[call[name[getattr], parameter[name[self].instance, name[self].fit_method]], parameter[]] | keyword[def] identifier[fit] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[fit_method] keyword[is] keyword[not] keyword[None] :
identifier[fit_args] = identifier[self] . identifier[_fit_params] . identifier[copy] ()
identifier[fit_args] . identifier[update] ( identifier[kwargs] )
identifier[getattr] ( identifier[self] . identifier[instance] , identifier[self] . identifier[fit_method] )(** identifier[fit_args] ) | def fit(self, **kwargs):
"""Call the fit method of the primitive.
The given keyword arguments will be passed directly to the `fit`
method of the primitive instance specified in the JSON annotation.
If any of the arguments expected by the produce method had been
given during the MLBlock initialization, they will be passed as well.
If the fit method was not specified in the JSON annotation, or if
the primitive is a simple function, this will be a noop.
Args:
**kwargs: Any given keyword argument will be directly passed
to the primitive fit method.
Raises:
TypeError: A `TypeError` might be raised if any argument not
expected by the primitive fit method is given.
"""
if self.fit_method is not None:
fit_args = self._fit_params.copy()
fit_args.update(kwargs)
getattr(self.instance, self.fit_method)(**fit_args) # depends on [control=['if'], data=[]] |
def to_dict(self):
"""
Convert the object into a json serializable dictionary.
Note: It uses the private method _save_to_input_dict of the parent.
:return dict: json serializable dictionary containing the needed information to instantiate the object
"""
input_dict = super(StdPeriodic, self)._save_to_input_dict()
input_dict["class"] = "GPy.kern.StdPeriodic"
input_dict["variance"] = self.variance.values.tolist()
input_dict["period"] = self.period.values.tolist()
input_dict["lengthscale"] = self.lengthscale.values.tolist()
input_dict["ARD1"] = self.ARD1
input_dict["ARD2"] = self.ARD2
return input_dict | def function[to_dict, parameter[self]]:
constant[
Convert the object into a json serializable dictionary.
Note: It uses the private method _save_to_input_dict of the parent.
:return dict: json serializable dictionary containing the needed information to instantiate the object
]
variable[input_dict] assign[=] call[call[name[super], parameter[name[StdPeriodic], name[self]]]._save_to_input_dict, parameter[]]
call[name[input_dict]][constant[class]] assign[=] constant[GPy.kern.StdPeriodic]
call[name[input_dict]][constant[variance]] assign[=] call[name[self].variance.values.tolist, parameter[]]
call[name[input_dict]][constant[period]] assign[=] call[name[self].period.values.tolist, parameter[]]
call[name[input_dict]][constant[lengthscale]] assign[=] call[name[self].lengthscale.values.tolist, parameter[]]
call[name[input_dict]][constant[ARD1]] assign[=] name[self].ARD1
call[name[input_dict]][constant[ARD2]] assign[=] name[self].ARD2
return[name[input_dict]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[input_dict] = identifier[super] ( identifier[StdPeriodic] , identifier[self] ). identifier[_save_to_input_dict] ()
identifier[input_dict] [ literal[string] ]= literal[string]
identifier[input_dict] [ literal[string] ]= identifier[self] . identifier[variance] . identifier[values] . identifier[tolist] ()
identifier[input_dict] [ literal[string] ]= identifier[self] . identifier[period] . identifier[values] . identifier[tolist] ()
identifier[input_dict] [ literal[string] ]= identifier[self] . identifier[lengthscale] . identifier[values] . identifier[tolist] ()
identifier[input_dict] [ literal[string] ]= identifier[self] . identifier[ARD1]
identifier[input_dict] [ literal[string] ]= identifier[self] . identifier[ARD2]
keyword[return] identifier[input_dict] | def to_dict(self):
"""
Convert the object into a json serializable dictionary.
Note: It uses the private method _save_to_input_dict of the parent.
:return dict: json serializable dictionary containing the needed information to instantiate the object
"""
input_dict = super(StdPeriodic, self)._save_to_input_dict()
input_dict['class'] = 'GPy.kern.StdPeriodic'
input_dict['variance'] = self.variance.values.tolist()
input_dict['period'] = self.period.values.tolist()
input_dict['lengthscale'] = self.lengthscale.values.tolist()
input_dict['ARD1'] = self.ARD1
input_dict['ARD2'] = self.ARD2
return input_dict |
def all(self, data={}, **kwargs):
""""
Fetch all Subscription entities
Returns:
Dictionary of Subscription data
"""
return super(Subscription, self).all(data, **kwargs) | def function[all, parameter[self, data]]:
constant["
Fetch all Subscription entities
Returns:
Dictionary of Subscription data
]
return[call[call[name[super], parameter[name[Subscription], name[self]]].all, parameter[name[data]]]] | keyword[def] identifier[all] ( identifier[self] , identifier[data] ={},** identifier[kwargs] ):
literal[string]
keyword[return] identifier[super] ( identifier[Subscription] , identifier[self] ). identifier[all] ( identifier[data] ,** identifier[kwargs] ) | def all(self, data={}, **kwargs):
""""
Fetch all Subscription entities
Returns:
Dictionary of Subscription data
"""
return super(Subscription, self).all(data, **kwargs) |
def get_apps_menu(self):
"""Temporal code, will change to apps.get_app_configs() for django 1.7
Generate a initial menu list using the AppsConfig registered
"""
menu = {}
for model, model_admin in self.admin_site._registry.items():
if hasattr(model_admin, 'app_config'):
if model_admin.app_config.has_menu_permission(obj=self.user):
menu.update({
'app:' + model_admin.app_config.name: {
'title': model_admin.app_config.verbose_name,
'menus': model_admin.app_config.init_menu(),
'first_icon': model_admin.app_config.icon}})
return menu | def function[get_apps_menu, parameter[self]]:
constant[Temporal code, will change to apps.get_app_configs() for django 1.7
Generate a initial menu list using the AppsConfig registered
]
variable[menu] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0b3b2b0>, <ast.Name object at 0x7da1b0b3abf0>]]] in starred[call[name[self].admin_site._registry.items, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[model_admin], constant[app_config]]] begin[:]
if call[name[model_admin].app_config.has_menu_permission, parameter[]] begin[:]
call[name[menu].update, parameter[dictionary[[<ast.BinOp object at 0x7da1b0b3a110>], [<ast.Dict object at 0x7da1b0b3a6b0>]]]]
return[name[menu]] | keyword[def] identifier[get_apps_menu] ( identifier[self] ):
literal[string]
identifier[menu] ={}
keyword[for] identifier[model] , identifier[model_admin] keyword[in] identifier[self] . identifier[admin_site] . identifier[_registry] . identifier[items] ():
keyword[if] identifier[hasattr] ( identifier[model_admin] , literal[string] ):
keyword[if] identifier[model_admin] . identifier[app_config] . identifier[has_menu_permission] ( identifier[obj] = identifier[self] . identifier[user] ):
identifier[menu] . identifier[update] ({
literal[string] + identifier[model_admin] . identifier[app_config] . identifier[name] :{
literal[string] : identifier[model_admin] . identifier[app_config] . identifier[verbose_name] ,
literal[string] : identifier[model_admin] . identifier[app_config] . identifier[init_menu] (),
literal[string] : identifier[model_admin] . identifier[app_config] . identifier[icon] }})
keyword[return] identifier[menu] | def get_apps_menu(self):
"""Temporal code, will change to apps.get_app_configs() for django 1.7
Generate a initial menu list using the AppsConfig registered
"""
menu = {}
for (model, model_admin) in self.admin_site._registry.items():
if hasattr(model_admin, 'app_config'):
if model_admin.app_config.has_menu_permission(obj=self.user):
menu.update({'app:' + model_admin.app_config.name: {'title': model_admin.app_config.verbose_name, 'menus': model_admin.app_config.init_menu(), 'first_icon': model_admin.app_config.icon}}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return menu |
def update(self, time):
""" Update acceleration. Accounts for the importance and
priority (order) of multiple behaviors. """
# .... I feel this stuff could be done a lot better.
total_acceleration = Vector.null()
max_jerk = self.max_acceleration
for behavior in self.behaviors:
acceleration, importance = behavior.update()
weighted_acceleration = acceleration * importance
"""
if max_jerk >= weighted_acceleration.magnitude:
max_jerk -= weighted_acceleration.magnitude
total_acceleration += weighted_acceleration
elif max_jerk > 0 and max_jerk < weighted_acceleration.magnitude:
total_acceleration += weighted_acceleration.normal * max_jerk
break
else:
break """
total_acceleration += weighted_acceleration
self.acceleration = total_acceleration
# Update position and velocity.
Sprite.update(self, time)
# Update facing direction.
if self.velocity.magnitude > 0.0:
self.facing = self.velocity.normal | def function[update, parameter[self, time]]:
constant[ Update acceleration. Accounts for the importance and
priority (order) of multiple behaviors. ]
variable[total_acceleration] assign[=] call[name[Vector].null, parameter[]]
variable[max_jerk] assign[=] name[self].max_acceleration
for taget[name[behavior]] in starred[name[self].behaviors] begin[:]
<ast.Tuple object at 0x7da1b0022680> assign[=] call[name[behavior].update, parameter[]]
variable[weighted_acceleration] assign[=] binary_operation[name[acceleration] * name[importance]]
constant[
if max_jerk >= weighted_acceleration.magnitude:
max_jerk -= weighted_acceleration.magnitude
total_acceleration += weighted_acceleration
elif max_jerk > 0 and max_jerk < weighted_acceleration.magnitude:
total_acceleration += weighted_acceleration.normal * max_jerk
break
else:
break ]
<ast.AugAssign object at 0x7da1b0021a80>
name[self].acceleration assign[=] name[total_acceleration]
call[name[Sprite].update, parameter[name[self], name[time]]]
if compare[name[self].velocity.magnitude greater[>] constant[0.0]] begin[:]
name[self].facing assign[=] name[self].velocity.normal | keyword[def] identifier[update] ( identifier[self] , identifier[time] ):
literal[string]
identifier[total_acceleration] = identifier[Vector] . identifier[null] ()
identifier[max_jerk] = identifier[self] . identifier[max_acceleration]
keyword[for] identifier[behavior] keyword[in] identifier[self] . identifier[behaviors] :
identifier[acceleration] , identifier[importance] = identifier[behavior] . identifier[update] ()
identifier[weighted_acceleration] = identifier[acceleration] * identifier[importance]
literal[string]
identifier[total_acceleration] += identifier[weighted_acceleration]
identifier[self] . identifier[acceleration] = identifier[total_acceleration]
identifier[Sprite] . identifier[update] ( identifier[self] , identifier[time] )
keyword[if] identifier[self] . identifier[velocity] . identifier[magnitude] > literal[int] :
identifier[self] . identifier[facing] = identifier[self] . identifier[velocity] . identifier[normal] | def update(self, time):
""" Update acceleration. Accounts for the importance and
priority (order) of multiple behaviors. """
# .... I feel this stuff could be done a lot better.
total_acceleration = Vector.null()
max_jerk = self.max_acceleration
for behavior in self.behaviors:
(acceleration, importance) = behavior.update()
weighted_acceleration = acceleration * importance
' \n if max_jerk >= weighted_acceleration.magnitude:\n max_jerk -= weighted_acceleration.magnitude\n total_acceleration += weighted_acceleration\n elif max_jerk > 0 and max_jerk < weighted_acceleration.magnitude:\n total_acceleration += weighted_acceleration.normal * max_jerk\n break\n else:\n break '
total_acceleration += weighted_acceleration # depends on [control=['for'], data=['behavior']]
self.acceleration = total_acceleration
# Update position and velocity.
Sprite.update(self, time)
# Update facing direction.
if self.velocity.magnitude > 0.0:
self.facing = self.velocity.normal # depends on [control=['if'], data=[]] |
def to_file(file, array):
"""Wrapper around ndarray.tofile to support any file-like object"""
try:
array.tofile(file)
except (TypeError, IOError, UnsupportedOperation):
# tostring actually returns bytes
file.write(array.tostring()) | def function[to_file, parameter[file, array]]:
constant[Wrapper around ndarray.tofile to support any file-like object]
<ast.Try object at 0x7da20c7c8b80> | keyword[def] identifier[to_file] ( identifier[file] , identifier[array] ):
literal[string]
keyword[try] :
identifier[array] . identifier[tofile] ( identifier[file] )
keyword[except] ( identifier[TypeError] , identifier[IOError] , identifier[UnsupportedOperation] ):
identifier[file] . identifier[write] ( identifier[array] . identifier[tostring] ()) | def to_file(file, array):
"""Wrapper around ndarray.tofile to support any file-like object"""
try:
array.tofile(file) # depends on [control=['try'], data=[]]
except (TypeError, IOError, UnsupportedOperation):
# tostring actually returns bytes
file.write(array.tostring()) # depends on [control=['except'], data=[]] |
def l2_distance_sq(t1, t2, name=None):
"""Square of l2 distance between t1 and t2.
Args:
t1: A tensor.
t2: A tensor that is the same size as t1.
name: Optional name for this op.
Returns:
The l2 distance between t1 and t2.
"""
with tf.name_scope(name, 'l2_distance_sq', [t1, t2]) as scope:
t1 = tf.convert_to_tensor(t1, name='t1')
t2 = tf.convert_to_tensor(t2, name='t2')
return length_squared(tf.subtract(t1, t2), name=scope) | def function[l2_distance_sq, parameter[t1, t2, name]]:
constant[Square of l2 distance between t1 and t2.
Args:
t1: A tensor.
t2: A tensor that is the same size as t1.
name: Optional name for this op.
Returns:
The l2 distance between t1 and t2.
]
with call[name[tf].name_scope, parameter[name[name], constant[l2_distance_sq], list[[<ast.Name object at 0x7da2047e8280>, <ast.Name object at 0x7da2047e97b0>]]]] begin[:]
variable[t1] assign[=] call[name[tf].convert_to_tensor, parameter[name[t1]]]
variable[t2] assign[=] call[name[tf].convert_to_tensor, parameter[name[t2]]]
return[call[name[length_squared], parameter[call[name[tf].subtract, parameter[name[t1], name[t2]]]]]] | keyword[def] identifier[l2_distance_sq] ( identifier[t1] , identifier[t2] , identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] , literal[string] ,[ identifier[t1] , identifier[t2] ]) keyword[as] identifier[scope] :
identifier[t1] = identifier[tf] . identifier[convert_to_tensor] ( identifier[t1] , identifier[name] = literal[string] )
identifier[t2] = identifier[tf] . identifier[convert_to_tensor] ( identifier[t2] , identifier[name] = literal[string] )
keyword[return] identifier[length_squared] ( identifier[tf] . identifier[subtract] ( identifier[t1] , identifier[t2] ), identifier[name] = identifier[scope] ) | def l2_distance_sq(t1, t2, name=None):
"""Square of l2 distance between t1 and t2.
Args:
t1: A tensor.
t2: A tensor that is the same size as t1.
name: Optional name for this op.
Returns:
The l2 distance between t1 and t2.
"""
with tf.name_scope(name, 'l2_distance_sq', [t1, t2]) as scope:
t1 = tf.convert_to_tensor(t1, name='t1')
t2 = tf.convert_to_tensor(t2, name='t2')
return length_squared(tf.subtract(t1, t2), name=scope) # depends on [control=['with'], data=['scope']] |
def init_session(self, get_token=True):
"""
init a new oauth2 session that is required to access the cloud
:param bool get_token: if True, a token will be obtained, after
the session has been created
"""
if (self._client_id is None) or (self._client_secret is None):
sys.exit(
"Please make sure to set the client id and client secret "
"via the constructor, the environment variables or the config "
"file; otherwise, the LaMetric cloud cannot be accessed. "
"Abort!"
)
self._session = OAuth2Session(
client=BackendApplicationClient(client_id=self._client_id)
)
if get_token is True:
# get oauth token
self.get_token() | def function[init_session, parameter[self, get_token]]:
constant[
init a new oauth2 session that is required to access the cloud
:param bool get_token: if True, a token will be obtained, after
the session has been created
]
if <ast.BoolOp object at 0x7da2054a7490> begin[:]
call[name[sys].exit, parameter[constant[Please make sure to set the client id and client secret via the constructor, the environment variables or the config file; otherwise, the LaMetric cloud cannot be accessed. Abort!]]]
name[self]._session assign[=] call[name[OAuth2Session], parameter[]]
if compare[name[get_token] is constant[True]] begin[:]
call[name[self].get_token, parameter[]] | keyword[def] identifier[init_session] ( identifier[self] , identifier[get_token] = keyword[True] ):
literal[string]
keyword[if] ( identifier[self] . identifier[_client_id] keyword[is] keyword[None] ) keyword[or] ( identifier[self] . identifier[_client_secret] keyword[is] keyword[None] ):
identifier[sys] . identifier[exit] (
literal[string]
literal[string]
literal[string]
literal[string]
)
identifier[self] . identifier[_session] = identifier[OAuth2Session] (
identifier[client] = identifier[BackendApplicationClient] ( identifier[client_id] = identifier[self] . identifier[_client_id] )
)
keyword[if] identifier[get_token] keyword[is] keyword[True] :
identifier[self] . identifier[get_token] () | def init_session(self, get_token=True):
"""
init a new oauth2 session that is required to access the cloud
:param bool get_token: if True, a token will be obtained, after
the session has been created
"""
if self._client_id is None or self._client_secret is None:
sys.exit('Please make sure to set the client id and client secret via the constructor, the environment variables or the config file; otherwise, the LaMetric cloud cannot be accessed. Abort!') # depends on [control=['if'], data=[]]
self._session = OAuth2Session(client=BackendApplicationClient(client_id=self._client_id))
if get_token is True:
# get oauth token
self.get_token() # depends on [control=['if'], data=[]] |
def arctic_setting(setting_name, valid_options=None):
"""
Tries to get a setting from the django settings, if not available defaults
to the one defined in defaults.py
"""
try:
value = getattr(settings, setting_name)
if valid_options and value not in valid_options:
error_message = "Invalid value for {}, must be one of: {}".format(
setting_name, str(valid_options)
)
raise ImproperlyConfigured(error_message)
except AttributeError:
pass
return getattr(settings, setting_name, getattr(defaults, setting_name)) | def function[arctic_setting, parameter[setting_name, valid_options]]:
constant[
Tries to get a setting from the django settings, if not available defaults
to the one defined in defaults.py
]
<ast.Try object at 0x7da1b0415870>
return[call[name[getattr], parameter[name[settings], name[setting_name], call[name[getattr], parameter[name[defaults], name[setting_name]]]]]] | keyword[def] identifier[arctic_setting] ( identifier[setting_name] , identifier[valid_options] = keyword[None] ):
literal[string]
keyword[try] :
identifier[value] = identifier[getattr] ( identifier[settings] , identifier[setting_name] )
keyword[if] identifier[valid_options] keyword[and] identifier[value] keyword[not] keyword[in] identifier[valid_options] :
identifier[error_message] = literal[string] . identifier[format] (
identifier[setting_name] , identifier[str] ( identifier[valid_options] )
)
keyword[raise] identifier[ImproperlyConfigured] ( identifier[error_message] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[return] identifier[getattr] ( identifier[settings] , identifier[setting_name] , identifier[getattr] ( identifier[defaults] , identifier[setting_name] )) | def arctic_setting(setting_name, valid_options=None):
"""
Tries to get a setting from the django settings, if not available defaults
to the one defined in defaults.py
"""
try:
value = getattr(settings, setting_name)
if valid_options and value not in valid_options:
error_message = 'Invalid value for {}, must be one of: {}'.format(setting_name, str(valid_options))
raise ImproperlyConfigured(error_message) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
return getattr(settings, setting_name, getattr(defaults, setting_name)) |
def on_action_begin(self, action, logs={}):
""" Called at beginning of each action for each callback in callbackList"""
for callback in self.callbacks:
if callable(getattr(callback, 'on_action_begin', None)):
callback.on_action_begin(action, logs=logs) | def function[on_action_begin, parameter[self, action, logs]]:
constant[ Called at beginning of each action for each callback in callbackList]
for taget[name[callback]] in starred[name[self].callbacks] begin[:]
if call[name[callable], parameter[call[name[getattr], parameter[name[callback], constant[on_action_begin], constant[None]]]]] begin[:]
call[name[callback].on_action_begin, parameter[name[action]]] | keyword[def] identifier[on_action_begin] ( identifier[self] , identifier[action] , identifier[logs] ={}):
literal[string]
keyword[for] identifier[callback] keyword[in] identifier[self] . identifier[callbacks] :
keyword[if] identifier[callable] ( identifier[getattr] ( identifier[callback] , literal[string] , keyword[None] )):
identifier[callback] . identifier[on_action_begin] ( identifier[action] , identifier[logs] = identifier[logs] ) | def on_action_begin(self, action, logs={}):
""" Called at beginning of each action for each callback in callbackList"""
for callback in self.callbacks:
if callable(getattr(callback, 'on_action_begin', None)):
callback.on_action_begin(action, logs=logs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['callback']] |
def clean_cable_content(content):
"""\
Removes content like "1. (C)" from the content.
`content`
The content of the cable.
"""
for pattern, subst in _CLEAN_PATTERNS:
content = pattern.sub(subst, content)
return content | def function[clean_cable_content, parameter[content]]:
constant[ Removes content like "1. (C)" from the content.
`content`
The content of the cable.
]
for taget[tuple[[<ast.Name object at 0x7da20c993370>, <ast.Name object at 0x7da20c993520>]]] in starred[name[_CLEAN_PATTERNS]] begin[:]
variable[content] assign[=] call[name[pattern].sub, parameter[name[subst], name[content]]]
return[name[content]] | keyword[def] identifier[clean_cable_content] ( identifier[content] ):
literal[string]
keyword[for] identifier[pattern] , identifier[subst] keyword[in] identifier[_CLEAN_PATTERNS] :
identifier[content] = identifier[pattern] . identifier[sub] ( identifier[subst] , identifier[content] )
keyword[return] identifier[content] | def clean_cable_content(content):
""" Removes content like "1. (C)" from the content.
`content`
The content of the cable.
"""
for (pattern, subst) in _CLEAN_PATTERNS:
content = pattern.sub(subst, content) # depends on [control=['for'], data=[]]
return content |
def create(self, name, site_element):
"""
Create a VPN site for an internal or external gateway
:param str name: name of site
:param list site_element: list of protected networks/hosts
:type site_element: list[str,Element]
:raises CreateElementFailed: create element failed with reason
:return: href of new element
:rtype: str
"""
site_element = element_resolver(site_element)
json = {
'name': name,
'site_element': site_element}
return ElementCreator(
self.__class__,
href=self.href,
json=json) | def function[create, parameter[self, name, site_element]]:
constant[
Create a VPN site for an internal or external gateway
:param str name: name of site
:param list site_element: list of protected networks/hosts
:type site_element: list[str,Element]
:raises CreateElementFailed: create element failed with reason
:return: href of new element
:rtype: str
]
variable[site_element] assign[=] call[name[element_resolver], parameter[name[site_element]]]
variable[json] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a2a110>, <ast.Constant object at 0x7da1b1a2a080>], [<ast.Name object at 0x7da1b1a29f60>, <ast.Name object at 0x7da1b1a29e70>]]
return[call[name[ElementCreator], parameter[name[self].__class__]]] | keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[site_element] ):
literal[string]
identifier[site_element] = identifier[element_resolver] ( identifier[site_element] )
identifier[json] ={
literal[string] : identifier[name] ,
literal[string] : identifier[site_element] }
keyword[return] identifier[ElementCreator] (
identifier[self] . identifier[__class__] ,
identifier[href] = identifier[self] . identifier[href] ,
identifier[json] = identifier[json] ) | def create(self, name, site_element):
"""
Create a VPN site for an internal or external gateway
:param str name: name of site
:param list site_element: list of protected networks/hosts
:type site_element: list[str,Element]
:raises CreateElementFailed: create element failed with reason
:return: href of new element
:rtype: str
"""
site_element = element_resolver(site_element)
json = {'name': name, 'site_element': site_element}
return ElementCreator(self.__class__, href=self.href, json=json) |
def is_float(tg_type, inc_array=False):
"""Tells if the given tango type is float
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is float or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_float_types, _array_float_types
if tg_type in _scalar_float_types:
return True
if not inc_array:
return False
return tg_type in _array_float_types | def function[is_float, parameter[tg_type, inc_array]]:
constant[Tells if the given tango type is float
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is float or False otherwise
:rtype: :py:obj:`bool`
]
<ast.Global object at 0x7da18dc04c70>
if compare[name[tg_type] in name[_scalar_float_types]] begin[:]
return[constant[True]]
if <ast.UnaryOp object at 0x7da18dc04eb0> begin[:]
return[constant[False]]
return[compare[name[tg_type] in name[_array_float_types]]] | keyword[def] identifier[is_float] ( identifier[tg_type] , identifier[inc_array] = keyword[False] ):
literal[string]
keyword[global] identifier[_scalar_float_types] , identifier[_array_float_types]
keyword[if] identifier[tg_type] keyword[in] identifier[_scalar_float_types] :
keyword[return] keyword[True]
keyword[if] keyword[not] identifier[inc_array] :
keyword[return] keyword[False]
keyword[return] identifier[tg_type] keyword[in] identifier[_array_float_types] | def is_float(tg_type, inc_array=False):
"""Tells if the given tango type is float
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is float or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_float_types, _array_float_types
if tg_type in _scalar_float_types:
return True # depends on [control=['if'], data=[]]
if not inc_array:
return False # depends on [control=['if'], data=[]]
return tg_type in _array_float_types |
def additional_options(self, is_pylab=False, is_sympy=False):
"""
Additional options for shell widgets that are not defined
in JupyterWidget config options
"""
options = dict(
pylab=self.get_option('pylab'),
autoload_pylab=self.get_option('pylab/autoload'),
sympy=self.get_option('symbolic_math'),
show_banner=self.get_option('show_banner')
)
if is_pylab is True:
options['autoload_pylab'] = True
options['sympy'] = False
if is_sympy is True:
options['autoload_pylab'] = False
options['sympy'] = True
return options | def function[additional_options, parameter[self, is_pylab, is_sympy]]:
constant[
Additional options for shell widgets that are not defined
in JupyterWidget config options
]
variable[options] assign[=] call[name[dict], parameter[]]
if compare[name[is_pylab] is constant[True]] begin[:]
call[name[options]][constant[autoload_pylab]] assign[=] constant[True]
call[name[options]][constant[sympy]] assign[=] constant[False]
if compare[name[is_sympy] is constant[True]] begin[:]
call[name[options]][constant[autoload_pylab]] assign[=] constant[False]
call[name[options]][constant[sympy]] assign[=] constant[True]
return[name[options]] | keyword[def] identifier[additional_options] ( identifier[self] , identifier[is_pylab] = keyword[False] , identifier[is_sympy] = keyword[False] ):
literal[string]
identifier[options] = identifier[dict] (
identifier[pylab] = identifier[self] . identifier[get_option] ( literal[string] ),
identifier[autoload_pylab] = identifier[self] . identifier[get_option] ( literal[string] ),
identifier[sympy] = identifier[self] . identifier[get_option] ( literal[string] ),
identifier[show_banner] = identifier[self] . identifier[get_option] ( literal[string] )
)
keyword[if] identifier[is_pylab] keyword[is] keyword[True] :
identifier[options] [ literal[string] ]= keyword[True]
identifier[options] [ literal[string] ]= keyword[False]
keyword[if] identifier[is_sympy] keyword[is] keyword[True] :
identifier[options] [ literal[string] ]= keyword[False]
identifier[options] [ literal[string] ]= keyword[True]
keyword[return] identifier[options] | def additional_options(self, is_pylab=False, is_sympy=False):
"""
Additional options for shell widgets that are not defined
in JupyterWidget config options
"""
options = dict(pylab=self.get_option('pylab'), autoload_pylab=self.get_option('pylab/autoload'), sympy=self.get_option('symbolic_math'), show_banner=self.get_option('show_banner'))
if is_pylab is True:
options['autoload_pylab'] = True
options['sympy'] = False # depends on [control=['if'], data=[]]
if is_sympy is True:
options['autoload_pylab'] = False
options['sympy'] = True # depends on [control=['if'], data=[]]
return options |
def _resolve_path(self, create=False):
"""
Returns a tuple of a reference to the last container in the path, and
the last component in the key path.
For example, with a self._value like this:
{
'thing': {
'another': {
'some_leaf': 5,
'one_more': {
'other_leaf': 'x'
}
}
}
}
And a self._path of: 'thing.another.some_leaf'
This will return a tuple of a reference to the 'another' dict, and
'some_leaf', allowing the setter and casting methods to directly access
the item referred to by the key path.
"""
# Split up the key path
if type(self._path) == str:
key_path = self._path.split('.')
else:
key_path = [self._path]
# Start at the root node
node = self._root._data
nodes = [self._root._data]
# Traverse along key path
while len(key_path):
# Get the next key in the key path
key = key_path.pop(0)
# See if the test could be an int for array access, if so assume it is
try:
key = int(key)
except:
pass
# If the next level doesn't exist, create it
if create:
if type(node) == dict and key not in node:
node[key] = {}
elif type(node) == list and type(key) == int and len(node) < key:
node.append([None for i in range(key-len(node))])
# Store the last node and traverse down the hierarchy
nodes.append(node)
try:
node = node[key]
except TypeError:
if type(key) == int:
raise IndexError(key)
else:
raise KeyError(key)
return (nodes[-1], key) | def function[_resolve_path, parameter[self, create]]:
constant[
Returns a tuple of a reference to the last container in the path, and
the last component in the key path.
For example, with a self._value like this:
{
'thing': {
'another': {
'some_leaf': 5,
'one_more': {
'other_leaf': 'x'
}
}
}
}
And a self._path of: 'thing.another.some_leaf'
This will return a tuple of a reference to the 'another' dict, and
'some_leaf', allowing the setter and casting methods to directly access
the item referred to by the key path.
]
if compare[call[name[type], parameter[name[self]._path]] equal[==] name[str]] begin[:]
variable[key_path] assign[=] call[name[self]._path.split, parameter[constant[.]]]
variable[node] assign[=] name[self]._root._data
variable[nodes] assign[=] list[[<ast.Attribute object at 0x7da1b0370400>]]
while call[name[len], parameter[name[key_path]]] begin[:]
variable[key] assign[=] call[name[key_path].pop, parameter[constant[0]]]
<ast.Try object at 0x7da1b0210280>
if name[create] begin[:]
if <ast.BoolOp object at 0x7da1b0210160> begin[:]
call[name[node]][name[key]] assign[=] dictionary[[], []]
call[name[nodes].append, parameter[name[node]]]
<ast.Try object at 0x7da1b0242e30>
return[tuple[[<ast.Subscript object at 0x7da1b0242d10>, <ast.Name object at 0x7da1b0242da0>]]] | keyword[def] identifier[_resolve_path] ( identifier[self] , identifier[create] = keyword[False] ):
literal[string]
keyword[if] identifier[type] ( identifier[self] . identifier[_path] )== identifier[str] :
identifier[key_path] = identifier[self] . identifier[_path] . identifier[split] ( literal[string] )
keyword[else] :
identifier[key_path] =[ identifier[self] . identifier[_path] ]
identifier[node] = identifier[self] . identifier[_root] . identifier[_data]
identifier[nodes] =[ identifier[self] . identifier[_root] . identifier[_data] ]
keyword[while] identifier[len] ( identifier[key_path] ):
identifier[key] = identifier[key_path] . identifier[pop] ( literal[int] )
keyword[try] :
identifier[key] = identifier[int] ( identifier[key] )
keyword[except] :
keyword[pass]
keyword[if] identifier[create] :
keyword[if] identifier[type] ( identifier[node] )== identifier[dict] keyword[and] identifier[key] keyword[not] keyword[in] identifier[node] :
identifier[node] [ identifier[key] ]={}
keyword[elif] identifier[type] ( identifier[node] )== identifier[list] keyword[and] identifier[type] ( identifier[key] )== identifier[int] keyword[and] identifier[len] ( identifier[node] )< identifier[key] :
identifier[node] . identifier[append] ([ keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[key] - identifier[len] ( identifier[node] ))])
identifier[nodes] . identifier[append] ( identifier[node] )
keyword[try] :
identifier[node] = identifier[node] [ identifier[key] ]
keyword[except] identifier[TypeError] :
keyword[if] identifier[type] ( identifier[key] )== identifier[int] :
keyword[raise] identifier[IndexError] ( identifier[key] )
keyword[else] :
keyword[raise] identifier[KeyError] ( identifier[key] )
keyword[return] ( identifier[nodes] [- literal[int] ], identifier[key] ) | def _resolve_path(self, create=False):
"""
Returns a tuple of a reference to the last container in the path, and
the last component in the key path.
For example, with a self._value like this:
{
'thing': {
'another': {
'some_leaf': 5,
'one_more': {
'other_leaf': 'x'
}
}
}
}
And a self._path of: 'thing.another.some_leaf'
This will return a tuple of a reference to the 'another' dict, and
'some_leaf', allowing the setter and casting methods to directly access
the item referred to by the key path.
"""
# Split up the key path
if type(self._path) == str:
key_path = self._path.split('.') # depends on [control=['if'], data=[]]
else:
key_path = [self._path]
# Start at the root node
node = self._root._data
nodes = [self._root._data]
# Traverse along key path
while len(key_path):
# Get the next key in the key path
key = key_path.pop(0)
# See if the test could be an int for array access, if so assume it is
try:
key = int(key) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
# If the next level doesn't exist, create it
if create:
if type(node) == dict and key not in node:
node[key] = {} # depends on [control=['if'], data=[]]
elif type(node) == list and type(key) == int and (len(node) < key):
node.append([None for i in range(key - len(node))]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Store the last node and traverse down the hierarchy
nodes.append(node)
try:
node = node[key] # depends on [control=['try'], data=[]]
except TypeError:
if type(key) == int:
raise IndexError(key) # depends on [control=['if'], data=[]]
else:
raise KeyError(key) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
return (nodes[-1], key) |
def _manifest(self):
"""Return manifest content."""
if self._manifest_cache is None:
self._manifest_cache = self._storage_broker.get_manifest()
return self._manifest_cache | def function[_manifest, parameter[self]]:
constant[Return manifest content.]
if compare[name[self]._manifest_cache is constant[None]] begin[:]
name[self]._manifest_cache assign[=] call[name[self]._storage_broker.get_manifest, parameter[]]
return[name[self]._manifest_cache] | keyword[def] identifier[_manifest] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_manifest_cache] keyword[is] keyword[None] :
identifier[self] . identifier[_manifest_cache] = identifier[self] . identifier[_storage_broker] . identifier[get_manifest] ()
keyword[return] identifier[self] . identifier[_manifest_cache] | def _manifest(self):
"""Return manifest content."""
if self._manifest_cache is None:
self._manifest_cache = self._storage_broker.get_manifest() # depends on [control=['if'], data=[]]
return self._manifest_cache |
def validate_token(self, token, expected_data=None):
"""Validate secret link token.
:param token: Token value.
:param expected_data: A dictionary of key/values that must be present
in the data part of the token (i.e. included via ``extra_data`` in
``create_token``).
"""
try:
# Load token and remove random data.
data = self.load_token(token)
# Compare expected data with data in token.
if expected_data:
for k in expected_data:
if expected_data[k] != data["data"].get(k):
return None
return data
except BadData:
return None | def function[validate_token, parameter[self, token, expected_data]]:
constant[Validate secret link token.
:param token: Token value.
:param expected_data: A dictionary of key/values that must be present
in the data part of the token (i.e. included via ``extra_data`` in
``create_token``).
]
<ast.Try object at 0x7da20c795c30> | keyword[def] identifier[validate_token] ( identifier[self] , identifier[token] , identifier[expected_data] = keyword[None] ):
literal[string]
keyword[try] :
identifier[data] = identifier[self] . identifier[load_token] ( identifier[token] )
keyword[if] identifier[expected_data] :
keyword[for] identifier[k] keyword[in] identifier[expected_data] :
keyword[if] identifier[expected_data] [ identifier[k] ]!= identifier[data] [ literal[string] ]. identifier[get] ( identifier[k] ):
keyword[return] keyword[None]
keyword[return] identifier[data]
keyword[except] identifier[BadData] :
keyword[return] keyword[None] | def validate_token(self, token, expected_data=None):
"""Validate secret link token.
:param token: Token value.
:param expected_data: A dictionary of key/values that must be present
in the data part of the token (i.e. included via ``extra_data`` in
``create_token``).
"""
try:
# Load token and remove random data.
data = self.load_token(token)
# Compare expected data with data in token.
if expected_data:
for k in expected_data:
if expected_data[k] != data['data'].get(k):
return None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
return data # depends on [control=['try'], data=[]]
except BadData:
return None # depends on [control=['except'], data=[]] |
def from_cryptography(cls, crypto_crl):
"""
Construct based on a ``cryptography`` *crypto_crl*.
:param crypto_crl: A ``cryptography`` certificate revocation list
:type crypto_crl: ``cryptography.x509.CertificateRevocationList``
:rtype: CRL
.. versionadded:: 17.1.0
"""
if not isinstance(crypto_crl, x509.CertificateRevocationList):
raise TypeError("Must be a certificate revocation list")
crl = cls()
crl._crl = crypto_crl._x509_crl
return crl | def function[from_cryptography, parameter[cls, crypto_crl]]:
constant[
Construct based on a ``cryptography`` *crypto_crl*.
:param crypto_crl: A ``cryptography`` certificate revocation list
:type crypto_crl: ``cryptography.x509.CertificateRevocationList``
:rtype: CRL
.. versionadded:: 17.1.0
]
if <ast.UnaryOp object at 0x7da1b028ccd0> begin[:]
<ast.Raise object at 0x7da1b028f490>
variable[crl] assign[=] call[name[cls], parameter[]]
name[crl]._crl assign[=] name[crypto_crl]._x509_crl
return[name[crl]] | keyword[def] identifier[from_cryptography] ( identifier[cls] , identifier[crypto_crl] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[crypto_crl] , identifier[x509] . identifier[CertificateRevocationList] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[crl] = identifier[cls] ()
identifier[crl] . identifier[_crl] = identifier[crypto_crl] . identifier[_x509_crl]
keyword[return] identifier[crl] | def from_cryptography(cls, crypto_crl):
"""
Construct based on a ``cryptography`` *crypto_crl*.
:param crypto_crl: A ``cryptography`` certificate revocation list
:type crypto_crl: ``cryptography.x509.CertificateRevocationList``
:rtype: CRL
.. versionadded:: 17.1.0
"""
if not isinstance(crypto_crl, x509.CertificateRevocationList):
raise TypeError('Must be a certificate revocation list') # depends on [control=['if'], data=[]]
crl = cls()
crl._crl = crypto_crl._x509_crl
return crl |
def format_field(self, value: Any, spec: str) -> str:
"""Method of string.Formatter that specifies the output of format()."""
from cirq import ops # HACK: avoids cyclic dependency.
if isinstance(value, (float, int)):
if isinstance(value, float):
value = round(value, self.precision)
if spec == 'half_turns':
value = 'pi*{}'.format(value) if value != 0 else '0'
spec = ''
elif isinstance(value, ops.Qid):
value = self.qubit_id_map[value]
elif isinstance(value, str) and spec == 'meas':
value = self.meas_key_id_map[value]
spec = ''
return super().format_field(value, spec) | def function[format_field, parameter[self, value, spec]]:
constant[Method of string.Formatter that specifies the output of format().]
from relative_module[cirq] import module[ops]
if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da1b1f48a30>, <ast.Name object at 0x7da1b1f480a0>]]]] begin[:]
if call[name[isinstance], parameter[name[value], name[float]]] begin[:]
variable[value] assign[=] call[name[round], parameter[name[value], name[self].precision]]
if compare[name[spec] equal[==] constant[half_turns]] begin[:]
variable[value] assign[=] <ast.IfExp object at 0x7da1b1f4a500>
variable[spec] assign[=] constant[]
return[call[call[name[super], parameter[]].format_field, parameter[name[value], name[spec]]]] | keyword[def] identifier[format_field] ( identifier[self] , identifier[value] : identifier[Any] , identifier[spec] : identifier[str] )-> identifier[str] :
literal[string]
keyword[from] identifier[cirq] keyword[import] identifier[ops]
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[float] , identifier[int] )):
keyword[if] identifier[isinstance] ( identifier[value] , identifier[float] ):
identifier[value] = identifier[round] ( identifier[value] , identifier[self] . identifier[precision] )
keyword[if] identifier[spec] == literal[string] :
identifier[value] = literal[string] . identifier[format] ( identifier[value] ) keyword[if] identifier[value] != literal[int] keyword[else] literal[string]
identifier[spec] = literal[string]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[ops] . identifier[Qid] ):
identifier[value] = identifier[self] . identifier[qubit_id_map] [ identifier[value] ]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[str] ) keyword[and] identifier[spec] == literal[string] :
identifier[value] = identifier[self] . identifier[meas_key_id_map] [ identifier[value] ]
identifier[spec] = literal[string]
keyword[return] identifier[super] (). identifier[format_field] ( identifier[value] , identifier[spec] ) | def format_field(self, value: Any, spec: str) -> str:
"""Method of string.Formatter that specifies the output of format()."""
from cirq import ops # HACK: avoids cyclic dependency.
if isinstance(value, (float, int)):
if isinstance(value, float):
value = round(value, self.precision) # depends on [control=['if'], data=[]]
if spec == 'half_turns':
value = 'pi*{}'.format(value) if value != 0 else '0'
spec = '' # depends on [control=['if'], data=['spec']] # depends on [control=['if'], data=[]]
elif isinstance(value, ops.Qid):
value = self.qubit_id_map[value] # depends on [control=['if'], data=[]]
elif isinstance(value, str) and spec == 'meas':
value = self.meas_key_id_map[value]
spec = '' # depends on [control=['if'], data=[]]
return super().format_field(value, spec) |
def close(self):
"""End the report."""
endpoint = self.endpoint.replace("/api/v1/spans", "")
logger.debug("Zipkin trace may be located at this URL {}/traces/{}".format(endpoint, self.trace_id)) | def function[close, parameter[self]]:
constant[End the report.]
variable[endpoint] assign[=] call[name[self].endpoint.replace, parameter[constant[/api/v1/spans], constant[]]]
call[name[logger].debug, parameter[call[constant[Zipkin trace may be located at this URL {}/traces/{}].format, parameter[name[endpoint], name[self].trace_id]]]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[endpoint] = identifier[self] . identifier[endpoint] . identifier[replace] ( literal[string] , literal[string] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[endpoint] , identifier[self] . identifier[trace_id] )) | def close(self):
"""End the report."""
endpoint = self.endpoint.replace('/api/v1/spans', '')
logger.debug('Zipkin trace may be located at this URL {}/traces/{}'.format(endpoint, self.trace_id)) |
def step(h, logy=None, axes=None, **kwargs):
"""
Make a matplotlib step plot from a ROOT histogram.
Parameters
----------
h : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the y range between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's fill_between function.
Returns
-------
Returns the value from matplotlib's fill_between function.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
_set_defaults(h, kwargs, ['common', 'line'])
if kwargs.get('color') is None:
kwargs['color'] = h.GetLineColor('mpl')
y = np.array(list(h.y()) + [0.])
if logy:
np.clip(y, 1E-300, 1E300, out=y)
return axes.step(list(h.xedges()), y, where='post', **kwargs) | def function[step, parameter[h, logy, axes]]:
constant[
Make a matplotlib step plot from a ROOT histogram.
Parameters
----------
h : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the y range between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's fill_between function.
Returns
-------
Returns the value from matplotlib's fill_between function.
]
if compare[name[axes] is constant[None]] begin[:]
variable[axes] assign[=] call[name[plt].gca, parameter[]]
if compare[name[logy] is constant[None]] begin[:]
variable[logy] assign[=] compare[call[name[axes].get_yscale, parameter[]] equal[==] constant[log]]
call[name[_set_defaults], parameter[name[h], name[kwargs], list[[<ast.Constant object at 0x7da1b11bed40>, <ast.Constant object at 0x7da1b11bf4c0>]]]]
if compare[call[name[kwargs].get, parameter[constant[color]]] is constant[None]] begin[:]
call[name[kwargs]][constant[color]] assign[=] call[name[h].GetLineColor, parameter[constant[mpl]]]
variable[y] assign[=] call[name[np].array, parameter[binary_operation[call[name[list], parameter[call[name[h].y, parameter[]]]] + list[[<ast.Constant object at 0x7da1b11bc5e0>]]]]]
if name[logy] begin[:]
call[name[np].clip, parameter[name[y], constant[1e-300], constant[1e+300]]]
return[call[name[axes].step, parameter[call[name[list], parameter[call[name[h].xedges, parameter[]]]], name[y]]]] | keyword[def] identifier[step] ( identifier[h] , identifier[logy] = keyword[None] , identifier[axes] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[axes] keyword[is] keyword[None] :
identifier[axes] = identifier[plt] . identifier[gca] ()
keyword[if] identifier[logy] keyword[is] keyword[None] :
identifier[logy] = identifier[axes] . identifier[get_yscale] ()== literal[string]
identifier[_set_defaults] ( identifier[h] , identifier[kwargs] ,[ literal[string] , literal[string] ])
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ) keyword[is] keyword[None] :
identifier[kwargs] [ literal[string] ]= identifier[h] . identifier[GetLineColor] ( literal[string] )
identifier[y] = identifier[np] . identifier[array] ( identifier[list] ( identifier[h] . identifier[y] ())+[ literal[int] ])
keyword[if] identifier[logy] :
identifier[np] . identifier[clip] ( identifier[y] , literal[int] , literal[int] , identifier[out] = identifier[y] )
keyword[return] identifier[axes] . identifier[step] ( identifier[list] ( identifier[h] . identifier[xedges] ()), identifier[y] , identifier[where] = literal[string] ,** identifier[kwargs] ) | def step(h, logy=None, axes=None, **kwargs):
"""
Make a matplotlib step plot from a ROOT histogram.
Parameters
----------
h : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the y range between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's fill_between function.
Returns
-------
Returns the value from matplotlib's fill_between function.
"""
if axes is None:
axes = plt.gca() # depends on [control=['if'], data=['axes']]
if logy is None:
logy = axes.get_yscale() == 'log' # depends on [control=['if'], data=['logy']]
_set_defaults(h, kwargs, ['common', 'line'])
if kwargs.get('color') is None:
kwargs['color'] = h.GetLineColor('mpl') # depends on [control=['if'], data=[]]
y = np.array(list(h.y()) + [0.0])
if logy:
np.clip(y, 1e-300, 1e+300, out=y) # depends on [control=['if'], data=[]]
return axes.step(list(h.xedges()), y, where='post', **kwargs) |
def pgcd(numa, numb):
    """
    Calculate the greatest common divisor (GCD) of two numbers.

    Integer inputs yield an ``int`` and ``Fraction`` inputs yield a
    ``Fraction``.  Any other numeric inputs are treated as floats and yield a
    ``float``; their fractional parts are first snapped to a "sane" resolution
    via ``Fraction(...).limit_denominator()``.

    :param numa: First number
    :type numa: number
    :param numb: Second number
    :type numb: number
    :rtype: number
    """
    # Detect integers by value comparison so Numpy scalar types are handled
    # without actually importing (and package depending on) Numpy.
    both_int = (int(numa) == numa) and (int(numb) == numb)
    both_fraction = isinstance(numa, Fraction) and isinstance(numb, Fraction)
    if both_int:
        # Force plain Python ints (normalizes Numpy integer types).
        numa, numb = int(numa), int(numb)
    elif not both_fraction:
        # Force plain Python floats (normalizes Numpy float types).
        numa, numb = float(numa), float(numb)
    if not both_int and not both_fraction:
        # Limit floating numbers to a "sane" fractional part resolution.
        numa = Fraction(_no_exp(numa)).limit_denominator()
        numb = Fraction(_no_exp(numb)).limit_denominator()
    # Euclid's algorithm; Fraction remainders are re-limited so denominators
    # do not grow without bound during the iteration.
    while numb:
        remainder = numa % numb if both_int else (numa % numb).limit_denominator()
        numa, numb = numb, remainder
    if both_int:
        return int(numa)
    return numa if both_fraction else float(numa)
constant[
Calculate the greatest common divisor (GCD) of two numbers.
:param numa: First number
:type numa: number
:param numb: Second number
:type numb: number
:rtype: number
For example:
>>> import pmisc, fractions
>>> pmisc.pgcd(10, 15)
5
>>> str(pmisc.pgcd(0.05, 0.02))
'0.01'
>>> str(pmisc.pgcd(5/3.0, 2/3.0))[:6]
'0.3333'
>>> pmisc.pgcd(
... fractions.Fraction(str(5/3.0)),
... fractions.Fraction(str(2/3.0))
... )
Fraction(1, 3)
>>> pmisc.pgcd(
... fractions.Fraction(5, 3),
... fractions.Fraction(2, 3)
... )
Fraction(1, 3)
]
variable[int_args] assign[=] <ast.BoolOp object at 0x7da18bcc95d0>
variable[fraction_args] assign[=] <ast.BoolOp object at 0x7da18bccaa40>
if name[int_args] begin[:]
<ast.Tuple object at 0x7da18bcc90f0> assign[=] tuple[[<ast.Call object at 0x7da18bcca350>, <ast.Call object at 0x7da18bcc92a0>]]
if <ast.BoolOp object at 0x7da18bcc94e0> begin[:]
<ast.Tuple object at 0x7da18bccbcd0> assign[=] tuple[[<ast.Call object at 0x7da18bcc96f0>, <ast.Call object at 0x7da18bcc9840>]]
while name[numb] begin[:]
<ast.Tuple object at 0x7da18bccae00> assign[=] tuple[[<ast.Name object at 0x7da18bccbb20>, <ast.IfExp object at 0x7da18bcc8bb0>]]
return[<ast.IfExp object at 0x7da18bcca110>] | keyword[def] identifier[pgcd] ( identifier[numa] , identifier[numb] ):
literal[string]
identifier[int_args] =( identifier[int] ( identifier[numa] )== identifier[numa] ) keyword[and] ( identifier[int] ( identifier[numb] )== identifier[numb] )
identifier[fraction_args] = identifier[isinstance] ( identifier[numa] , identifier[Fraction] ) keyword[and] identifier[isinstance] ( identifier[numb] , identifier[Fraction] )
keyword[if] identifier[int_args] :
identifier[numa] , identifier[numb] = identifier[int] ( identifier[numa] ), identifier[int] ( identifier[numb] )
keyword[elif] keyword[not] identifier[fraction_args] :
identifier[numa] , identifier[numb] = identifier[float] ( identifier[numa] ), identifier[float] ( identifier[numb] )
keyword[if] ( keyword[not] identifier[int_args] ) keyword[and] ( keyword[not] identifier[fraction_args] ):
identifier[numa] , identifier[numb] =(
identifier[Fraction] ( identifier[_no_exp] ( identifier[numa] )). identifier[limit_denominator] (),
identifier[Fraction] ( identifier[_no_exp] ( identifier[numb] )). identifier[limit_denominator] (),
)
keyword[while] identifier[numb] :
identifier[numa] , identifier[numb] =(
identifier[numb] ,
( identifier[numa] % identifier[numb] keyword[if] identifier[int_args] keyword[else] ( identifier[numa] % identifier[numb] ). identifier[limit_denominator] ()),
)
keyword[return] identifier[int] ( identifier[numa] ) keyword[if] identifier[int_args] keyword[else] ( identifier[numa] keyword[if] identifier[fraction_args] keyword[else] identifier[float] ( identifier[numa] )) | def pgcd(numa, numb):
"""
Calculate the greatest common divisor (GCD) of two numbers.
:param numa: First number
:type numa: number
:param numb: Second number
:type numb: number
:rtype: number
For example:
>>> import pmisc, fractions
>>> pmisc.pgcd(10, 15)
5
>>> str(pmisc.pgcd(0.05, 0.02))
'0.01'
>>> str(pmisc.pgcd(5/3.0, 2/3.0))[:6]
'0.3333'
>>> pmisc.pgcd(
... fractions.Fraction(str(5/3.0)),
... fractions.Fraction(str(2/3.0))
... )
Fraction(1, 3)
>>> pmisc.pgcd(
... fractions.Fraction(5, 3),
... fractions.Fraction(2, 3)
... )
Fraction(1, 3)
"""
# Test for integers this way to be valid also for Numpy data types without
# actually importing (and package depending on) Numpy
int_args = int(numa) == numa and int(numb) == numb
fraction_args = isinstance(numa, Fraction) and isinstance(numb, Fraction)
# Force conversion for Numpy data types
if int_args:
(numa, numb) = (int(numa), int(numb)) # depends on [control=['if'], data=[]]
elif not fraction_args:
(numa, numb) = (float(numa), float(numb)) # depends on [control=['if'], data=[]]
# Limit floating numbers to a "sane" fractional part resolution
if not int_args and (not fraction_args):
(numa, numb) = (Fraction(_no_exp(numa)).limit_denominator(), Fraction(_no_exp(numb)).limit_denominator()) # depends on [control=['if'], data=[]]
while numb:
(numa, numb) = (numb, numa % numb if int_args else (numa % numb).limit_denominator()) # depends on [control=['while'], data=[]]
return int(numa) if int_args else numa if fraction_args else float(numa) |
def get_agenda_for_sentence(self, sentence: str) -> List[str]:
    """
    Given a ``sentence``, returns a list of actions the sentence triggers as an ``agenda``.
    The ``agenda`` can be used by a parser to guide the decoder toward likely action
    sequences.  This is a simplistic keyword-based mapping at this point, and can be
    expanded.

    Parameters
    ----------
    sentence : ``str``
        The sentence for which an agenda will be produced.
    """
    agenda = []
    # All keyword matching below is done on the lower-cased sentence.
    sentence = sentence.lower()
    # Sentence-level existence assertions.  "there is a tower " is mapped to
    # box_exists as well -- presumably towers are treated like boxes in this
    # domain; confirm against the grammar.
    if sentence.startswith("there is a box") or sentence.startswith("there is a tower "):
        agenda.append(self.terminal_productions["box_exists"])
    elif sentence.startswith("there is a "):
        agenda.append(self.terminal_productions["object_exists"])
    # NOTE(review): this literal is assumed to equal the production stored under
    # terminal_productions["box_exists"] -- verify the two stay in sync.
    if "<Set[Box]:bool> -> box_exists" not in agenda:
        # These are object filters and do not apply if we have a box_exists at the top.
        if "touch" in sentence:
            # The elif ordering encodes precedence among the touch_* productions;
            # e.g. a sentence containing both "top" and "corner" yields touch_top.
            if "top" in sentence:
                agenda.append(self.terminal_productions["touch_top"])
            elif "bottom" in sentence or "base" in sentence:
                agenda.append(self.terminal_productions["touch_bottom"])
            elif "corner" in sentence:
                agenda.append(self.terminal_productions["touch_corner"])
            elif "right" in sentence:
                agenda.append(self.terminal_productions["touch_right"])
            elif "left" in sentence:
                agenda.append(self.terminal_productions["touch_left"])
            elif "wall" in sentence or "edge" in sentence:
                agenda.append(self.terminal_productions["touch_wall"])
            else:
                # "touch" with no recognized anchor word.
                agenda.append(self.terminal_productions["touch_object"])
        else:
            # The words "top" and "bottom" may be referring to top and bottom blocks in a tower.
            if "top" in sentence:
                agenda.append(self.terminal_productions["top"])
            elif "bottom" in sentence or "base" in sentence:
                agenda.append(self.terminal_productions["bottom"])
        if " not " in sentence:
            agenda.append(self.terminal_productions["negate_filter"])
    if " contains " in sentence or " has " in sentence:
        agenda.append(self.terminal_productions["all_boxes"])
    # This takes care of shapes, colors, top, bottom, big, small etc.
    for constant, production in self.terminal_productions.items():
        # TODO(pradeep): Deal with constant names with underscores.
        if "top" in constant or "bottom" in constant:
            # We already dealt with top, bottom, touch_top and touch_bottom above.
            continue
        if constant in sentence:
            if "<Set[Object]:Set[Object]> ->" in production and "<Set[Box]:bool> -> box_exists" in agenda:
                # When a box_exists production is already on the agenda, only the
                # listed shape/color constants are specialized (via their
                # shape_/color_ prefixed productions); other object filters are skipped.
                if constant in ["square", "circle", "triangle"]:
                    agenda.append(self.terminal_productions[f"shape_{constant}"])
                elif constant in ["yellow", "blue", "black"]:
                    agenda.append(self.terminal_productions[f"color_{constant}"])
                else:
                    continue
            else:
                agenda.append(production)
    # TODO (pradeep): Rules for "member_*" productions ("tower" or "box" followed by a color,
    # shape or number...)
    number_productions = self._get_number_productions(sentence)
    for production in number_productions:
        agenda.append(production)
    if not agenda:
        # None of the rules above was triggered!  Fall back to the most generic
        # production so the returned agenda is never empty.
        if "box" in sentence:
            agenda.append(self.terminal_productions["all_boxes"])
        else:
            agenda.append(self.terminal_productions["all_objects"])
    return agenda
constant[
Given a ``sentence``, returns a list of actions the sentence triggers as an ``agenda``. The
``agenda`` can be used while by a parser to guide the decoder. sequences as possible. This
is a simplistic mapping at this point, and can be expanded.
Parameters
----------
sentence : ``str``
The sentence for which an agenda will be produced.
]
variable[agenda] assign[=] list[[]]
variable[sentence] assign[=] call[name[sentence].lower, parameter[]]
if <ast.BoolOp object at 0x7da2054a6860> begin[:]
call[name[agenda].append, parameter[call[name[self].terminal_productions][constant[box_exists]]]]
if compare[constant[<Set[Box]:bool> -> box_exists] <ast.NotIn object at 0x7da2590d7190> name[agenda]] begin[:]
if compare[constant[touch] in name[sentence]] begin[:]
if compare[constant[top] in name[sentence]] begin[:]
call[name[agenda].append, parameter[call[name[self].terminal_productions][constant[touch_top]]]]
if compare[constant[ not ] in name[sentence]] begin[:]
call[name[agenda].append, parameter[call[name[self].terminal_productions][constant[negate_filter]]]]
if <ast.BoolOp object at 0x7da2054a6cb0> begin[:]
call[name[agenda].append, parameter[call[name[self].terminal_productions][constant[all_boxes]]]]
for taget[tuple[[<ast.Name object at 0x7da2054a5c00>, <ast.Name object at 0x7da2054a7c70>]]] in starred[call[name[self].terminal_productions.items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da2054a4dc0> begin[:]
continue
if compare[name[constant] in name[sentence]] begin[:]
if <ast.BoolOp object at 0x7da2054a58d0> begin[:]
if compare[name[constant] in list[[<ast.Constant object at 0x7da2054a5cc0>, <ast.Constant object at 0x7da2054a59c0>, <ast.Constant object at 0x7da2054a4490>]]] begin[:]
call[name[agenda].append, parameter[call[name[self].terminal_productions][<ast.JoinedStr object at 0x7da2054a7250>]]]
variable[number_productions] assign[=] call[name[self]._get_number_productions, parameter[name[sentence]]]
for taget[name[production]] in starred[name[number_productions]] begin[:]
call[name[agenda].append, parameter[name[production]]]
if <ast.UnaryOp object at 0x7da18fe92020> begin[:]
if compare[constant[box] in name[sentence]] begin[:]
call[name[agenda].append, parameter[call[name[self].terminal_productions][constant[all_boxes]]]]
return[name[agenda]] | keyword[def] identifier[get_agenda_for_sentence] ( identifier[self] , identifier[sentence] : identifier[str] )-> identifier[List] [ identifier[str] ]:
literal[string]
identifier[agenda] =[]
identifier[sentence] = identifier[sentence] . identifier[lower] ()
keyword[if] identifier[sentence] . identifier[startswith] ( literal[string] ) keyword[or] identifier[sentence] . identifier[startswith] ( literal[string] ):
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] identifier[sentence] . identifier[startswith] ( literal[string] ):
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[agenda] :
keyword[if] literal[string] keyword[in] identifier[sentence] :
keyword[if] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[sentence] keyword[or] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[sentence] keyword[or] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[else] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] literal[string] keyword[in] identifier[sentence] keyword[or] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[sentence] keyword[or] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[for] identifier[constant] , identifier[production] keyword[in] identifier[self] . identifier[terminal_productions] . identifier[items] ():
keyword[if] literal[string] keyword[in] identifier[constant] keyword[or] literal[string] keyword[in] identifier[constant] :
keyword[continue]
keyword[if] identifier[constant] keyword[in] identifier[sentence] :
keyword[if] literal[string] keyword[in] identifier[production] keyword[and] literal[string] keyword[in] identifier[agenda] :
keyword[if] identifier[constant] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[elif] identifier[constant] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[else] :
keyword[continue]
keyword[else] :
identifier[agenda] . identifier[append] ( identifier[production] )
identifier[number_productions] = identifier[self] . identifier[_get_number_productions] ( identifier[sentence] )
keyword[for] identifier[production] keyword[in] identifier[number_productions] :
identifier[agenda] . identifier[append] ( identifier[production] )
keyword[if] keyword[not] identifier[agenda] :
keyword[if] literal[string] keyword[in] identifier[sentence] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[else] :
identifier[agenda] . identifier[append] ( identifier[self] . identifier[terminal_productions] [ literal[string] ])
keyword[return] identifier[agenda] | def get_agenda_for_sentence(self, sentence: str) -> List[str]:
"""
Given a ``sentence``, returns a list of actions the sentence triggers as an ``agenda``. The
``agenda`` can be used while by a parser to guide the decoder. sequences as possible. This
is a simplistic mapping at this point, and can be expanded.
Parameters
----------
sentence : ``str``
The sentence for which an agenda will be produced.
"""
agenda = []
sentence = sentence.lower()
if sentence.startswith('there is a box') or sentence.startswith('there is a tower '):
agenda.append(self.terminal_productions['box_exists']) # depends on [control=['if'], data=[]]
elif sentence.startswith('there is a '):
agenda.append(self.terminal_productions['object_exists']) # depends on [control=['if'], data=[]]
if '<Set[Box]:bool> -> box_exists' not in agenda:
# These are object filters and do not apply if we have a box_exists at the top.
if 'touch' in sentence:
if 'top' in sentence:
agenda.append(self.terminal_productions['touch_top']) # depends on [control=['if'], data=[]]
elif 'bottom' in sentence or 'base' in sentence:
agenda.append(self.terminal_productions['touch_bottom']) # depends on [control=['if'], data=[]]
elif 'corner' in sentence:
agenda.append(self.terminal_productions['touch_corner']) # depends on [control=['if'], data=[]]
elif 'right' in sentence:
agenda.append(self.terminal_productions['touch_right']) # depends on [control=['if'], data=[]]
elif 'left' in sentence:
agenda.append(self.terminal_productions['touch_left']) # depends on [control=['if'], data=[]]
elif 'wall' in sentence or 'edge' in sentence:
agenda.append(self.terminal_productions['touch_wall']) # depends on [control=['if'], data=[]]
else:
agenda.append(self.terminal_productions['touch_object']) # depends on [control=['if'], data=['sentence']]
# The words "top" and "bottom" may be referring to top and bottom blocks in a tower.
elif 'top' in sentence:
agenda.append(self.terminal_productions['top']) # depends on [control=['if'], data=[]]
elif 'bottom' in sentence or 'base' in sentence:
agenda.append(self.terminal_productions['bottom']) # depends on [control=['if'], data=[]]
if ' not ' in sentence:
agenda.append(self.terminal_productions['negate_filter']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['agenda']]
if ' contains ' in sentence or ' has ' in sentence:
agenda.append(self.terminal_productions['all_boxes']) # depends on [control=['if'], data=[]]
# This takes care of shapes, colors, top, bottom, big, small etc.
for (constant, production) in self.terminal_productions.items():
# TODO(pradeep): Deal with constant names with underscores.
if 'top' in constant or 'bottom' in constant:
# We already dealt with top, bottom, touch_top and touch_bottom above.
continue # depends on [control=['if'], data=[]]
if constant in sentence:
if '<Set[Object]:Set[Object]> ->' in production and '<Set[Box]:bool> -> box_exists' in agenda:
if constant in ['square', 'circle', 'triangle']:
agenda.append(self.terminal_productions[f'shape_{constant}']) # depends on [control=['if'], data=['constant']]
elif constant in ['yellow', 'blue', 'black']:
agenda.append(self.terminal_productions[f'color_{constant}']) # depends on [control=['if'], data=['constant']]
else:
continue # depends on [control=['if'], data=[]]
else:
agenda.append(production) # depends on [control=['if'], data=['constant']] # depends on [control=['for'], data=[]]
# TODO (pradeep): Rules for "member_*" productions ("tower" or "box" followed by a color,
# shape or number...)
number_productions = self._get_number_productions(sentence)
for production in number_productions:
agenda.append(production) # depends on [control=['for'], data=['production']]
if not agenda:
# None of the rules above was triggered!
if 'box' in sentence:
agenda.append(self.terminal_productions['all_boxes']) # depends on [control=['if'], data=[]]
else:
agenda.append(self.terminal_productions['all_objects']) # depends on [control=['if'], data=[]]
return agenda |
def create_as(access_token, subscription_id, resource_group, as_name,
              update_domains, fault_domains, location):
    '''Create availability set.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.
        resource_group (str): Azure resource group name.
        as_name (str): Name of the new availability set.
        update_domains (int): Number of update domains.
        fault_domains (int): Number of fault domains.
        location (str): Azure data center location. E.g. westus.

    Returns:
        HTTP response. JSON body of the availability set properties.
    '''
    # Assemble the ARM REST endpoint for the availability-set resource.
    endpoint = (
        '{root}/subscriptions/{sub}/resourceGroups/{rgname}'
        '/providers/Microsoft.Compute/availabilitySets/{asname}'
        '?api-version={api}'
    ).format(root=get_rm_endpoint(), sub=subscription_id,
             rgname=resource_group, asname=as_name, api=COMP_API)
    # Request body: location plus the update/fault domain counts.
    as_body = {
        'location': location,
        'properties': {
            'platformUpdateDomainCount': update_domains,
            'platformFaultDomainCount': fault_domains,
        },
    }
    return do_put(endpoint, json.dumps(as_body), access_token)
constant[Create availability set.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
as_name (str): Name of the new availability set.
update_domains (int): Number of update domains.
fault_domains (int): Number of fault domains.
location (str): Azure data center location. E.g. westus.
Returns:
HTTP response. JSON body of the availability set properties.
]
variable[endpoint] assign[=] call[constant[].join, parameter[list[[<ast.Call object at 0x7da1b04dafe0>, <ast.Constant object at 0x7da1b04d8340>, <ast.Name object at 0x7da1b04d8fd0>, <ast.Constant object at 0x7da1b04d9360>, <ast.Name object at 0x7da1b04d9db0>, <ast.Constant object at 0x7da1b04dbc40>, <ast.Name object at 0x7da1b04dbc10>, <ast.Constant object at 0x7da1b04d9a50>, <ast.Name object at 0x7da1b04d8f40>]]]]
variable[as_body] assign[=] dictionary[[<ast.Constant object at 0x7da1b04d8be0>], [<ast.Name object at 0x7da1b04db5b0>]]
variable[properties] assign[=] dictionary[[<ast.Constant object at 0x7da1b04d8ee0>], [<ast.Name object at 0x7da1b04da110>]]
call[name[properties]][constant[platformFaultDomainCount]] assign[=] name[fault_domains]
call[name[as_body]][constant[properties]] assign[=] name[properties]
variable[body] assign[=] call[name[json].dumps, parameter[name[as_body]]]
return[call[name[do_put], parameter[name[endpoint], name[body], name[access_token]]]] | keyword[def] identifier[create_as] ( identifier[access_token] , identifier[subscription_id] , identifier[resource_group] , identifier[as_name] ,
identifier[update_domains] , identifier[fault_domains] , identifier[location] ):
literal[string]
identifier[endpoint] = literal[string] . identifier[join] ([ identifier[get_rm_endpoint] (),
literal[string] , identifier[subscription_id] ,
literal[string] , identifier[resource_group] ,
literal[string] , identifier[as_name] ,
literal[string] , identifier[COMP_API] ])
identifier[as_body] ={ literal[string] : identifier[location] }
identifier[properties] ={ literal[string] : identifier[update_domains] }
identifier[properties] [ literal[string] ]= identifier[fault_domains]
identifier[as_body] [ literal[string] ]= identifier[properties]
identifier[body] = identifier[json] . identifier[dumps] ( identifier[as_body] )
keyword[return] identifier[do_put] ( identifier[endpoint] , identifier[body] , identifier[access_token] ) | def create_as(access_token, subscription_id, resource_group, as_name, update_domains, fault_domains, location):
"""Create availability set.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
as_name (str): Name of the new availability set.
update_domains (int): Number of update domains.
fault_domains (int): Number of fault domains.
location (str): Azure data center location. E.g. westus.
Returns:
HTTP response. JSON body of the availability set properties.
"""
endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', resource_group, '/providers/Microsoft.Compute/availabilitySets/', as_name, '?api-version=', COMP_API])
as_body = {'location': location}
properties = {'platformUpdateDomainCount': update_domains}
properties['platformFaultDomainCount'] = fault_domains
as_body['properties'] = properties
body = json.dumps(as_body)
return do_put(endpoint, body, access_token) |
def _ic_matrix(ics, ic_i):
    """Store the previously computed pointwise predictive accuracy values (ics) in a 2D matrix."""
    # One column per model (DataFrame row), one matrix row per observation.
    n_models = ics.shape[0]
    n_obs = len(ics[ic_i].iloc[0])
    matrix = np.zeros((n_obs, n_models))
    for col, model in enumerate(ics.index):
        pointwise = ics.loc[model][ic_i]
        # Every model must contribute exactly one value per observation.
        if len(pointwise) != n_obs:
            raise ValueError("The number of observations should be the same across all models")
        matrix[:, col] = pointwise
    return n_obs, n_models, matrix
constant[Store the previously computed pointwise predictive accuracy values (ics) in a 2D matrix.]
<ast.Tuple object at 0x7da1b1c65b40> assign[=] name[ics].shape
variable[rows] assign[=] call[name[len], parameter[call[call[name[ics]][name[ic_i]].iloc][constant[0]]]]
variable[ic_i_val] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1c641c0>, <ast.Name object at 0x7da2044c2ce0>]]]]
for taget[tuple[[<ast.Name object at 0x7da2044c3cd0>, <ast.Name object at 0x7da2044c1ed0>]]] in starred[call[name[enumerate], parameter[name[ics].index]]] begin[:]
variable[ic] assign[=] call[call[name[ics].loc][name[val]]][name[ic_i]]
if compare[call[name[len], parameter[name[ic]]] not_equal[!=] name[rows]] begin[:]
<ast.Raise object at 0x7da2044c2830>
call[name[ic_i_val]][tuple[[<ast.Slice object at 0x7da2044c1510>, <ast.Name object at 0x7da1b1c7cd00>]]] assign[=] name[ic]
return[tuple[[<ast.Name object at 0x7da1b1c7e290>, <ast.Name object at 0x7da1b1c7e3e0>, <ast.Name object at 0x7da1b1c7ff40>]]] | keyword[def] identifier[_ic_matrix] ( identifier[ics] , identifier[ic_i] ):
literal[string]
identifier[cols] , identifier[_] = identifier[ics] . identifier[shape]
identifier[rows] = identifier[len] ( identifier[ics] [ identifier[ic_i] ]. identifier[iloc] [ literal[int] ])
identifier[ic_i_val] = identifier[np] . identifier[zeros] (( identifier[rows] , identifier[cols] ))
keyword[for] identifier[idx] , identifier[val] keyword[in] identifier[enumerate] ( identifier[ics] . identifier[index] ):
identifier[ic] = identifier[ics] . identifier[loc] [ identifier[val] ][ identifier[ic_i] ]
keyword[if] identifier[len] ( identifier[ic] )!= identifier[rows] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[ic_i_val] [:, identifier[idx] ]= identifier[ic]
keyword[return] identifier[rows] , identifier[cols] , identifier[ic_i_val] | def _ic_matrix(ics, ic_i):
"""Store the previously computed pointwise predictive accuracy values (ics) in a 2D matrix."""
(cols, _) = ics.shape
rows = len(ics[ic_i].iloc[0])
ic_i_val = np.zeros((rows, cols))
for (idx, val) in enumerate(ics.index):
ic = ics.loc[val][ic_i]
if len(ic) != rows:
raise ValueError('The number of observations should be the same across all models') # depends on [control=['if'], data=[]]
ic_i_val[:, idx] = ic # depends on [control=['for'], data=[]]
return (rows, cols, ic_i_val) |
def build_caching_info_message(job_spec,
                               job_id,
                               workflow_workspace,
                               workflow_json,
                               result_path):
    """Build the caching info message with correct formatting.

    Assembles the five inputs into a dict keyed by the field names shown
    below and returns it in a single expression.
    """
    return {
        "job_spec": job_spec,
        "job_id": job_id,
        "workflow_workspace": workflow_workspace,
        "workflow_json": workflow_json,
        "result_path": result_path,
    }
constant[Build the caching info message with correct formatting.]
variable[caching_info_message] assign[=] dictionary[[<ast.Constant object at 0x7da1b0400b20>, <ast.Constant object at 0x7da1b0401480>, <ast.Constant object at 0x7da1b04008e0>, <ast.Constant object at 0x7da1b0402dd0>, <ast.Constant object at 0x7da1b0400880>], [<ast.Name object at 0x7da1b0402ce0>, <ast.Name object at 0x7da1b0400d00>, <ast.Name object at 0x7da1b0402950>, <ast.Name object at 0x7da1b0403610>, <ast.Name object at 0x7da1b04037f0>]]
return[name[caching_info_message]] | keyword[def] identifier[build_caching_info_message] ( identifier[job_spec] ,
identifier[job_id] ,
identifier[workflow_workspace] ,
identifier[workflow_json] ,
identifier[result_path] ):
literal[string]
identifier[caching_info_message] ={
literal[string] : identifier[job_spec] ,
literal[string] : identifier[job_id] ,
literal[string] : identifier[workflow_workspace] ,
literal[string] : identifier[workflow_json] ,
literal[string] : identifier[result_path]
}
keyword[return] identifier[caching_info_message] | def build_caching_info_message(job_spec, job_id, workflow_workspace, workflow_json, result_path):
"""Build the caching info message with correct formatting."""
caching_info_message = {'job_spec': job_spec, 'job_id': job_id, 'workflow_workspace': workflow_workspace, 'workflow_json': workflow_json, 'result_path': result_path}
return caching_info_message |
def get_subfolder_queries(store, label_store, folders, fid, sid):
    '''Returns [unicode].

    This returns a list of queries that can be passed on to "other"
    search engines. The list of queries is derived from the subfolder
    identified by ``fid/sid``.
    '''
    # Only subtopics of type 'text' or 'manual' carry query strings; the
    # remaining tuple fields (content id, subtopic id, url) are unused here.
    return [data
            for _cid, _subid, _url, stype, data
            in subtopics(store, folders, fid, sid)
            if stype in ('text', 'manual')]
constant[Returns [unicode].
This returns a list of queries that can be passed on to "other"
search engines. The list of queries is derived from the subfolder
identified by ``fid/sid``.
]
variable[queries] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2045657e0>, <ast.Name object at 0x7da204566680>, <ast.Name object at 0x7da204566710>, <ast.Name object at 0x7da2045671f0>, <ast.Name object at 0x7da204564520>]]] in starred[call[name[subtopics], parameter[name[store], name[folders], name[fid], name[sid]]]] begin[:]
if compare[name[stype] in tuple[[<ast.Constant object at 0x7da204565810>, <ast.Constant object at 0x7da204567100>]]] begin[:]
call[name[queries].append, parameter[name[data]]]
return[name[queries]] | keyword[def] identifier[get_subfolder_queries] ( identifier[store] , identifier[label_store] , identifier[folders] , identifier[fid] , identifier[sid] ):
literal[string]
identifier[queries] =[]
keyword[for] identifier[cid] , identifier[subid] , identifier[url] , identifier[stype] , identifier[data] keyword[in] identifier[subtopics] ( identifier[store] , identifier[folders] , identifier[fid] , identifier[sid] ):
keyword[if] identifier[stype] keyword[in] ( literal[string] , literal[string] ):
identifier[queries] . identifier[append] ( identifier[data] )
keyword[return] identifier[queries] | def get_subfolder_queries(store, label_store, folders, fid, sid):
"""Returns [unicode].
This returns a list of queries that can be passed on to "other"
search engines. The list of queries is derived from the subfolder
identified by ``fid/sid``.
"""
queries = []
for (cid, subid, url, stype, data) in subtopics(store, folders, fid, sid):
if stype in ('text', 'manual'):
queries.append(data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return queries |
def get_project_versions(self, key, expand=None):
    """
    Fetch the full list of versions defined for the given project.
    :param key: project key
    :param expand: optional comma-separated list of entities to expand
    :return: decoded response from the project versions endpoint
    """
    # Only send the 'expand' query parameter when the caller asked for it.
    params = {} if expand is None else {'expand': expand}
    return self.get('rest/api/2/project/{}/versions'.format(key), params=params)
return self.get('rest/api/2/project/{}/versions'.format(key), params=params) | def function[get_project_versions, parameter[self, key, expand]]:
constant[
Contains a full representation of a the specified project's versions.
:param key:
:param expand: the parameters to expand
:return:
]
variable[params] assign[=] dictionary[[], []]
if compare[name[expand] is_not constant[None]] begin[:]
call[name[params]][constant[expand]] assign[=] name[expand]
return[call[name[self].get, parameter[call[constant[rest/api/2/project/{}/versions].format, parameter[name[key]]]]]] | keyword[def] identifier[get_project_versions] ( identifier[self] , identifier[key] , identifier[expand] = keyword[None] ):
literal[string]
identifier[params] ={}
keyword[if] identifier[expand] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[expand]
keyword[return] identifier[self] . identifier[get] ( literal[string] . identifier[format] ( identifier[key] ), identifier[params] = identifier[params] ) | def get_project_versions(self, key, expand=None):
"""
Contains a full representation of a the specified project's versions.
:param key:
:param expand: the parameters to expand
:return:
"""
params = {}
if expand is not None:
params['expand'] = expand # depends on [control=['if'], data=['expand']]
return self.get('rest/api/2/project/{}/versions'.format(key), params=params) |
def is_hitachi(dicom_input):
    """
    Detect whether a dicom series is a Hitachi dataset.
    :param dicom_input: directory with dicom files for 1 scan of a dicom_header
    """
    # Inspect only the first header of the series.
    header = dicom_input[0]
    # Without both identifying fields we fall back to generic conversion.
    if 'Manufacturer' not in header or 'Modality' not in header:
        return False
    # Hitachi conversion applies only to MR scans from a Hitachi scanner.
    return header.Modality.upper() == 'MR' and 'HITACHI' in header.Manufacturer.upper()
return True | def function[is_hitachi, parameter[dicom_input]]:
constant[
Use this function to detect if a dicom series is a hitachi dataset
:param dicom_input: directory with dicom files for 1 scan of a dicom_header
]
variable[header] assign[=] call[name[dicom_input]][constant[0]]
if <ast.BoolOp object at 0x7da1b138ddb0> begin[:]
return[constant[False]]
if compare[call[name[header].Modality.upper, parameter[]] not_equal[!=] constant[MR]] begin[:]
return[constant[False]]
if compare[constant[HITACHI] <ast.NotIn object at 0x7da2590d7190> call[name[header].Manufacturer.upper, parameter[]]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_hitachi] ( identifier[dicom_input] ):
literal[string]
identifier[header] = identifier[dicom_input] [ literal[int] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[header] keyword[or] literal[string] keyword[not] keyword[in] identifier[header] :
keyword[return] keyword[False]
keyword[if] identifier[header] . identifier[Modality] . identifier[upper] ()!= literal[string] :
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[header] . identifier[Manufacturer] . identifier[upper] ():
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_hitachi(dicom_input):
"""
Use this function to detect if a dicom series is a hitachi dataset
:param dicom_input: directory with dicom files for 1 scan of a dicom_header
"""
# read dicom header
header = dicom_input[0]
if 'Manufacturer' not in header or 'Modality' not in header:
return False # we try generic conversion in these cases # depends on [control=['if'], data=[]]
# check if Modality is mr
if header.Modality.upper() != 'MR':
return False # depends on [control=['if'], data=[]]
# check if manufacturer is hitachi
if 'HITACHI' not in header.Manufacturer.upper():
return False # depends on [control=['if'], data=[]]
return True |
def info(self):
    """
    Print every key/value pair from the annotation file's 'info' section.
    :return:
    """
    metadata = self.dataset['info']
    for field, content in metadata.items():
        print('{}: {}'.format(field, content))
constant[
Print information about the annotation file.
:return:
]
for taget[tuple[[<ast.Name object at 0x7da1b1ef1660>, <ast.Name object at 0x7da1b1ef1930>]]] in starred[call[call[name[self].dataset][constant[info]].items, parameter[]]] begin[:]
call[name[print], parameter[call[constant[{}: {}].format, parameter[name[key], name[value]]]]] | keyword[def] identifier[info] ( identifier[self] ):
literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[dataset] [ literal[string] ]. identifier[items] ():
identifier[print] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] )) | def info(self):
"""
Print information about the annotation file.
:return:
"""
for (key, value) in self.dataset['info'].items():
print('{}: {}'.format(key, value)) # depends on [control=['for'], data=[]] |
def reorder_keys(self, keys):
    '''Reorder the stored keys.
    The :keys: argument is an iterable giving the desired new order; it
    must contain exactly the keys that are already present, otherwise a
    ValueError is raised.'''
    expected = self._set
    if len(keys) != len(expected):
        raise ValueError('The supplied number of keys does not match.')
    if set(keys) != expected:
        raise ValueError('The supplied keys do not match the current set of keys.')
    self._keys = list(keys)
constant[Accepts a :keys: parameter, an iterable of keys in the
desired new order. The :keys: parameter must contain all
existing keys.]
if compare[call[name[len], parameter[name[keys]]] not_equal[!=] call[name[len], parameter[name[self]._set]]] begin[:]
<ast.Raise object at 0x7da20c6e6020>
if compare[call[name[set], parameter[name[keys]]] not_equal[!=] name[self]._set] begin[:]
<ast.Raise object at 0x7da18f721b40>
name[self]._keys assign[=] call[name[list], parameter[name[keys]]] | keyword[def] identifier[reorder_keys] ( identifier[self] , identifier[keys] ):
literal[string]
keyword[if] identifier[len] ( identifier[keys] )!= identifier[len] ( identifier[self] . identifier[_set] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[set] ( identifier[keys] )!= identifier[self] . identifier[_set] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_keys] = identifier[list] ( identifier[keys] ) | def reorder_keys(self, keys):
"""Accepts a :keys: parameter, an iterable of keys in the
desired new order. The :keys: parameter must contain all
existing keys."""
if len(keys) != len(self._set):
raise ValueError('The supplied number of keys does not match.') # depends on [control=['if'], data=[]]
if set(keys) != self._set:
raise ValueError('The supplied keys do not match the current set of keys.') # depends on [control=['if'], data=[]]
self._keys = list(keys) |
def present(
        name,
        policy_document=None,
        policy_document_from_pillars=None,
        path=None,
        policies=None,
        policies_from_pillars=None,
        managed_policies=None,
        create_instance_profile=True,
        region=None,
        key=None,
        keyid=None,
        profile=None,
        delete_policies=True):
    '''
    Ensure the IAM role exists.
    name
        Name of the IAM role.
    policy_document
        The policy that grants an entity permission to assume the role.
        (See https://boto.readthedocs.io/en/latest/ref/iam.html#boto.iam.connection.IAMConnection.create_role)
    policy_document_from_pillars
        A pillar key that contains a role policy document. The statements
        defined here will be appended with the policy document statements
        defined in the policy_document argument.
        .. versionadded:: 2017.7.0
    path
        The path to the role/instance profile.
        (See https://boto.readthedocs.io/en/latest/ref/iam.html#boto.iam.connection.IAMConnection.create_role)
    policies
        A dict of IAM role policies.
    policies_from_pillars
        A list of pillars that contain role policy dicts. Policies in the
        pillars will be merged in the order defined in the list and key
        conflicts will be handled by later defined keys overriding earlier
        defined keys. The policies defined here will be merged with the
        policies defined in the policies argument. If keys conflict, the keys
        in the policies argument will override the keys defined in
        policies_from_pillars.
    managed_policies
        A list of (AWS or Customer) managed policies to be attached to the role.
    create_instance_profile
        A boolean of whether or not to create an instance profile and associate
        it with this role.
    region
        Region to connect to.
    key
        Secret key to be used.
    keyid
        Access key to be used.
    profile
        A dict with region, key and keyid, or a pillar key (string)
        that contains a dict with region, key and keyid.
    delete_policies
        Deletes existing policies that are not in the given list of policies. Default
        value is ``True``. If ``False`` is specified, existing policies will not be deleted
        allowing manual modifications on the IAM role to be persistent.
        .. versionadded:: 2015.8.0
    '''
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    # Build up _policy_document: statements from the pillar-provided document
    # come first, then the statements passed directly via policy_document are
    # appended.  When both supply a 'Version', the direct argument wins.
    _policy_document = {}
    if policy_document_from_pillars:
        from_pillars = __salt__['pillar.get'](policy_document_from_pillars)
        if from_pillars:
            _policy_document['Version'] = from_pillars['Version']
            _policy_document.setdefault('Statement', [])
            _policy_document['Statement'].extend(from_pillars['Statement'])
    if policy_document:
        _policy_document['Version'] = policy_document['Version']
        _policy_document.setdefault('Statement', [])
        _policy_document['Statement'].extend(policy_document['Statement'])
    _ret = _role_present(name, _policy_document, path, region, key, keyid,
                         profile)
    # Build up _policies: pillar-sourced policy dicts are merged first, so
    # keys supplied directly in `policies` override pillar-defined ones.
    if not policies:
        policies = {}
    if not policies_from_pillars:
        policies_from_pillars = []
    if not managed_policies:
        managed_policies = []
    _policies = {}
    for policy in policies_from_pillars:
        _policy = __salt__['pillar.get'](policy)
        _policies.update(_policy)
    _policies.update(policies)
    # Fold the role-creation sub-result into the state return; abort early
    # only on a hard failure (result is False, not None/unknown).
    ret['changes'] = _ret['changes']
    ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
    if not _ret['result']:
        ret['result'] = _ret['result']
        if ret['result'] is False:
            return ret
    if create_instance_profile:
        # Ensure the instance profile exists, then that it is associated with
        # the role, merging each sub-result and aborting on hard failure.
        _ret = _instance_profile_present(name, region, key, keyid, profile)
        ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
        ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
        if not _ret['result']:
            ret['result'] = _ret['result']
            if ret['result'] is False:
                return ret
        _ret = _instance_profile_associated(name, region, key, keyid, profile)
        ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
        ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
        if not _ret['result']:
            ret['result'] = _ret['result']
            if ret['result'] is False:
                return ret
    # Ensure the inline policies (optionally pruning ones not listed) and the
    # managed-policy attachments.  Failures here mark the state unsuccessful
    # but do not short-circuit the remaining steps.
    _ret = _policies_present(name, _policies, region, key, keyid, profile,
                             delete_policies)
    ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
    ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
    if not _ret['result']:
        ret['result'] = _ret['result']
    _ret = _policies_attached(name, managed_policies, region, key, keyid, profile)
    ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
    ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
    if not _ret['result']:
        ret['result'] = _ret['result']
    return ret
constant[
Ensure the IAM role exists.
name
Name of the IAM role.
policy_document
The policy that grants an entity permission to assume the role.
(See https://boto.readthedocs.io/en/latest/ref/iam.html#boto.iam.connection.IAMConnection.create_role)
policy_document_from_pillars
A pillar key that contains a role policy document. The statements
defined here will be appended with the policy document statements
defined in the policy_document argument.
.. versionadded:: 2017.7.0
path
The path to the role/instance profile.
(See https://boto.readthedocs.io/en/latest/ref/iam.html#boto.iam.connection.IAMConnection.create_role)
policies
A dict of IAM role policies.
policies_from_pillars
A list of pillars that contain role policy dicts. Policies in the
pillars will be merged in the order defined in the list and key
conflicts will be handled by later defined keys overriding earlier
defined keys. The policies defined here will be merged with the
policies defined in the policies argument. If keys conflict, the keys
in the policies argument will override the keys defined in
policies_from_pillars.
managed_policies
A list of (AWS or Customer) managed policies to be attached to the role.
create_instance_profile
A boolean of whether or not to create an instance profile and associate
it with this role.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
delete_policies
Deletes existing policies that are not in the given list of policies. Default
value is ``True``. If ``False`` is specified, existing policies will not be deleted
allowing manual modifications on the IAM role to be persistent.
.. versionadded:: 2015.8.0
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2184190>, <ast.Constant object at 0x7da1b2186890>, <ast.Constant object at 0x7da1b2184070>, <ast.Constant object at 0x7da1b21850f0>], [<ast.Name object at 0x7da1b2186fb0>, <ast.Constant object at 0x7da1b2185420>, <ast.Constant object at 0x7da1b2185510>, <ast.Dict object at 0x7da1b21854e0>]]
variable[_policy_document] assign[=] dictionary[[], []]
if name[policy_document_from_pillars] begin[:]
variable[from_pillars] assign[=] call[call[name[__salt__]][constant[pillar.get]], parameter[name[policy_document_from_pillars]]]
if name[from_pillars] begin[:]
call[name[_policy_document]][constant[Version]] assign[=] call[name[from_pillars]][constant[Version]]
call[name[_policy_document].setdefault, parameter[constant[Statement], list[[]]]]
call[call[name[_policy_document]][constant[Statement]].extend, parameter[call[name[from_pillars]][constant[Statement]]]]
if name[policy_document] begin[:]
call[name[_policy_document]][constant[Version]] assign[=] call[name[policy_document]][constant[Version]]
call[name[_policy_document].setdefault, parameter[constant[Statement], list[[]]]]
call[call[name[_policy_document]][constant[Statement]].extend, parameter[call[name[policy_document]][constant[Statement]]]]
variable[_ret] assign[=] call[name[_role_present], parameter[name[name], name[_policy_document], name[path], name[region], name[key], name[keyid], name[profile]]]
if <ast.UnaryOp object at 0x7da1b2088b20> begin[:]
variable[policies] assign[=] dictionary[[], []]
if <ast.UnaryOp object at 0x7da2044c1870> begin[:]
variable[policies_from_pillars] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b2184d30> begin[:]
variable[managed_policies] assign[=] list[[]]
variable[_policies] assign[=] dictionary[[], []]
for taget[name[policy]] in starred[name[policies_from_pillars]] begin[:]
variable[_policy] assign[=] call[call[name[__salt__]][constant[pillar.get]], parameter[name[policy]]]
call[name[_policies].update, parameter[name[_policy]]]
call[name[_policies].update, parameter[name[policies]]]
call[name[ret]][constant[changes]] assign[=] call[name[_ret]][constant[changes]]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[list[[<ast.Subscript object at 0x7da1b2185c60>, <ast.Subscript object at 0x7da1b2187880>]]]]
if <ast.UnaryOp object at 0x7da1b21844c0> begin[:]
call[name[ret]][constant[result]] assign[=] call[name[_ret]][constant[result]]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
return[name[ret]]
if name[create_instance_profile] begin[:]
variable[_ret] assign[=] call[name[_instance_profile_present], parameter[name[name], name[region], name[key], name[keyid], name[profile]]]
call[name[ret]][constant[changes]] assign[=] call[name[dictupdate].update, parameter[call[name[ret]][constant[changes]], call[name[_ret]][constant[changes]]]]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[list[[<ast.Subscript object at 0x7da1b2184310>, <ast.Subscript object at 0x7da1b2185870>]]]]
if <ast.UnaryOp object at 0x7da1b2187550> begin[:]
call[name[ret]][constant[result]] assign[=] call[name[_ret]][constant[result]]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
return[name[ret]]
variable[_ret] assign[=] call[name[_instance_profile_associated], parameter[name[name], name[region], name[key], name[keyid], name[profile]]]
call[name[ret]][constant[changes]] assign[=] call[name[dictupdate].update, parameter[call[name[ret]][constant[changes]], call[name[_ret]][constant[changes]]]]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[list[[<ast.Subscript object at 0x7da204565db0>, <ast.Subscript object at 0x7da204564610>]]]]
if <ast.UnaryOp object at 0x7da204621840> begin[:]
call[name[ret]][constant[result]] assign[=] call[name[_ret]][constant[result]]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
return[name[ret]]
variable[_ret] assign[=] call[name[_policies_present], parameter[name[name], name[_policies], name[region], name[key], name[keyid], name[profile], name[delete_policies]]]
call[name[ret]][constant[changes]] assign[=] call[name[dictupdate].update, parameter[call[name[ret]][constant[changes]], call[name[_ret]][constant[changes]]]]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[list[[<ast.Subscript object at 0x7da204623d90>, <ast.Subscript object at 0x7da204620dc0>]]]]
if <ast.UnaryOp object at 0x7da204620b50> begin[:]
call[name[ret]][constant[result]] assign[=] call[name[_ret]][constant[result]]
variable[_ret] assign[=] call[name[_policies_attached], parameter[name[name], name[managed_policies], name[region], name[key], name[keyid], name[profile]]]
call[name[ret]][constant[changes]] assign[=] call[name[dictupdate].update, parameter[call[name[ret]][constant[changes]], call[name[_ret]][constant[changes]]]]
call[name[ret]][constant[comment]] assign[=] call[constant[ ].join, parameter[list[[<ast.Subscript object at 0x7da20c7c9420>, <ast.Subscript object at 0x7da20c7c8580>]]]]
if <ast.UnaryOp object at 0x7da20c7c8490> begin[:]
call[name[ret]][constant[result]] assign[=] call[name[_ret]][constant[result]]
return[name[ret]] | keyword[def] identifier[present] (
identifier[name] ,
identifier[policy_document] = keyword[None] ,
identifier[policy_document_from_pillars] = keyword[None] ,
identifier[path] = keyword[None] ,
identifier[policies] = keyword[None] ,
identifier[policies_from_pillars] = keyword[None] ,
identifier[managed_policies] = keyword[None] ,
identifier[create_instance_profile] = keyword[True] ,
identifier[region] = keyword[None] ,
identifier[key] = keyword[None] ,
identifier[keyid] = keyword[None] ,
identifier[profile] = keyword[None] ,
identifier[delete_policies] = keyword[True] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] :{}}
identifier[_policy_document] ={}
keyword[if] identifier[policy_document_from_pillars] :
identifier[from_pillars] = identifier[__salt__] [ literal[string] ]( identifier[policy_document_from_pillars] )
keyword[if] identifier[from_pillars] :
identifier[_policy_document] [ literal[string] ]= identifier[from_pillars] [ literal[string] ]
identifier[_policy_document] . identifier[setdefault] ( literal[string] ,[])
identifier[_policy_document] [ literal[string] ]. identifier[extend] ( identifier[from_pillars] [ literal[string] ])
keyword[if] identifier[policy_document] :
identifier[_policy_document] [ literal[string] ]= identifier[policy_document] [ literal[string] ]
identifier[_policy_document] . identifier[setdefault] ( literal[string] ,[])
identifier[_policy_document] [ literal[string] ]. identifier[extend] ( identifier[policy_document] [ literal[string] ])
identifier[_ret] = identifier[_role_present] ( identifier[name] , identifier[_policy_document] , identifier[path] , identifier[region] , identifier[key] , identifier[keyid] ,
identifier[profile] )
keyword[if] keyword[not] identifier[policies] :
identifier[policies] ={}
keyword[if] keyword[not] identifier[policies_from_pillars] :
identifier[policies_from_pillars] =[]
keyword[if] keyword[not] identifier[managed_policies] :
identifier[managed_policies] =[]
identifier[_policies] ={}
keyword[for] identifier[policy] keyword[in] identifier[policies_from_pillars] :
identifier[_policy] = identifier[__salt__] [ literal[string] ]( identifier[policy] )
identifier[_policies] . identifier[update] ( identifier[_policy] )
identifier[_policies] . identifier[update] ( identifier[policies] )
identifier[ret] [ literal[string] ]= identifier[_ret] [ literal[string] ]
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ]])
keyword[if] keyword[not] identifier[_ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_ret] [ literal[string] ]
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
keyword[return] identifier[ret]
keyword[if] identifier[create_instance_profile] :
identifier[_ret] = identifier[_instance_profile_present] ( identifier[name] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] )
identifier[ret] [ literal[string] ]= identifier[dictupdate] . identifier[update] ( identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ])
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ]])
keyword[if] keyword[not] identifier[_ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_ret] [ literal[string] ]
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
keyword[return] identifier[ret]
identifier[_ret] = identifier[_instance_profile_associated] ( identifier[name] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] )
identifier[ret] [ literal[string] ]= identifier[dictupdate] . identifier[update] ( identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ])
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ]])
keyword[if] keyword[not] identifier[_ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_ret] [ literal[string] ]
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
keyword[return] identifier[ret]
identifier[_ret] = identifier[_policies_present] ( identifier[name] , identifier[_policies] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] ,
identifier[delete_policies] )
identifier[ret] [ literal[string] ]= identifier[dictupdate] . identifier[update] ( identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ])
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ]])
keyword[if] keyword[not] identifier[_ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_ret] [ literal[string] ]
identifier[_ret] = identifier[_policies_attached] ( identifier[name] , identifier[managed_policies] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] )
identifier[ret] [ literal[string] ]= identifier[dictupdate] . identifier[update] ( identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ])
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ([ identifier[ret] [ literal[string] ], identifier[_ret] [ literal[string] ]])
keyword[if] keyword[not] identifier[_ret] [ literal[string] ]:
identifier[ret] [ literal[string] ]= identifier[_ret] [ literal[string] ]
keyword[return] identifier[ret] | def present(name, policy_document=None, policy_document_from_pillars=None, path=None, policies=None, policies_from_pillars=None, managed_policies=None, create_instance_profile=True, region=None, key=None, keyid=None, profile=None, delete_policies=True):
"""
Ensure the IAM role exists.
name
Name of the IAM role.
policy_document
The policy that grants an entity permission to assume the role.
(See https://boto.readthedocs.io/en/latest/ref/iam.html#boto.iam.connection.IAMConnection.create_role)
policy_document_from_pillars
A pillar key that contains a role policy document. The statements
defined here will be appended with the policy document statements
defined in the policy_document argument.
.. versionadded:: 2017.7.0
path
The path to the role/instance profile.
(See https://boto.readthedocs.io/en/latest/ref/iam.html#boto.iam.connection.IAMConnection.create_role)
policies
A dict of IAM role policies.
policies_from_pillars
A list of pillars that contain role policy dicts. Policies in the
pillars will be merged in the order defined in the list and key
conflicts will be handled by later defined keys overriding earlier
defined keys. The policies defined here will be merged with the
policies defined in the policies argument. If keys conflict, the keys
in the policies argument will override the keys defined in
policies_from_pillars.
managed_policies
A list of (AWS or Customer) managed policies to be attached to the role.
create_instance_profile
A boolean of whether or not to create an instance profile and associate
it with this role.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
delete_policies
Deletes existing policies that are not in the given list of policies. Default
value is ``True``. If ``False`` is specified, existing policies will not be deleted
allowing manual modifications on the IAM role to be persistent.
.. versionadded:: 2015.8.0
"""
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
# Build up _policy_document
_policy_document = {}
if policy_document_from_pillars:
from_pillars = __salt__['pillar.get'](policy_document_from_pillars)
if from_pillars:
_policy_document['Version'] = from_pillars['Version']
_policy_document.setdefault('Statement', [])
_policy_document['Statement'].extend(from_pillars['Statement']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if policy_document:
_policy_document['Version'] = policy_document['Version']
_policy_document.setdefault('Statement', [])
_policy_document['Statement'].extend(policy_document['Statement']) # depends on [control=['if'], data=[]]
_ret = _role_present(name, _policy_document, path, region, key, keyid, profile)
# Build up _policies
if not policies:
policies = {} # depends on [control=['if'], data=[]]
if not policies_from_pillars:
policies_from_pillars = [] # depends on [control=['if'], data=[]]
if not managed_policies:
managed_policies = [] # depends on [control=['if'], data=[]]
_policies = {}
for policy in policies_from_pillars:
_policy = __salt__['pillar.get'](policy)
_policies.update(_policy) # depends on [control=['for'], data=['policy']]
_policies.update(policies)
ret['changes'] = _ret['changes']
ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
if not _ret['result']:
ret['result'] = _ret['result']
if ret['result'] is False:
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if create_instance_profile:
_ret = _instance_profile_present(name, region, key, keyid, profile)
ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
if not _ret['result']:
ret['result'] = _ret['result']
if ret['result'] is False:
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
_ret = _instance_profile_associated(name, region, key, keyid, profile)
ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
if not _ret['result']:
ret['result'] = _ret['result']
if ret['result'] is False:
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
_ret = _policies_present(name, _policies, region, key, keyid, profile, delete_policies)
ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
if not _ret['result']:
ret['result'] = _ret['result'] # depends on [control=['if'], data=[]]
_ret = _policies_attached(name, managed_policies, region, key, keyid, profile)
ret['changes'] = dictupdate.update(ret['changes'], _ret['changes'])
ret['comment'] = ' '.join([ret['comment'], _ret['comment']])
if not _ret['result']:
ret['result'] = _ret['result'] # depends on [control=['if'], data=[]]
return ret |
def findViewWithAttributeThatMatches(self, attr, regex, root="ROOT"):
    '''
    Returns the list of Views in the tree below ``root`` whose ``attr``
    attribute matches ``regex``.
    '''
    finder = self.__findViewWithAttributeInTreeThatMatches
    return finder(attr, regex, root)
constant[
Finds the list of Views with the specified attribute matching
regex
]
return[call[name[self].__findViewWithAttributeInTreeThatMatches, parameter[name[attr], name[regex], name[root]]]] | keyword[def] identifier[findViewWithAttributeThatMatches] ( identifier[self] , identifier[attr] , identifier[regex] , identifier[root] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[__findViewWithAttributeInTreeThatMatches] ( identifier[attr] , identifier[regex] , identifier[root] ) | def findViewWithAttributeThatMatches(self, attr, regex, root='ROOT'):
"""
Finds the list of Views with the specified attribute matching
regex
"""
return self.__findViewWithAttributeInTreeThatMatches(attr, regex, root) |
def pipelines(self):
    """
    Property for accessing :class:`PipelineManager` instance, which is used to manage pipelines.
    The manager is created lazily on first access and cached afterwards.
    :rtype: yagocd.resources.pipeline.PipelineManager
    """
    manager = self._pipeline_manager
    if manager is None:
        manager = PipelineManager(session=self._session)
        self._pipeline_manager = manager
    return manager
constant[
Property for accessing :class:`PipelineManager` instance, which is used to manage pipelines.
:rtype: yagocd.resources.pipeline.PipelineManager
]
if compare[name[self]._pipeline_manager is constant[None]] begin[:]
name[self]._pipeline_manager assign[=] call[name[PipelineManager], parameter[]]
return[name[self]._pipeline_manager] | keyword[def] identifier[pipelines] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_pipeline_manager] keyword[is] keyword[None] :
identifier[self] . identifier[_pipeline_manager] = identifier[PipelineManager] ( identifier[session] = identifier[self] . identifier[_session] )
keyword[return] identifier[self] . identifier[_pipeline_manager] | def pipelines(self):
"""
Property for accessing :class:`PipelineManager` instance, which is used to manage pipelines.
:rtype: yagocd.resources.pipeline.PipelineManager
"""
if self._pipeline_manager is None:
self._pipeline_manager = PipelineManager(session=self._session) # depends on [control=['if'], data=[]]
return self._pipeline_manager |
def get(which):
    """DEPRECATED; see :func:`~skyfield.data.hipparcos.load_dataframe` instead.

    Given a single HIP number as a string, return the first matching star
    (or None if there is no match).  Given an iterable of HIP numbers,
    return the list of all matching stars.
    """
    if isinstance(which, str):
        prefix = ('H| %6s' % which).encode('ascii')
        return next(iter(load(lambda line: line.startswith(prefix))), None)
    wanted = {hip.encode('ascii').rjust(6) for hip in which}

    def is_wanted(line):
        # Columns 8-14 of each catalog line hold the HIP identifier.
        return line[8:14] in wanted

    return list(load(is_wanted))
constant[DEPRECATED; see :func:`~skyfield.data.hipparcos.load_dataframe() instead.]
if call[name[isinstance], parameter[name[which], name[str]]] begin[:]
variable[pattern] assign[=] call[binary_operation[constant[H| %6s] <ast.Mod object at 0x7da2590d6920> name[which]].encode, parameter[constant[ascii]]]
for taget[name[star]] in starred[call[name[load], parameter[<ast.Lambda object at 0x7da1b17f9360>]]] begin[:]
return[name[star]] | keyword[def] identifier[get] ( identifier[which] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[which] , identifier[str] ):
identifier[pattern] =( literal[string] % identifier[which] ). identifier[encode] ( literal[string] )
keyword[for] identifier[star] keyword[in] identifier[load] ( keyword[lambda] identifier[line] : identifier[line] . identifier[startswith] ( identifier[pattern] )):
keyword[return] identifier[star]
keyword[else] :
identifier[patterns] = identifier[set] ( identifier[id] . identifier[encode] ( literal[string] ). identifier[rjust] ( literal[int] ) keyword[for] identifier[id] keyword[in] identifier[which] )
keyword[return] identifier[list] ( identifier[load] ( keyword[lambda] identifier[line] : identifier[line] [ literal[int] : literal[int] ] keyword[in] identifier[patterns] )) | def get(which):
"""DEPRECATED; see :func:`~skyfield.data.hipparcos.load_dataframe() instead."""
if isinstance(which, str):
pattern = ('H| %6s' % which).encode('ascii')
for star in load(lambda line: line.startswith(pattern)):
return star # depends on [control=['for'], data=['star']] # depends on [control=['if'], data=[]]
else:
patterns = set((id.encode('ascii').rjust(6) for id in which))
return list(load(lambda line: line[8:14] in patterns)) |
def chooseForm_slot(self, element, element_old):
    """Choose the form to be shown and query the database for it.

    :param element: an object that has *_id* and *classname* attributes
    :param element_old: an object that has *_id* and *classname* attributes

    Typically connected to a List widget's ``currentItemChanged`` signal
    (List.widget is a QListWidget), so both parameters are
    QListWidgetItem instances carrying extra "_id" and "classname"
    attributes.  Looks up the record for element._id in the database.
    """
    self.current_slot = None
    if verbose:
        # Handy while debugging what actually arrives through the signal.
        print(self.pre, "chooseForm_slot :", element)
    if element is None:
        self.current_row = None
        self.element = None
    else:
        assert hasattr(element, "_id")
        assert hasattr(element, "classname")
        try:
            chosen_row = self.row_instance_by_name[element.classname]
        except KeyError:
            print(
                self.pre,
                "chooseForm_slot : no such classname for this FormSet : ",
                element.classname)
            self.current_row = None
            self.element = None
        else:
            self.current_row = chosen_row
            self.resetForm()
            chosen_row.get(self.collection, element._id)
            self.element = element
            self.current_slot = chosen_row.get_column_value("slot")
    self.showCurrent()
constant[Calling this slot chooses the form to be shown
:param element: an object that has *_id* and *classname* attributes
:param element_old: an object that has *_id* and *classname* attributes
This slot is typically connected to List classes, widget attribute's, currentItemChanged method (List.widget is QListWidget that has currentItemChanged slot), so the element and element_old parameters are QListWidgetItem instances with extra attributes "_id" and "_classname" attached.
Queries the database for element._id
]
name[self].current_slot assign[=] constant[None]
if name[verbose] begin[:]
call[name[print], parameter[name[self].pre, constant[chooseForm_slot :], name[element]]]
if call[name[isinstance], parameter[name[element], call[name[type], parameter[constant[None]]]]] begin[:]
name[self].current_row assign[=] constant[None]
name[self].element assign[=] constant[None]
call[name[self].showCurrent, parameter[]] | keyword[def] identifier[chooseForm_slot] ( identifier[self] , identifier[element] , identifier[element_old] ):
literal[string]
identifier[self] . identifier[current_slot] = keyword[None]
keyword[if] ( identifier[verbose] ):
identifier[print] ( identifier[self] . identifier[pre] , literal[string] , identifier[element] )
keyword[if] ( identifier[isinstance] ( identifier[element] , identifier[type] ( keyword[None] ))):
identifier[self] . identifier[current_row] = keyword[None]
identifier[self] . identifier[element] = keyword[None]
keyword[else] :
keyword[assert] ( identifier[hasattr] ( identifier[element] , literal[string] ))
keyword[assert] ( identifier[hasattr] ( identifier[element] , literal[string] ))
keyword[try] :
identifier[self] . identifier[current_row] = identifier[self] . identifier[row_instance_by_name] [ identifier[element] . identifier[classname] ]
keyword[except] identifier[KeyError] :
identifier[print] (
identifier[self] . identifier[pre] ,
literal[string] ,
identifier[element] . identifier[classname] )
identifier[self] . identifier[current_row] = keyword[None]
identifier[self] . identifier[element] = keyword[None]
keyword[else] :
identifier[self] . identifier[resetForm] ()
identifier[self] . identifier[current_row] . identifier[get] ( identifier[self] . identifier[collection] , identifier[element] . identifier[_id] )
identifier[self] . identifier[element] = identifier[element]
identifier[self] . identifier[current_slot] = identifier[self] . identifier[current_row] . identifier[get_column_value] ( literal[string] )
identifier[self] . identifier[showCurrent] () | def chooseForm_slot(self, element, element_old):
"""Calling this slot chooses the form to be shown
:param element: an object that has *_id* and *classname* attributes
:param element_old: an object that has *_id* and *classname* attributes
This slot is typically connected to List classes, widget attribute's, currentItemChanged method (List.widget is QListWidget that has currentItemChanged slot), so the element and element_old parameters are QListWidgetItem instances with extra attributes "_id" and "_classname" attached.
Queries the database for element._id
"""
self.current_slot = None
if verbose:
# enable this if you're unsure what's coming here..
print(self.pre, 'chooseForm_slot :', element) # depends on [control=['if'], data=[]]
if isinstance(element, type(None)):
self.current_row = None
self.element = None # depends on [control=['if'], data=[]]
else:
# print(self.pre,"chooseForm_slot :",element)
assert hasattr(element, '_id')
assert hasattr(element, 'classname')
try:
self.current_row = self.row_instance_by_name[element.classname] # depends on [control=['try'], data=[]]
except KeyError:
print(self.pre, 'chooseForm_slot : no such classname for this FormSet : ', element.classname)
self.current_row = None
self.element = None # depends on [control=['except'], data=[]]
else:
self.resetForm()
self.current_row.get(self.collection, element._id)
self.element = element
self.current_slot = self.current_row.get_column_value('slot')
self.showCurrent() |
def derivative(self, rate):
    """Return the instantaneous quaternion derivative for rotation at `rate`.

    In the GSA-style quaternion formulation this is q' = 0.5 * q * w,
    where w is the pure quaternion built from the rate vector.

    Params:
        rate: numpy 3-array (or array-like) of rotation rates about the
            global x, y and z axes respectively.

    Returns:
        A quaternion describing the rotation rate.
    """
    checked_rate = self._validate_number_sequence(rate, 3)
    half = 0.5 * self
    return half * Quaternion(vector=checked_rate)
constant[Get the instantaneous quaternion derivative representing a quaternion rotating at a 3D rate vector `rate`
Params:
rate: numpy 3-array (or array-like) describing rotation rates about the global x, y and z axes respectively.
Returns:
A unit quaternion describing the rotation rate
]
variable[rate] assign[=] call[name[self]._validate_number_sequence, parameter[name[rate], constant[3]]]
return[binary_operation[binary_operation[constant[0.5] * name[self]] * call[name[Quaternion], parameter[]]]] | keyword[def] identifier[derivative] ( identifier[self] , identifier[rate] ):
literal[string]
identifier[rate] = identifier[self] . identifier[_validate_number_sequence] ( identifier[rate] , literal[int] )
keyword[return] literal[int] * identifier[self] * identifier[Quaternion] ( identifier[vector] = identifier[rate] ) | def derivative(self, rate):
"""Get the instantaneous quaternion derivative representing a quaternion rotating at a 3D rate vector `rate`
Params:
rate: numpy 3-array (or array-like) describing rotation rates about the global x, y and z axes respectively.
Returns:
A unit quaternion describing the rotation rate
"""
rate = self._validate_number_sequence(rate, 3)
return 0.5 * self * Quaternion(vector=rate) |
def _create_rubber_bands_action(self):
    """Create action for toggling rubber bands."""
    icon_path = resources_path('img', 'icons', 'toggle-rubber-bands.svg')
    action = QAction(
        QIcon(icon_path),
        self.tr('Toggle Scenario Outlines'),
        self.iface.mainWindow())
    self.action_toggle_rubberbands = action
    tip = self.tr('Toggle rubber bands showing scenario extents.')
    action.setStatusTip(tip)
    action.setWhatsThis(tip)
    # Restore the checked state persisted in the settings.
    action.setCheckable(True)
    action.setChecked(setting('showRubberBands', False, expected_type=bool))
    # noinspection PyUnresolvedReferences
    action.triggered.connect(self.dock_widget.toggle_rubber_bands)
    self.add_action(action)
constant[Create action for toggling rubber bands.]
variable[icon] assign[=] call[name[resources_path], parameter[constant[img], constant[icons], constant[toggle-rubber-bands.svg]]]
name[self].action_toggle_rubberbands assign[=] call[name[QAction], parameter[call[name[QIcon], parameter[name[icon]]], call[name[self].tr, parameter[constant[Toggle Scenario Outlines]]], call[name[self].iface.mainWindow, parameter[]]]]
variable[message] assign[=] call[name[self].tr, parameter[constant[Toggle rubber bands showing scenario extents.]]]
call[name[self].action_toggle_rubberbands.setStatusTip, parameter[name[message]]]
call[name[self].action_toggle_rubberbands.setWhatsThis, parameter[name[message]]]
call[name[self].action_toggle_rubberbands.setCheckable, parameter[constant[True]]]
variable[flag] assign[=] call[name[setting], parameter[constant[showRubberBands], constant[False]]]
call[name[self].action_toggle_rubberbands.setChecked, parameter[name[flag]]]
call[name[self].action_toggle_rubberbands.triggered.connect, parameter[name[self].dock_widget.toggle_rubber_bands]]
call[name[self].add_action, parameter[name[self].action_toggle_rubberbands]] | keyword[def] identifier[_create_rubber_bands_action] ( identifier[self] ):
literal[string]
identifier[icon] = identifier[resources_path] ( literal[string] , literal[string] , literal[string] )
identifier[self] . identifier[action_toggle_rubberbands] = identifier[QAction] (
identifier[QIcon] ( identifier[icon] ),
identifier[self] . identifier[tr] ( literal[string] ), identifier[self] . identifier[iface] . identifier[mainWindow] ())
identifier[message] = identifier[self] . identifier[tr] ( literal[string] )
identifier[self] . identifier[action_toggle_rubberbands] . identifier[setStatusTip] ( identifier[message] )
identifier[self] . identifier[action_toggle_rubberbands] . identifier[setWhatsThis] ( identifier[message] )
identifier[self] . identifier[action_toggle_rubberbands] . identifier[setCheckable] ( keyword[True] )
identifier[flag] = identifier[setting] ( literal[string] , keyword[False] , identifier[expected_type] = identifier[bool] )
identifier[self] . identifier[action_toggle_rubberbands] . identifier[setChecked] ( identifier[flag] )
identifier[self] . identifier[action_toggle_rubberbands] . identifier[triggered] . identifier[connect] (
identifier[self] . identifier[dock_widget] . identifier[toggle_rubber_bands] )
identifier[self] . identifier[add_action] ( identifier[self] . identifier[action_toggle_rubberbands] ) | def _create_rubber_bands_action(self):
"""Create action for toggling rubber bands."""
icon = resources_path('img', 'icons', 'toggle-rubber-bands.svg')
self.action_toggle_rubberbands = QAction(QIcon(icon), self.tr('Toggle Scenario Outlines'), self.iface.mainWindow())
message = self.tr('Toggle rubber bands showing scenario extents.')
self.action_toggle_rubberbands.setStatusTip(message)
self.action_toggle_rubberbands.setWhatsThis(message)
# Set initial state
self.action_toggle_rubberbands.setCheckable(True)
flag = setting('showRubberBands', False, expected_type=bool)
self.action_toggle_rubberbands.setChecked(flag)
# noinspection PyUnresolvedReferences
self.action_toggle_rubberbands.triggered.connect(self.dock_widget.toggle_rubber_bands)
self.add_action(self.action_toggle_rubberbands) |
async def on_raw_313(self, message):
    """ WHOIS operator info. """
    _, nickname = message.params[:2]
    # Only record the operator flag for WHOIS queries still in flight.
    if nickname in self._pending['whois']:
        self._whois_info[nickname].update({'oper': True})
literal[string]
identifier[target] , identifier[nickname] = identifier[message] . identifier[params] [: literal[int] ]
identifier[info] ={
literal[string] : keyword[True]
}
keyword[if] identifier[nickname] keyword[in] identifier[self] . identifier[_pending] [ literal[string] ]:
identifier[self] . identifier[_whois_info] [ identifier[nickname] ]. identifier[update] ( identifier[info] ) | async def on_raw_313(self, message):
""" WHOIS operator info. """
(target, nickname) = message.params[:2]
info = {'oper': True}
if nickname in self._pending['whois']:
self._whois_info[nickname].update(info) # depends on [control=['if'], data=['nickname']] |
def _reconstruct_matrix(data_list):
"""Reconstructs a matrix from a list containing sparse matrix extracted properties
`data_list` needs to be formatted as the first result of
:func:`~pypet.parameter.SparseParameter._serialize_matrix`
"""
matrix_format = data_list[0]
data = data_list[1]
is_empty = isinstance(data, str) and data == '__empty__'
if matrix_format == 'csc':
if is_empty:
return spsp.csc_matrix(data_list[4])
else:
return spsp.csc_matrix(tuple(data_list[1:4]), shape=data_list[4])
elif matrix_format == 'csr':
if is_empty:
return spsp.csr_matrix(data_list[4])
else:
return spsp.csr_matrix(tuple(data_list[1:4]), shape=data_list[4])
elif matrix_format == 'bsr':
if is_empty:
# We have an empty matrix, that cannot be build as in elee case
return spsp.bsr_matrix(data_list[4])
else:
return spsp.bsr_matrix(tuple(data_list[1:4]), shape=data_list[4])
elif matrix_format == 'dia':
if is_empty:
return spsp.dia_matrix(data_list[3])
else:
return spsp.dia_matrix(tuple(data_list[1:3]), shape=data_list[3])
else:
raise RuntimeError('You shall not pass!') | def function[_reconstruct_matrix, parameter[data_list]]:
constant[Reconstructs a matrix from a list containing sparse matrix extracted properties
`data_list` needs to be formatted as the first result of
:func:`~pypet.parameter.SparseParameter._serialize_matrix`
]
variable[matrix_format] assign[=] call[name[data_list]][constant[0]]
variable[data] assign[=] call[name[data_list]][constant[1]]
variable[is_empty] assign[=] <ast.BoolOp object at 0x7da1b0355a50>
if compare[name[matrix_format] equal[==] constant[csc]] begin[:]
if name[is_empty] begin[:]
return[call[name[spsp].csc_matrix, parameter[call[name[data_list]][constant[4]]]]] | keyword[def] identifier[_reconstruct_matrix] ( identifier[data_list] ):
literal[string]
identifier[matrix_format] = identifier[data_list] [ literal[int] ]
identifier[data] = identifier[data_list] [ literal[int] ]
identifier[is_empty] = identifier[isinstance] ( identifier[data] , identifier[str] ) keyword[and] identifier[data] == literal[string]
keyword[if] identifier[matrix_format] == literal[string] :
keyword[if] identifier[is_empty] :
keyword[return] identifier[spsp] . identifier[csc_matrix] ( identifier[data_list] [ literal[int] ])
keyword[else] :
keyword[return] identifier[spsp] . identifier[csc_matrix] ( identifier[tuple] ( identifier[data_list] [ literal[int] : literal[int] ]), identifier[shape] = identifier[data_list] [ literal[int] ])
keyword[elif] identifier[matrix_format] == literal[string] :
keyword[if] identifier[is_empty] :
keyword[return] identifier[spsp] . identifier[csr_matrix] ( identifier[data_list] [ literal[int] ])
keyword[else] :
keyword[return] identifier[spsp] . identifier[csr_matrix] ( identifier[tuple] ( identifier[data_list] [ literal[int] : literal[int] ]), identifier[shape] = identifier[data_list] [ literal[int] ])
keyword[elif] identifier[matrix_format] == literal[string] :
keyword[if] identifier[is_empty] :
keyword[return] identifier[spsp] . identifier[bsr_matrix] ( identifier[data_list] [ literal[int] ])
keyword[else] :
keyword[return] identifier[spsp] . identifier[bsr_matrix] ( identifier[tuple] ( identifier[data_list] [ literal[int] : literal[int] ]), identifier[shape] = identifier[data_list] [ literal[int] ])
keyword[elif] identifier[matrix_format] == literal[string] :
keyword[if] identifier[is_empty] :
keyword[return] identifier[spsp] . identifier[dia_matrix] ( identifier[data_list] [ literal[int] ])
keyword[else] :
keyword[return] identifier[spsp] . identifier[dia_matrix] ( identifier[tuple] ( identifier[data_list] [ literal[int] : literal[int] ]), identifier[shape] = identifier[data_list] [ literal[int] ])
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] ) | def _reconstruct_matrix(data_list):
"""Reconstructs a matrix from a list containing sparse matrix extracted properties
`data_list` needs to be formatted as the first result of
:func:`~pypet.parameter.SparseParameter._serialize_matrix`
"""
matrix_format = data_list[0]
data = data_list[1]
is_empty = isinstance(data, str) and data == '__empty__'
if matrix_format == 'csc':
if is_empty:
return spsp.csc_matrix(data_list[4]) # depends on [control=['if'], data=[]]
else:
return spsp.csc_matrix(tuple(data_list[1:4]), shape=data_list[4]) # depends on [control=['if'], data=[]]
elif matrix_format == 'csr':
if is_empty:
return spsp.csr_matrix(data_list[4]) # depends on [control=['if'], data=[]]
else:
return spsp.csr_matrix(tuple(data_list[1:4]), shape=data_list[4]) # depends on [control=['if'], data=[]]
elif matrix_format == 'bsr':
if is_empty:
# We have an empty matrix, that cannot be build as in elee case
return spsp.bsr_matrix(data_list[4]) # depends on [control=['if'], data=[]]
else:
return spsp.bsr_matrix(tuple(data_list[1:4]), shape=data_list[4]) # depends on [control=['if'], data=[]]
elif matrix_format == 'dia':
if is_empty:
return spsp.dia_matrix(data_list[3]) # depends on [control=['if'], data=[]]
else:
return spsp.dia_matrix(tuple(data_list[1:3]), shape=data_list[3]) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('You shall not pass!') |
def createService(self, createServiceParameter,
                  description=None,
                  tags="Feature Service",
                  snippet=None):
    """Create an empty hosted feature service from service metadata JSON.

    Inputs:
        createServiceParameter - create service object whose ``value``
            holds the feature-service creation parameters.
        description - optional description for the new item.
        tags - comma separated tags for the new item.
        snippet - optional short summary for the new item.
    Returns a UserItem for the created service, or the raw response
    dictionary when no item id is present in it.
    """
    params = {
        "f": "json",
        "outputType": "featureService",
        "createParameters": json.dumps(createServiceParameter.value),
        "tags": tags,
    }
    if snippet is not None:
        params['snippet'] = snippet
    if description is not None:
        params['description'] = description
    res = self._post(url="%s/createService" % self.location,
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_url)
    if 'id' in res or 'serviceItemId' in res:
        # Portal versions differ in which key carries the new item id.
        item_id = res['id'] if 'id' in res else res['serviceItemId']
        item_url = "%s/items/%s" % (self.location, item_id)
        return UserItem(url=item_url,
                        securityHandler=self._securityHandler,
                        proxy_url=self._proxy_url,
                        proxy_port=self._proxy_port)
    return res
constant[
The Create Service operation allows users to create a hosted
feature service. You can use the API to create an empty hosted
feaure service from feature service metadata JSON.
Inputs:
createServiceParameter - create service object
]
variable[url] assign[=] binary_operation[constant[%s/createService] <ast.Mod object at 0x7da2590d6920> name[self].location]
variable[val] assign[=] name[createServiceParameter].value
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18fe925f0>, <ast.Constant object at 0x7da18fe90820>, <ast.Constant object at 0x7da18fe917e0>, <ast.Constant object at 0x7da18fe92500>], [<ast.Constant object at 0x7da18fe93160>, <ast.Constant object at 0x7da18fe93070>, <ast.Call object at 0x7da18fe90970>, <ast.Name object at 0x7da18fe90c70>]]
if compare[name[snippet] is_not constant[None]] begin[:]
call[name[params]][constant[snippet]] assign[=] name[snippet]
if compare[name[description] is_not constant[None]] begin[:]
call[name[params]][constant[description]] assign[=] name[description]
variable[res] assign[=] call[name[self]._post, parameter[]]
if <ast.BoolOp object at 0x7da207f01060> begin[:]
if compare[constant[id] in name[res]] begin[:]
variable[url] assign[=] binary_operation[constant[%s/items/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da207f039a0>, <ast.Subscript object at 0x7da207f00490>]]]
return[call[name[UserItem], parameter[]]]
return[name[res]] | keyword[def] identifier[createService] ( identifier[self] , identifier[createServiceParameter] ,
identifier[description] = keyword[None] ,
identifier[tags] = literal[string] ,
identifier[snippet] = keyword[None] ):
literal[string]
identifier[url] = literal[string] % identifier[self] . identifier[location]
identifier[val] = identifier[createServiceParameter] . identifier[value]
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[json] . identifier[dumps] ( identifier[val] ),
literal[string] : identifier[tags]
}
keyword[if] identifier[snippet] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[snippet]
keyword[if] identifier[description] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[description]
identifier[res] = identifier[self] . identifier[_post] ( identifier[url] = identifier[url] ,
identifier[param_dict] = identifier[params] ,
identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] )
keyword[if] literal[string] keyword[in] identifier[res] keyword[or] literal[string] keyword[in] identifier[res] :
keyword[if] literal[string] keyword[in] identifier[res] :
identifier[url] = literal[string] %( identifier[self] . identifier[location] , identifier[res] [ literal[string] ])
keyword[else] :
identifier[url] = literal[string] %( identifier[self] . identifier[location] , identifier[res] [ literal[string] ])
keyword[return] identifier[UserItem] ( identifier[url] = identifier[url] ,
identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] )
keyword[return] identifier[res] | def createService(self, createServiceParameter, description=None, tags='Feature Service', snippet=None):
"""
The Create Service operation allows users to create a hosted
feature service. You can use the API to create an empty hosted
feaure service from feature service metadata JSON.
Inputs:
createServiceParameter - create service object
"""
url = '%s/createService' % self.location
val = createServiceParameter.value
params = {'f': 'json', 'outputType': 'featureService', 'createParameters': json.dumps(val), 'tags': tags}
if snippet is not None:
params['snippet'] = snippet # depends on [control=['if'], data=['snippet']]
if description is not None:
params['description'] = description # depends on [control=['if'], data=['description']]
res = self._post(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
if 'id' in res or 'serviceItemId' in res:
if 'id' in res:
url = '%s/items/%s' % (self.location, res['id']) # depends on [control=['if'], data=['res']]
else:
url = '%s/items/%s' % (self.location, res['serviceItemId'])
return UserItem(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port) # depends on [control=['if'], data=[]]
return res |
def from_corpus(cls, corpus):
    """
    Create a new modifiable corpus from any other CorpusView.

    This can for example be used to turn a read-only subview into an
    independent, modifiable corpus.

    Args:
        corpus (CorpusView): The corpus to create a copy from.

    Returns:
        Corpus: A new corpus with the same data as the given one.
    """
    # NOTE(review): instantiates Corpus directly instead of cls(), so a
    # subclass calling from_corpus still receives a plain Corpus --
    # confirm this is intended.
    new_corpus = Corpus()

    # Copy tracks and issuers first; importing may assign fresh ids, so
    # keep the mapping from old ids to the imported objects.
    track_mapping = new_corpus.import_tracks(
        copy.deepcopy(list(corpus.tracks.values())))
    issuer_mapping = new_corpus.import_issuers(
        copy.deepcopy(list(corpus.issuers.values())))

    # Re-wire each copied utterance to the freshly imported tracks/issuers.
    utterances = copy.deepcopy(list(corpus.utterances.values()))
    for utt in utterances:
        utt.track = track_mapping[utt.track.idx]
        if utt.issuer is not None:
            utt.issuer = issuer_mapping[utt.issuer.idx]
    new_corpus.import_utterances(utterances)

    for subview_idx, subview in copy.deepcopy(corpus.subviews).items():
        new_corpus.import_subview(subview_idx, subview)

    for container_idx, container in corpus.feature_containers.items():
        new_corpus.new_feature_container(container_idx, container.path)

    return new_corpus
constant[
Create a new modifiable corpus from any other CorpusView.
This for example can be used to create a independent modifiable corpus from a subview.
Args:
corpus (CorpusView): The corpus to create a copy from.
Returns:
Corpus: A new corpus with the same data as the given one.
]
variable[ds] assign[=] call[name[Corpus], parameter[]]
variable[tracks] assign[=] call[name[copy].deepcopy, parameter[call[name[list], parameter[call[name[corpus].tracks.values, parameter[]]]]]]
variable[track_mapping] assign[=] call[name[ds].import_tracks, parameter[name[tracks]]]
variable[issuers] assign[=] call[name[copy].deepcopy, parameter[call[name[list], parameter[call[name[corpus].issuers.values, parameter[]]]]]]
variable[issuer_mapping] assign[=] call[name[ds].import_issuers, parameter[name[issuers]]]
variable[utterances] assign[=] call[name[copy].deepcopy, parameter[call[name[list], parameter[call[name[corpus].utterances.values, parameter[]]]]]]
for taget[name[utterance]] in starred[name[utterances]] begin[:]
name[utterance].track assign[=] call[name[track_mapping]][name[utterance].track.idx]
if compare[name[utterance].issuer is_not constant[None]] begin[:]
name[utterance].issuer assign[=] call[name[issuer_mapping]][name[utterance].issuer.idx]
call[name[ds].import_utterances, parameter[name[utterances]]]
variable[subviews] assign[=] call[name[copy].deepcopy, parameter[name[corpus].subviews]]
for taget[tuple[[<ast.Name object at 0x7da1b0b50c70>, <ast.Name object at 0x7da1b0b51360>]]] in starred[call[name[subviews].items, parameter[]]] begin[:]
call[name[ds].import_subview, parameter[name[subview_idx], name[subview]]]
for taget[tuple[[<ast.Name object at 0x7da1b0b526b0>, <ast.Name object at 0x7da1b0b53c40>]]] in starred[call[name[corpus].feature_containers.items, parameter[]]] begin[:]
call[name[ds].new_feature_container, parameter[name[feat_container_idx], name[feature_container].path]]
return[name[ds]] | keyword[def] identifier[from_corpus] ( identifier[cls] , identifier[corpus] ):
literal[string]
identifier[ds] = identifier[Corpus] ()
identifier[tracks] = identifier[copy] . identifier[deepcopy] ( identifier[list] ( identifier[corpus] . identifier[tracks] . identifier[values] ()))
identifier[track_mapping] = identifier[ds] . identifier[import_tracks] ( identifier[tracks] )
identifier[issuers] = identifier[copy] . identifier[deepcopy] ( identifier[list] ( identifier[corpus] . identifier[issuers] . identifier[values] ()))
identifier[issuer_mapping] = identifier[ds] . identifier[import_issuers] ( identifier[issuers] )
identifier[utterances] = identifier[copy] . identifier[deepcopy] ( identifier[list] ( identifier[corpus] . identifier[utterances] . identifier[values] ()))
keyword[for] identifier[utterance] keyword[in] identifier[utterances] :
identifier[utterance] . identifier[track] = identifier[track_mapping] [ identifier[utterance] . identifier[track] . identifier[idx] ]
keyword[if] identifier[utterance] . identifier[issuer] keyword[is] keyword[not] keyword[None] :
identifier[utterance] . identifier[issuer] = identifier[issuer_mapping] [ identifier[utterance] . identifier[issuer] . identifier[idx] ]
identifier[ds] . identifier[import_utterances] ( identifier[utterances] )
identifier[subviews] = identifier[copy] . identifier[deepcopy] ( identifier[corpus] . identifier[subviews] )
keyword[for] identifier[subview_idx] , identifier[subview] keyword[in] identifier[subviews] . identifier[items] ():
identifier[ds] . identifier[import_subview] ( identifier[subview_idx] , identifier[subview] )
keyword[for] identifier[feat_container_idx] , identifier[feature_container] keyword[in] identifier[corpus] . identifier[feature_containers] . identifier[items] ():
identifier[ds] . identifier[new_feature_container] ( identifier[feat_container_idx] , identifier[feature_container] . identifier[path] )
keyword[return] identifier[ds] | def from_corpus(cls, corpus):
"""
Create a new modifiable corpus from any other CorpusView.
This for example can be used to create a independent modifiable corpus from a subview.
Args:
corpus (CorpusView): The corpus to create a copy from.
Returns:
Corpus: A new corpus with the same data as the given one.
"""
ds = Corpus()
# Tracks
tracks = copy.deepcopy(list(corpus.tracks.values()))
track_mapping = ds.import_tracks(tracks)
# Issuers
issuers = copy.deepcopy(list(corpus.issuers.values()))
issuer_mapping = ds.import_issuers(issuers)
# Utterances, with replacing changed track- and issuer-ids
utterances = copy.deepcopy(list(corpus.utterances.values()))
for utterance in utterances:
utterance.track = track_mapping[utterance.track.idx]
if utterance.issuer is not None:
utterance.issuer = issuer_mapping[utterance.issuer.idx] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['utterance']]
ds.import_utterances(utterances)
# Subviews
subviews = copy.deepcopy(corpus.subviews)
for (subview_idx, subview) in subviews.items():
ds.import_subview(subview_idx, subview) # depends on [control=['for'], data=[]]
# Feat-Containers
for (feat_container_idx, feature_container) in corpus.feature_containers.items():
ds.new_feature_container(feat_container_idx, feature_container.path) # depends on [control=['for'], data=[]]
return ds |
def cli(ctx, cmd):
    """Execute commands using Apio packages."""
    # Delegate to the shared command runner and propagate its exit status
    # back through the click context.
    ctx.exit(util.call(cmd))
constant[Execute commands using Apio packages.]
variable[exit_code] assign[=] call[name[util].call, parameter[name[cmd]]]
call[name[ctx].exit, parameter[name[exit_code]]] | keyword[def] identifier[cli] ( identifier[ctx] , identifier[cmd] ):
literal[string]
identifier[exit_code] = identifier[util] . identifier[call] ( identifier[cmd] )
identifier[ctx] . identifier[exit] ( identifier[exit_code] ) | def cli(ctx, cmd):
"""Execute commands using Apio packages."""
exit_code = util.call(cmd)
ctx.exit(exit_code) |
def disconnect(self):
    """
    Closes connection to Scratch
    """
    sock = self.socket
    try:
        # Request a full (read+write) shutdown for a proper disconnect.
        # The peer may already have dropped the link, in which case
        # shutdown raises and we simply move on to closing the socket.
        sock.shutdown(socket.SHUT_RDWR)
    except socket.error:
        pass
    sock.close()
    self.connected = False
constant[
Closes connection to Scratch
]
<ast.Try object at 0x7da1b10c6350>
call[name[self].socket.close, parameter[]]
name[self].connected assign[=] constant[False] | keyword[def] identifier[disconnect] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[socket] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] )
keyword[except] identifier[socket] . identifier[error] :
keyword[pass]
identifier[self] . identifier[socket] . identifier[close] ()
identifier[self] . identifier[connected] = keyword[False] | def disconnect(self):
"""
Closes connection to Scratch
"""
try: # connection may already be disconnected, so catch exceptions
self.socket.shutdown(socket.SHUT_RDWR) # a proper disconnect # depends on [control=['try'], data=[]]
except socket.error:
pass # depends on [control=['except'], data=[]]
self.socket.close()
self.connected = False |
def _def_check(self):
    """
    Checks that the definition provided contains only valid arguments for a
    text index.

    Raises CloudantArgumentError 127 for an unknown argument name and 128
    for an argument whose value has the wrong type.
    """
    if self._def == dict():
        return
    valid_keys = list(TEXT_INDEX_ARGS.keys())
    for arg_name, arg_value in iteritems_(self._def):
        if arg_name not in valid_keys:
            raise CloudantArgumentError(127, arg_name)
        expected_type = TEXT_INDEX_ARGS[arg_name]
        if not isinstance(arg_value, expected_type):
            raise CloudantArgumentError(128, arg_name, expected_type)
constant[
Checks that the definition provided contains only valid arguments for a
text index.
]
if compare[name[self]._def not_equal[!=] call[name[dict], parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20e74bf70>, <ast.Name object at 0x7da20c7c8670>]]] in starred[call[name[iteritems_], parameter[name[self]._def]]] begin[:]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[TEXT_INDEX_ARGS].keys, parameter[]]]]] begin[:]
<ast.Raise object at 0x7da20c7cbc10>
if <ast.UnaryOp object at 0x7da20c7c9930> begin[:]
<ast.Raise object at 0x7da20c7cb400> | keyword[def] identifier[_def_check] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_def] != identifier[dict] ():
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[iteritems_] ( identifier[self] . identifier[_def] ):
keyword[if] identifier[key] keyword[not] keyword[in] identifier[list] ( identifier[TEXT_INDEX_ARGS] . identifier[keys] ()):
keyword[raise] identifier[CloudantArgumentError] ( literal[int] , identifier[key] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[TEXT_INDEX_ARGS] [ identifier[key] ]):
keyword[raise] identifier[CloudantArgumentError] ( literal[int] , identifier[key] , identifier[TEXT_INDEX_ARGS] [ identifier[key] ]) | def _def_check(self):
"""
Checks that the definition provided contains only valid arguments for a
text index.
"""
if self._def != dict():
for (key, val) in iteritems_(self._def):
if key not in list(TEXT_INDEX_ARGS.keys()):
raise CloudantArgumentError(127, key) # depends on [control=['if'], data=['key']]
if not isinstance(val, TEXT_INDEX_ARGS[key]):
raise CloudantArgumentError(128, key, TEXT_INDEX_ARGS[key]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
def _parse_btrfs_info(data):
'''
Parse BTRFS device info data.
'''
ret = {}
for line in [line for line in data.split("\n") if line][:-1]:
if line.startswith("Label:"):
line = re.sub(r"Label:\s+", "", line)
label, uuid_ = [tkn.strip() for tkn in line.split("uuid:")]
ret['label'] = label != 'none' and label or None
ret['uuid'] = uuid_
continue
if line.startswith("\tdevid"):
dev_data = re.split(r"\s+", line.strip())
dev_id = dev_data[-1]
ret[dev_id] = {
'device_id': dev_data[1],
'size': dev_data[3],
'used': dev_data[5],
}
return ret | def function[_parse_btrfs_info, parameter[data]]:
constant[
Parse BTRFS device info data.
]
variable[ret] assign[=] dictionary[[], []]
for taget[name[line]] in starred[call[<ast.ListComp object at 0x7da1b2096500>][<ast.Slice object at 0x7da1b20947f0>]] begin[:]
if call[name[line].startswith, parameter[constant[Label:]]] begin[:]
variable[line] assign[=] call[name[re].sub, parameter[constant[Label:\s+], constant[], name[line]]]
<ast.Tuple object at 0x7da1b20948e0> assign[=] <ast.ListComp object at 0x7da1b20940d0>
call[name[ret]][constant[label]] assign[=] <ast.BoolOp object at 0x7da1b210b2b0>
call[name[ret]][constant[uuid]] assign[=] name[uuid_]
continue
if call[name[line].startswith, parameter[constant[ devid]]] begin[:]
variable[dev_data] assign[=] call[name[re].split, parameter[constant[\s+], call[name[line].strip, parameter[]]]]
variable[dev_id] assign[=] call[name[dev_data]][<ast.UnaryOp object at 0x7da1b210b940>]
call[name[ret]][name[dev_id]] assign[=] dictionary[[<ast.Constant object at 0x7da1b210b7c0>, <ast.Constant object at 0x7da1b210b820>, <ast.Constant object at 0x7da1b2109b40>], [<ast.Subscript object at 0x7da1b2109a80>, <ast.Subscript object at 0x7da1b210bc10>, <ast.Subscript object at 0x7da1b2108430>]]
return[name[ret]] | keyword[def] identifier[_parse_btrfs_info] ( identifier[data] ):
literal[string]
identifier[ret] ={}
keyword[for] identifier[line] keyword[in] [ identifier[line] keyword[for] identifier[line] keyword[in] identifier[data] . identifier[split] ( literal[string] ) keyword[if] identifier[line] ][:- literal[int] ]:
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[line] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[line] )
identifier[label] , identifier[uuid_] =[ identifier[tkn] . identifier[strip] () keyword[for] identifier[tkn] keyword[in] identifier[line] . identifier[split] ( literal[string] )]
identifier[ret] [ literal[string] ]= identifier[label] != literal[string] keyword[and] identifier[label] keyword[or] keyword[None]
identifier[ret] [ literal[string] ]= identifier[uuid_]
keyword[continue]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[dev_data] = identifier[re] . identifier[split] ( literal[string] , identifier[line] . identifier[strip] ())
identifier[dev_id] = identifier[dev_data] [- literal[int] ]
identifier[ret] [ identifier[dev_id] ]={
literal[string] : identifier[dev_data] [ literal[int] ],
literal[string] : identifier[dev_data] [ literal[int] ],
literal[string] : identifier[dev_data] [ literal[int] ],
}
keyword[return] identifier[ret] | def _parse_btrfs_info(data):
"""
Parse BTRFS device info data.
"""
ret = {}
for line in [line for line in data.split('\n') if line][:-1]:
if line.startswith('Label:'):
line = re.sub('Label:\\s+', '', line)
(label, uuid_) = [tkn.strip() for tkn in line.split('uuid:')]
ret['label'] = label != 'none' and label or None
ret['uuid'] = uuid_
continue # depends on [control=['if'], data=[]]
if line.startswith('\tdevid'):
dev_data = re.split('\\s+', line.strip())
dev_id = dev_data[-1]
ret[dev_id] = {'device_id': dev_data[1], 'size': dev_data[3], 'used': dev_data[5]} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return ret |
def events(self, year, simple=False, keys=False):
    """
    Get a list of events in a given year.

    :param year: Year to get events from.
    :param keys: Get only keys of the events rather than full data.
    :param simple: Get only vital data.
    :return: List of string event keys or Event objects.
    """
    if keys:
        return self._get('events/%s/keys' % year)
    suffix = '/simple' if simple else ''
    raw_events = self._get('events/%s%s' % (year, suffix))
    return [Event(raw) for raw in raw_events]
constant[
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
]
if name[keys] begin[:]
return[call[name[self]._get, parameter[binary_operation[constant[events/%s/keys] <ast.Mod object at 0x7da2590d6920> name[year]]]]] | keyword[def] identifier[events] ( identifier[self] , identifier[year] , identifier[simple] = keyword[False] , identifier[keys] = keyword[False] ):
literal[string]
keyword[if] identifier[keys] :
keyword[return] identifier[self] . identifier[_get] ( literal[string] % identifier[year] )
keyword[else] :
keyword[return] [ identifier[Event] ( identifier[raw] ) keyword[for] identifier[raw] keyword[in] identifier[self] . identifier[_get] ( literal[string] %( identifier[year] , literal[string] keyword[if] identifier[simple] keyword[else] literal[string] ))] | def events(self, year, simple=False, keys=False):
"""
Get a list of events in a given year.
:param year: Year to get events from.
:param keys: Get only keys of the events rather than full data.
:param simple: Get only vital data.
:return: List of string event keys or Event objects.
"""
if keys:
return self._get('events/%s/keys' % year) # depends on [control=['if'], data=[]]
else:
return [Event(raw) for raw in self._get('events/%s%s' % (year, '/simple' if simple else ''))] |
def get_deployments(self, project, definition_id=None, definition_environment_id=None, created_by=None, min_modified_time=None, max_modified_time=None, deployment_status=None, operation_status=None, latest_attempts_only=None, query_order=None, top=None, continuation_token=None, created_for=None, min_started_time=None, max_started_time=None, source_branch=None):
    """GetDeployments.
    :param str project: Project ID or project name
    :param int definition_id:
    :param int definition_environment_id:
    :param str created_by:
    :param datetime min_modified_time:
    :param datetime max_modified_time:
    :param str deployment_status:
    :param str operation_status:
    :param bool latest_attempts_only:
    :param str query_order:
    :param int top:
    :param int continuation_token:
    :param str created_for:
    :param datetime min_started_time:
    :param datetime max_started_time:
    :param str source_branch:
    :rtype: [Deployment]
    """
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    # Map each optional filter onto its wire name and serialization type;
    # parameters left as None are omitted from the query string entirely.
    optional_filters = [
        ('definitionId', 'definition_id', definition_id, 'int'),
        ('definitionEnvironmentId', 'definition_environment_id', definition_environment_id, 'int'),
        ('createdBy', 'created_by', created_by, 'str'),
        ('minModifiedTime', 'min_modified_time', min_modified_time, 'iso-8601'),
        ('maxModifiedTime', 'max_modified_time', max_modified_time, 'iso-8601'),
        ('deploymentStatus', 'deployment_status', deployment_status, 'str'),
        ('operationStatus', 'operation_status', operation_status, 'str'),
        ('latestAttemptsOnly', 'latest_attempts_only', latest_attempts_only, 'bool'),
        ('queryOrder', 'query_order', query_order, 'str'),
        ('$top', 'top', top, 'int'),
        ('continuationToken', 'continuation_token', continuation_token, 'int'),
        ('createdFor', 'created_for', created_for, 'str'),
        ('minStartedTime', 'min_started_time', min_started_time, 'iso-8601'),
        ('maxStartedTime', 'max_started_time', max_started_time, 'iso-8601'),
        ('sourceBranch', 'source_branch', source_branch, 'str'),
    ]
    query_parameters = {}
    for wire_name, param_name, value, type_name in optional_filters:
        if value is not None:
            query_parameters[wire_name] = self._serialize.query(param_name, value, type_name)
    response = self._send(http_method='GET',
                          location_id='b005ef73-cddc-448e-9ba2-5193bf36b19f',
                          version='5.0',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[Deployment]', self._unwrap_collection(response))
constant[GetDeployments.
:param str project: Project ID or project name
:param int definition_id:
:param int definition_environment_id:
:param str created_by:
:param datetime min_modified_time:
:param datetime max_modified_time:
:param str deployment_status:
:param str operation_status:
:param bool latest_attempts_only:
:param str query_order:
:param int top:
:param int continuation_token:
:param str created_for:
:param datetime min_started_time:
:param datetime max_started_time:
:param str source_branch:
:rtype: [Deployment]
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[str]]]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[definition_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[definitionId]] assign[=] call[name[self]._serialize.query, parameter[constant[definition_id], name[definition_id], constant[int]]]
if compare[name[definition_environment_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[definitionEnvironmentId]] assign[=] call[name[self]._serialize.query, parameter[constant[definition_environment_id], name[definition_environment_id], constant[int]]]
if compare[name[created_by] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[createdBy]] assign[=] call[name[self]._serialize.query, parameter[constant[created_by], name[created_by], constant[str]]]
if compare[name[min_modified_time] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[minModifiedTime]] assign[=] call[name[self]._serialize.query, parameter[constant[min_modified_time], name[min_modified_time], constant[iso-8601]]]
if compare[name[max_modified_time] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[maxModifiedTime]] assign[=] call[name[self]._serialize.query, parameter[constant[max_modified_time], name[max_modified_time], constant[iso-8601]]]
if compare[name[deployment_status] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[deploymentStatus]] assign[=] call[name[self]._serialize.query, parameter[constant[deployment_status], name[deployment_status], constant[str]]]
if compare[name[operation_status] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[operationStatus]] assign[=] call[name[self]._serialize.query, parameter[constant[operation_status], name[operation_status], constant[str]]]
if compare[name[latest_attempts_only] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[latestAttemptsOnly]] assign[=] call[name[self]._serialize.query, parameter[constant[latest_attempts_only], name[latest_attempts_only], constant[bool]]]
if compare[name[query_order] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[queryOrder]] assign[=] call[name[self]._serialize.query, parameter[constant[query_order], name[query_order], constant[str]]]
if compare[name[top] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[$top]] assign[=] call[name[self]._serialize.query, parameter[constant[top], name[top], constant[int]]]
if compare[name[continuation_token] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[continuationToken]] assign[=] call[name[self]._serialize.query, parameter[constant[continuation_token], name[continuation_token], constant[int]]]
if compare[name[created_for] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[createdFor]] assign[=] call[name[self]._serialize.query, parameter[constant[created_for], name[created_for], constant[str]]]
if compare[name[min_started_time] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[minStartedTime]] assign[=] call[name[self]._serialize.query, parameter[constant[min_started_time], name[min_started_time], constant[iso-8601]]]
if compare[name[max_started_time] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[maxStartedTime]] assign[=] call[name[self]._serialize.query, parameter[constant[max_started_time], name[max_started_time], constant[iso-8601]]]
if compare[name[source_branch] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[sourceBranch]] assign[=] call[name[self]._serialize.query, parameter[constant[source_branch], name[source_branch], constant[str]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[[Deployment]], call[name[self]._unwrap_collection, parameter[name[response]]]]]] | keyword[def] identifier[get_deployments] ( identifier[self] , identifier[project] , identifier[definition_id] = keyword[None] , identifier[definition_environment_id] = keyword[None] , identifier[created_by] = keyword[None] , identifier[min_modified_time] = keyword[None] , identifier[max_modified_time] = keyword[None] , identifier[deployment_status] = keyword[None] , identifier[operation_status] = keyword[None] , identifier[latest_attempts_only] = keyword[None] , identifier[query_order] = keyword[None] , identifier[top] = keyword[None] , identifier[continuation_token] = keyword[None] , identifier[created_for] = keyword[None] , identifier[min_started_time] = keyword[None] , identifier[max_started_time] = keyword[None] , identifier[source_branch] = keyword[None] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
identifier[query_parameters] ={}
keyword[if] identifier[definition_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[definition_id] , literal[string] )
keyword[if] identifier[definition_environment_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[definition_environment_id] , literal[string] )
keyword[if] identifier[created_by] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[created_by] , literal[string] )
keyword[if] identifier[min_modified_time] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[min_modified_time] , literal[string] )
keyword[if] identifier[max_modified_time] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[max_modified_time] , literal[string] )
keyword[if] identifier[deployment_status] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[deployment_status] , literal[string] )
keyword[if] identifier[operation_status] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[operation_status] , literal[string] )
keyword[if] identifier[latest_attempts_only] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[latest_attempts_only] , literal[string] )
keyword[if] identifier[query_order] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[query_order] , literal[string] )
keyword[if] identifier[top] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[top] , literal[string] )
keyword[if] identifier[continuation_token] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[continuation_token] , literal[string] )
keyword[if] identifier[created_for] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[created_for] , literal[string] )
keyword[if] identifier[min_started_time] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[min_started_time] , literal[string] )
keyword[if] identifier[max_started_time] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[max_started_time] , literal[string] )
keyword[if] identifier[source_branch] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[source_branch] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] )) | def get_deployments(self, project, definition_id=None, definition_environment_id=None, created_by=None, min_modified_time=None, max_modified_time=None, deployment_status=None, operation_status=None, latest_attempts_only=None, query_order=None, top=None, continuation_token=None, created_for=None, min_started_time=None, max_started_time=None, source_branch=None):
"""GetDeployments.
:param str project: Project ID or project name
:param int definition_id:
:param int definition_environment_id:
:param str created_by:
:param datetime min_modified_time:
:param datetime max_modified_time:
:param str deployment_status:
:param str operation_status:
:param bool latest_attempts_only:
:param str query_order:
:param int top:
:param int continuation_token:
:param str created_for:
:param datetime min_started_time:
:param datetime max_started_time:
:param str source_branch:
:rtype: [Deployment]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str') # depends on [control=['if'], data=['project']]
query_parameters = {}
if definition_id is not None:
query_parameters['definitionId'] = self._serialize.query('definition_id', definition_id, 'int') # depends on [control=['if'], data=['definition_id']]
if definition_environment_id is not None:
query_parameters['definitionEnvironmentId'] = self._serialize.query('definition_environment_id', definition_environment_id, 'int') # depends on [control=['if'], data=['definition_environment_id']]
if created_by is not None:
query_parameters['createdBy'] = self._serialize.query('created_by', created_by, 'str') # depends on [control=['if'], data=['created_by']]
if min_modified_time is not None:
query_parameters['minModifiedTime'] = self._serialize.query('min_modified_time', min_modified_time, 'iso-8601') # depends on [control=['if'], data=['min_modified_time']]
if max_modified_time is not None:
query_parameters['maxModifiedTime'] = self._serialize.query('max_modified_time', max_modified_time, 'iso-8601') # depends on [control=['if'], data=['max_modified_time']]
if deployment_status is not None:
query_parameters['deploymentStatus'] = self._serialize.query('deployment_status', deployment_status, 'str') # depends on [control=['if'], data=['deployment_status']]
if operation_status is not None:
query_parameters['operationStatus'] = self._serialize.query('operation_status', operation_status, 'str') # depends on [control=['if'], data=['operation_status']]
if latest_attempts_only is not None:
query_parameters['latestAttemptsOnly'] = self._serialize.query('latest_attempts_only', latest_attempts_only, 'bool') # depends on [control=['if'], data=['latest_attempts_only']]
if query_order is not None:
query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str') # depends on [control=['if'], data=['query_order']]
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int') # depends on [control=['if'], data=['top']]
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'int') # depends on [control=['if'], data=['continuation_token']]
if created_for is not None:
query_parameters['createdFor'] = self._serialize.query('created_for', created_for, 'str') # depends on [control=['if'], data=['created_for']]
if min_started_time is not None:
query_parameters['minStartedTime'] = self._serialize.query('min_started_time', min_started_time, 'iso-8601') # depends on [control=['if'], data=['min_started_time']]
if max_started_time is not None:
query_parameters['maxStartedTime'] = self._serialize.query('max_started_time', max_started_time, 'iso-8601') # depends on [control=['if'], data=['max_started_time']]
if source_branch is not None:
query_parameters['sourceBranch'] = self._serialize.query('source_branch', source_branch, 'str') # depends on [control=['if'], data=['source_branch']]
response = self._send(http_method='GET', location_id='b005ef73-cddc-448e-9ba2-5193bf36b19f', version='5.0', route_values=route_values, query_parameters=query_parameters)
return self._deserialize('[Deployment]', self._unwrap_collection(response)) |
def copy_files_to(src_fpath_list, dst_dpath=None, dst_fpath_list=None,
                  overwrite=False, verbose=True, veryverbose=False):
    """
    Copies many files in parallel.

    Destinations are either derived from ``dst_dpath`` (one file per source
    basename) or given explicitly via ``dst_fpath_list``; the two options
    are mutually exclusive. Existing destinations are skipped unless
    ``overwrite`` is True.

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_path import *
        >>> import utool as ut
        >>> src_fpath_list = [ut.grab_test_imgpath(key)
        >>>                   for key in ut.get_valid_test_imgkeys()]
        >>> dst_dpath = ut.get_app_resource_dir('utool', 'filecopy_tests')
        >>> copy_files_to(src_fpath_list, dst_dpath, overwrite=False,
        >>>               verbose=True)
    """
    from utool import util_list
    from utool import util_parallel
    if verbose:
        print('[util_path] +--- COPYING FILES ---')
        print('[util_path] * len(src_fpath_list) = %r' % (len(src_fpath_list)))
        print('[util_path] * dst_dpath = %r' % (dst_dpath,))
    if dst_fpath_list is not None:
        # Explicit destinations must pair up one-to-one with the sources.
        assert dst_dpath is None, 'dst_dpath was specified but overrided'
        assert len(dst_fpath_list) == len(src_fpath_list), 'bad correspondence'
    else:
        # Derive destinations by joining each source basename onto dst_dpath.
        ensuredir(dst_dpath, verbose=veryverbose)
        dst_fpath_list = [join(dst_dpath, basename(fpath))
                          for fpath in src_fpath_list]
    exists_list = [exists(dst_fpath) for dst_fpath in dst_fpath_list]
    if verbose:
        print('[util_path] * %d files already exist dst_dpath' % (
            sum(exists_list),))
    if overwrite:
        src_fpath_list_ = src_fpath_list
        dst_fpath_list_ = dst_fpath_list
    else:
        # Keep only the pairs whose destination does not exist yet.
        notexists_list = util_list.not_list(exists_list)
        dst_fpath_list_ = util_list.compress(dst_fpath_list, notexists_list)
        src_fpath_list_ = util_list.compress(src_fpath_list, notexists_list)
    args_list = zip(src_fpath_list_, dst_fpath_list_)
    _gen = util_parallel.generate2(_copy_worker, args_list,
                                   ntasks=len(src_fpath_list_))
    success_list = list(_gen)
    if verbose:
        print('[util_path] * Copied %d / %d' % (sum(success_list),
                                                len(src_fpath_list)))
        print('[util_path] L___ DONE COPYING FILES ___')
constant[
parallel copier
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_path import *
>>> import utool as ut
>>> overwrite = False
>>> veryverbose = False
>>> verbose = True
>>> src_fpath_list = [ut.grab_test_imgpath(key)
>>> for key in ut.get_valid_test_imgkeys()]
>>> dst_dpath = ut.get_app_resource_dir('utool', 'filecopy_tests')
>>> copy_files_to(src_fpath_list, dst_dpath, overwrite=overwrite,
>>> verbose=verbose)
]
from relative_module[utool] import module[util_list]
from relative_module[utool] import module[util_parallel]
if name[verbose] begin[:]
call[name[print], parameter[constant[[util_path] +--- COPYING FILES ---]]]
call[name[print], parameter[binary_operation[constant[[util_path] * len(src_fpath_list) = %r] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[src_fpath_list]]]]]]
call[name[print], parameter[binary_operation[constant[[util_path] * dst_dpath = %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b245c4c0>]]]]]
if compare[name[dst_fpath_list] is constant[None]] begin[:]
call[name[ensuredir], parameter[name[dst_dpath]]]
variable[dst_fpath_list] assign[=] <ast.ListComp object at 0x7da1b245c7f0>
variable[exists_list] assign[=] call[name[list], parameter[call[name[map], parameter[name[exists], name[dst_fpath_list]]]]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[[util_path] * %d files already exist dst_dpath] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b245dae0>]]]]]
if <ast.UnaryOp object at 0x7da1b245ca00> begin[:]
variable[notexists_list] assign[=] call[name[util_list].not_list, parameter[name[exists_list]]]
variable[dst_fpath_list_] assign[=] call[name[util_list].compress, parameter[name[dst_fpath_list], name[notexists_list]]]
variable[src_fpath_list_] assign[=] call[name[util_list].compress, parameter[name[src_fpath_list], name[notexists_list]]]
variable[args_list] assign[=] call[name[zip], parameter[name[src_fpath_list_], name[dst_fpath_list_]]]
variable[_gen] assign[=] call[name[util_parallel].generate2, parameter[name[_copy_worker], name[args_list]]]
variable[success_list] assign[=] call[name[list], parameter[name[_gen]]]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[[util_path] * Copied %d / %d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b245e800>, <ast.Call object at 0x7da1b245dc90>]]]]]
call[name[print], parameter[constant[[util_path] L___ DONE COPYING FILES ___]]] | keyword[def] identifier[copy_files_to] ( identifier[src_fpath_list] , identifier[dst_dpath] = keyword[None] , identifier[dst_fpath_list] = keyword[None] ,
identifier[overwrite] = keyword[False] , identifier[verbose] = keyword[True] , identifier[veryverbose] = keyword[False] ):
literal[string]
keyword[from] identifier[utool] keyword[import] identifier[util_list]
keyword[from] identifier[utool] keyword[import] identifier[util_parallel]
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] %( identifier[len] ( identifier[src_fpath_list] )))
identifier[print] ( literal[string] %( identifier[dst_dpath] ,))
keyword[if] identifier[dst_fpath_list] keyword[is] keyword[None] :
identifier[ensuredir] ( identifier[dst_dpath] , identifier[verbose] = identifier[veryverbose] )
identifier[dst_fpath_list] =[ identifier[join] ( identifier[dst_dpath] , identifier[basename] ( identifier[fpath] ))
keyword[for] identifier[fpath] keyword[in] identifier[src_fpath_list] ]
keyword[else] :
keyword[assert] identifier[dst_dpath] keyword[is] keyword[None] , literal[string]
keyword[assert] identifier[len] ( identifier[dst_fpath_list] )== identifier[len] ( identifier[src_fpath_list] ), literal[string]
identifier[exists_list] = identifier[list] ( identifier[map] ( identifier[exists] , identifier[dst_fpath_list] ))
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] %(
identifier[sum] ( identifier[exists_list] ),))
keyword[if] keyword[not] identifier[overwrite] :
identifier[notexists_list] = identifier[util_list] . identifier[not_list] ( identifier[exists_list] )
identifier[dst_fpath_list_] = identifier[util_list] . identifier[compress] ( identifier[dst_fpath_list] , identifier[notexists_list] )
identifier[src_fpath_list_] = identifier[util_list] . identifier[compress] ( identifier[src_fpath_list] , identifier[notexists_list] )
keyword[else] :
identifier[dst_fpath_list_] = identifier[dst_fpath_list]
identifier[src_fpath_list_] = identifier[src_fpath_list]
identifier[args_list] = identifier[zip] ( identifier[src_fpath_list_] , identifier[dst_fpath_list_] )
identifier[_gen] = identifier[util_parallel] . identifier[generate2] ( identifier[_copy_worker] , identifier[args_list] ,
identifier[ntasks] = identifier[len] ( identifier[src_fpath_list_] ))
identifier[success_list] = identifier[list] ( identifier[_gen] )
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] %( identifier[sum] ( identifier[success_list] ),
identifier[len] ( identifier[src_fpath_list] )))
identifier[print] ( literal[string] ) | def copy_files_to(src_fpath_list, dst_dpath=None, dst_fpath_list=None, overwrite=False, verbose=True, veryverbose=False):
"""
parallel copier
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_path import *
>>> import utool as ut
>>> overwrite = False
>>> veryverbose = False
>>> verbose = True
>>> src_fpath_list = [ut.grab_test_imgpath(key)
>>> for key in ut.get_valid_test_imgkeys()]
>>> dst_dpath = ut.get_app_resource_dir('utool', 'filecopy_tests')
>>> copy_files_to(src_fpath_list, dst_dpath, overwrite=overwrite,
>>> verbose=verbose)
"""
from utool import util_list
from utool import util_parallel
if verbose:
print('[util_path] +--- COPYING FILES ---')
print('[util_path] * len(src_fpath_list) = %r' % len(src_fpath_list))
print('[util_path] * dst_dpath = %r' % (dst_dpath,)) # depends on [control=['if'], data=[]]
if dst_fpath_list is None:
ensuredir(dst_dpath, verbose=veryverbose)
dst_fpath_list = [join(dst_dpath, basename(fpath)) for fpath in src_fpath_list] # depends on [control=['if'], data=['dst_fpath_list']]
else:
assert dst_dpath is None, 'dst_dpath was specified but overrided'
assert len(dst_fpath_list) == len(src_fpath_list), 'bad correspondence'
exists_list = list(map(exists, dst_fpath_list))
if verbose:
print('[util_path] * %d files already exist dst_dpath' % (sum(exists_list),)) # depends on [control=['if'], data=[]]
if not overwrite:
notexists_list = util_list.not_list(exists_list)
dst_fpath_list_ = util_list.compress(dst_fpath_list, notexists_list)
src_fpath_list_ = util_list.compress(src_fpath_list, notexists_list) # depends on [control=['if'], data=[]]
else:
dst_fpath_list_ = dst_fpath_list
src_fpath_list_ = src_fpath_list
args_list = zip(src_fpath_list_, dst_fpath_list_)
_gen = util_parallel.generate2(_copy_worker, args_list, ntasks=len(src_fpath_list_))
success_list = list(_gen)
#success_list = copy_list(src_fpath_list_, dst_fpath_list_)
if verbose:
print('[util_path] * Copied %d / %d' % (sum(success_list), len(src_fpath_list)))
print('[util_path] L___ DONE COPYING FILES ___') # depends on [control=['if'], data=[]] |
def fit(self, X, C):
"""
Fit one weighted classifier per class
Parameters
----------
X : array (n_samples, n_features)
The data on which to fit a cost-sensitive classifier.
C : array (n_samples, n_classes)
The cost of predicting each label for each observation (more means worse).
"""
X, C = _check_fit_input(X, C)
C = np.asfortranarray(C)
self.nclasses = C.shape[1]
self.classifiers = [deepcopy(self.base_classifier) for i in range(self.nclasses)]
if not self.weight_simple_diff:
C = WeightedAllPairs._calculate_v(self, C)
Parallel(n_jobs=self.njobs, verbose=0, require="sharedmem")(delayed(self._fit)(c, X, C) for c in range(self.nclasses))
return self | def function[fit, parameter[self, X, C]]:
constant[
Fit one weighted classifier per class
Parameters
----------
X : array (n_samples, n_features)
The data on which to fit a cost-sensitive classifier.
C : array (n_samples, n_classes)
The cost of predicting each label for each observation (more means worse).
]
<ast.Tuple object at 0x7da18fe908e0> assign[=] call[name[_check_fit_input], parameter[name[X], name[C]]]
variable[C] assign[=] call[name[np].asfortranarray, parameter[name[C]]]
name[self].nclasses assign[=] call[name[C].shape][constant[1]]
name[self].classifiers assign[=] <ast.ListComp object at 0x7da18fe91420>
if <ast.UnaryOp object at 0x7da18fe91300> begin[:]
variable[C] assign[=] call[name[WeightedAllPairs]._calculate_v, parameter[name[self], name[C]]]
call[call[name[Parallel], parameter[]], parameter[<ast.GeneratorExp object at 0x7da18bccaa70>]]
return[name[self]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[C] ):
literal[string]
identifier[X] , identifier[C] = identifier[_check_fit_input] ( identifier[X] , identifier[C] )
identifier[C] = identifier[np] . identifier[asfortranarray] ( identifier[C] )
identifier[self] . identifier[nclasses] = identifier[C] . identifier[shape] [ literal[int] ]
identifier[self] . identifier[classifiers] =[ identifier[deepcopy] ( identifier[self] . identifier[base_classifier] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nclasses] )]
keyword[if] keyword[not] identifier[self] . identifier[weight_simple_diff] :
identifier[C] = identifier[WeightedAllPairs] . identifier[_calculate_v] ( identifier[self] , identifier[C] )
identifier[Parallel] ( identifier[n_jobs] = identifier[self] . identifier[njobs] , identifier[verbose] = literal[int] , identifier[require] = literal[string] )( identifier[delayed] ( identifier[self] . identifier[_fit] )( identifier[c] , identifier[X] , identifier[C] ) keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[self] . identifier[nclasses] ))
keyword[return] identifier[self] | def fit(self, X, C):
"""
Fit one weighted classifier per class
Parameters
----------
X : array (n_samples, n_features)
The data on which to fit a cost-sensitive classifier.
C : array (n_samples, n_classes)
The cost of predicting each label for each observation (more means worse).
"""
(X, C) = _check_fit_input(X, C)
C = np.asfortranarray(C)
self.nclasses = C.shape[1]
self.classifiers = [deepcopy(self.base_classifier) for i in range(self.nclasses)]
if not self.weight_simple_diff:
C = WeightedAllPairs._calculate_v(self, C) # depends on [control=['if'], data=[]]
Parallel(n_jobs=self.njobs, verbose=0, require='sharedmem')((delayed(self._fit)(c, X, C) for c in range(self.nclasses)))
return self |
def update_firmware(node):
"""Performs SUM based firmware update on the node.
This method performs SUM firmware update by mounting the
SPP ISO on the node. It performs firmware update on all or
some of the firmware components.
:param node: A node object of type dict.
:returns: Operation Status string.
:raises: SUMOperationError, when the vmedia device is not found or
when the mount operation fails or when the image validation fails.
:raises: IloConnectionError, when the iLO connection fails.
:raises: IloError, when vmedia eject or insert operation fails.
"""
sum_update_iso = node['clean_step']['args'].get('url')
# Validates the http image reference for SUM update ISO.
try:
utils.validate_href(sum_update_iso)
except exception.ImageRefValidationFailed as e:
raise exception.SUMOperationError(reason=e)
# Ejects the CDROM device in the iLO and inserts the SUM update ISO
# to the CDROM device.
info = node.get('driver_info')
ilo_object = client.IloClient(info.get('ilo_address'),
info.get('ilo_username'),
info.get('ilo_password'))
ilo_object.eject_virtual_media('CDROM')
ilo_object.insert_virtual_media(sum_update_iso, 'CDROM')
# Waits for the OS to detect the disk and update the label file. SPP ISO
# is identified by matching its label.
time.sleep(WAIT_TIME_DISK_LABEL_TO_BE_VISIBLE)
vmedia_device_dir = "/dev/disk/by-label/"
for file in os.listdir(vmedia_device_dir):
if fnmatch.fnmatch(file, 'SPP*'):
vmedia_device_file = os.path.join(vmedia_device_dir, file)
if not os.path.exists(vmedia_device_file):
msg = "Unable to find the virtual media device for SUM"
raise exception.SUMOperationError(reason=msg)
# Validates the SPP ISO image for any file corruption using the checksum
# of the ISO file.
expected_checksum = node['clean_step']['args'].get('checksum')
try:
utils.verify_image_checksum(vmedia_device_file, expected_checksum)
except exception.ImageRefValidationFailed as e:
raise exception.SUMOperationError(reason=e)
# Mounts SPP ISO on a temporary directory.
vmedia_mount_point = tempfile.mkdtemp()
try:
try:
processutils.execute("mount", vmedia_device_file,
vmedia_mount_point)
except processutils.ProcessExecutionError as e:
msg = ("Unable to mount virtual media device %(device)s: "
"%(error)s" % {'device': vmedia_device_file, 'error': e})
raise exception.SUMOperationError(reason=msg)
# Executes the SUM based firmware update by passing the 'smartupdate'
# executable path if exists else 'hpsum' executable path and the
# components specified (if any).
sum_file_path = os.path.join(vmedia_mount_point, SUM_LOCATION)
if not os.path.exists(sum_file_path):
sum_file_path = os.path.join(vmedia_mount_point, HPSUM_LOCATION)
components = node['clean_step']['args'].get('components')
result = _execute_sum(sum_file_path, vmedia_mount_point,
components=components)
processutils.trycmd("umount", vmedia_mount_point)
finally:
shutil.rmtree(vmedia_mount_point, ignore_errors=True)
return result | def function[update_firmware, parameter[node]]:
constant[Performs SUM based firmware update on the node.
This method performs SUM firmware update by mounting the
SPP ISO on the node. It performs firmware update on all or
some of the firmware components.
:param node: A node object of type dict.
:returns: Operation Status string.
:raises: SUMOperationError, when the vmedia device is not found or
when the mount operation fails or when the image validation fails.
:raises: IloConnectionError, when the iLO connection fails.
:raises: IloError, when vmedia eject or insert operation fails.
]
variable[sum_update_iso] assign[=] call[call[call[name[node]][constant[clean_step]]][constant[args]].get, parameter[constant[url]]]
<ast.Try object at 0x7da1b1a2d8d0>
variable[info] assign[=] call[name[node].get, parameter[constant[driver_info]]]
variable[ilo_object] assign[=] call[name[client].IloClient, parameter[call[name[info].get, parameter[constant[ilo_address]]], call[name[info].get, parameter[constant[ilo_username]]], call[name[info].get, parameter[constant[ilo_password]]]]]
call[name[ilo_object].eject_virtual_media, parameter[constant[CDROM]]]
call[name[ilo_object].insert_virtual_media, parameter[name[sum_update_iso], constant[CDROM]]]
call[name[time].sleep, parameter[name[WAIT_TIME_DISK_LABEL_TO_BE_VISIBLE]]]
variable[vmedia_device_dir] assign[=] constant[/dev/disk/by-label/]
for taget[name[file]] in starred[call[name[os].listdir, parameter[name[vmedia_device_dir]]]] begin[:]
if call[name[fnmatch].fnmatch, parameter[name[file], constant[SPP*]]] begin[:]
variable[vmedia_device_file] assign[=] call[name[os].path.join, parameter[name[vmedia_device_dir], name[file]]]
if <ast.UnaryOp object at 0x7da1b1a2ed70> begin[:]
variable[msg] assign[=] constant[Unable to find the virtual media device for SUM]
<ast.Raise object at 0x7da1b1a2ebc0>
variable[expected_checksum] assign[=] call[call[call[name[node]][constant[clean_step]]][constant[args]].get, parameter[constant[checksum]]]
<ast.Try object at 0x7da1b1a2e8c0>
variable[vmedia_mount_point] assign[=] call[name[tempfile].mkdtemp, parameter[]]
<ast.Try object at 0x7da1b1a2e4a0>
return[name[result]] | keyword[def] identifier[update_firmware] ( identifier[node] ):
literal[string]
identifier[sum_update_iso] = identifier[node] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] )
keyword[try] :
identifier[utils] . identifier[validate_href] ( identifier[sum_update_iso] )
keyword[except] identifier[exception] . identifier[ImageRefValidationFailed] keyword[as] identifier[e] :
keyword[raise] identifier[exception] . identifier[SUMOperationError] ( identifier[reason] = identifier[e] )
identifier[info] = identifier[node] . identifier[get] ( literal[string] )
identifier[ilo_object] = identifier[client] . identifier[IloClient] ( identifier[info] . identifier[get] ( literal[string] ),
identifier[info] . identifier[get] ( literal[string] ),
identifier[info] . identifier[get] ( literal[string] ))
identifier[ilo_object] . identifier[eject_virtual_media] ( literal[string] )
identifier[ilo_object] . identifier[insert_virtual_media] ( identifier[sum_update_iso] , literal[string] )
identifier[time] . identifier[sleep] ( identifier[WAIT_TIME_DISK_LABEL_TO_BE_VISIBLE] )
identifier[vmedia_device_dir] = literal[string]
keyword[for] identifier[file] keyword[in] identifier[os] . identifier[listdir] ( identifier[vmedia_device_dir] ):
keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[file] , literal[string] ):
identifier[vmedia_device_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[vmedia_device_dir] , identifier[file] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[vmedia_device_file] ):
identifier[msg] = literal[string]
keyword[raise] identifier[exception] . identifier[SUMOperationError] ( identifier[reason] = identifier[msg] )
identifier[expected_checksum] = identifier[node] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] )
keyword[try] :
identifier[utils] . identifier[verify_image_checksum] ( identifier[vmedia_device_file] , identifier[expected_checksum] )
keyword[except] identifier[exception] . identifier[ImageRefValidationFailed] keyword[as] identifier[e] :
keyword[raise] identifier[exception] . identifier[SUMOperationError] ( identifier[reason] = identifier[e] )
identifier[vmedia_mount_point] = identifier[tempfile] . identifier[mkdtemp] ()
keyword[try] :
keyword[try] :
identifier[processutils] . identifier[execute] ( literal[string] , identifier[vmedia_device_file] ,
identifier[vmedia_mount_point] )
keyword[except] identifier[processutils] . identifier[ProcessExecutionError] keyword[as] identifier[e] :
identifier[msg] =( literal[string]
literal[string] %{ literal[string] : identifier[vmedia_device_file] , literal[string] : identifier[e] })
keyword[raise] identifier[exception] . identifier[SUMOperationError] ( identifier[reason] = identifier[msg] )
identifier[sum_file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[vmedia_mount_point] , identifier[SUM_LOCATION] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[sum_file_path] ):
identifier[sum_file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[vmedia_mount_point] , identifier[HPSUM_LOCATION] )
identifier[components] = identifier[node] [ literal[string] ][ literal[string] ]. identifier[get] ( literal[string] )
identifier[result] = identifier[_execute_sum] ( identifier[sum_file_path] , identifier[vmedia_mount_point] ,
identifier[components] = identifier[components] )
identifier[processutils] . identifier[trycmd] ( literal[string] , identifier[vmedia_mount_point] )
keyword[finally] :
identifier[shutil] . identifier[rmtree] ( identifier[vmedia_mount_point] , identifier[ignore_errors] = keyword[True] )
keyword[return] identifier[result] | def update_firmware(node):
"""Performs SUM based firmware update on the node.
This method performs SUM firmware update by mounting the
SPP ISO on the node. It performs firmware update on all or
some of the firmware components.
:param node: A node object of type dict.
:returns: Operation Status string.
:raises: SUMOperationError, when the vmedia device is not found or
when the mount operation fails or when the image validation fails.
:raises: IloConnectionError, when the iLO connection fails.
:raises: IloError, when vmedia eject or insert operation fails.
"""
sum_update_iso = node['clean_step']['args'].get('url')
# Validates the http image reference for SUM update ISO.
try:
utils.validate_href(sum_update_iso) # depends on [control=['try'], data=[]]
except exception.ImageRefValidationFailed as e:
raise exception.SUMOperationError(reason=e) # depends on [control=['except'], data=['e']]
# Ejects the CDROM device in the iLO and inserts the SUM update ISO
# to the CDROM device.
info = node.get('driver_info')
ilo_object = client.IloClient(info.get('ilo_address'), info.get('ilo_username'), info.get('ilo_password'))
ilo_object.eject_virtual_media('CDROM')
ilo_object.insert_virtual_media(sum_update_iso, 'CDROM')
# Waits for the OS to detect the disk and update the label file. SPP ISO
# is identified by matching its label.
time.sleep(WAIT_TIME_DISK_LABEL_TO_BE_VISIBLE)
vmedia_device_dir = '/dev/disk/by-label/'
for file in os.listdir(vmedia_device_dir):
if fnmatch.fnmatch(file, 'SPP*'):
vmedia_device_file = os.path.join(vmedia_device_dir, file) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file']]
if not os.path.exists(vmedia_device_file):
msg = 'Unable to find the virtual media device for SUM'
raise exception.SUMOperationError(reason=msg) # depends on [control=['if'], data=[]]
# Validates the SPP ISO image for any file corruption using the checksum
# of the ISO file.
expected_checksum = node['clean_step']['args'].get('checksum')
try:
utils.verify_image_checksum(vmedia_device_file, expected_checksum) # depends on [control=['try'], data=[]]
except exception.ImageRefValidationFailed as e:
raise exception.SUMOperationError(reason=e) # depends on [control=['except'], data=['e']]
# Mounts SPP ISO on a temporary directory.
vmedia_mount_point = tempfile.mkdtemp()
try:
try:
processutils.execute('mount', vmedia_device_file, vmedia_mount_point) # depends on [control=['try'], data=[]]
except processutils.ProcessExecutionError as e:
msg = 'Unable to mount virtual media device %(device)s: %(error)s' % {'device': vmedia_device_file, 'error': e}
raise exception.SUMOperationError(reason=msg) # depends on [control=['except'], data=['e']]
# Executes the SUM based firmware update by passing the 'smartupdate'
# executable path if exists else 'hpsum' executable path and the
# components specified (if any).
sum_file_path = os.path.join(vmedia_mount_point, SUM_LOCATION)
if not os.path.exists(sum_file_path):
sum_file_path = os.path.join(vmedia_mount_point, HPSUM_LOCATION) # depends on [control=['if'], data=[]]
components = node['clean_step']['args'].get('components')
result = _execute_sum(sum_file_path, vmedia_mount_point, components=components)
processutils.trycmd('umount', vmedia_mount_point) # depends on [control=['try'], data=[]]
finally:
shutil.rmtree(vmedia_mount_point, ignore_errors=True)
return result |
def list_(formatter, value, name, option, format):
"""Repeats the items of an array.
Spec: `{:[l[ist]:]item|spacer[|final_spacer[|two_spacer]]}`
Example::
>>> fruits = [u'apple', u'banana', u'coconut']
>>> smart.format(u'{fruits:list:{}|, |, and | and }', fruits=fruits)
u'apple, banana, and coconut'
>>> smart.format(u'{fruits:list:{}|, |, and | and }', fruits=fruits[:2])
u'apple and banana'
"""
if not format:
return
if not hasattr(value, '__getitem__') or isinstance(value, string_types):
return
words = format.split(u'|', 4)
num_words = len(words)
if num_words < 2:
# Require at least two words for item format and spacer.
return
num_items = len(value)
item_format = words[0]
# NOTE: SmartFormat.NET treats a not nested item format as the format
# string to format each items. For example, `x` will be treated as `{:x}`.
# But the original tells us this behavior has been deprecated so that
# should be removed. So SmartFormat for Python doesn't implement the
# behavior.
spacer = u'' if num_words < 2 else words[1]
final_spacer = spacer if num_words < 3 else words[2]
two_spacer = final_spacer if num_words < 4 else words[3]
buf = io.StringIO()
for x, item in enumerate(value):
if x == 0:
pass
elif x < num_items - 1:
buf.write(spacer)
elif x == 1:
buf.write(two_spacer)
else:
buf.write(final_spacer)
buf.write(formatter.format(item_format, item, index=x))
return buf.getvalue() | def function[list_, parameter[formatter, value, name, option, format]]:
constant[Repeats the items of an array.
Spec: `{:[l[ist]:]item|spacer[|final_spacer[|two_spacer]]}`
Example::
>>> fruits = [u'apple', u'banana', u'coconut']
>>> smart.format(u'{fruits:list:{}|, |, and | and }', fruits=fruits)
u'apple, banana, and coconut'
>>> smart.format(u'{fruits:list:{}|, |, and | and }', fruits=fruits[:2])
u'apple and banana'
]
if <ast.UnaryOp object at 0x7da2044c3400> begin[:]
return[None]
if <ast.BoolOp object at 0x7da2044c2f80> begin[:]
return[None]
variable[words] assign[=] call[name[format].split, parameter[constant[|], constant[4]]]
variable[num_words] assign[=] call[name[len], parameter[name[words]]]
if compare[name[num_words] less[<] constant[2]] begin[:]
return[None]
variable[num_items] assign[=] call[name[len], parameter[name[value]]]
variable[item_format] assign[=] call[name[words]][constant[0]]
variable[spacer] assign[=] <ast.IfExp object at 0x7da2044c19f0>
variable[final_spacer] assign[=] <ast.IfExp object at 0x7da2044c39a0>
variable[two_spacer] assign[=] <ast.IfExp object at 0x7da2044c1f30>
variable[buf] assign[=] call[name[io].StringIO, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2044c2050>, <ast.Name object at 0x7da2044c0460>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
if compare[name[x] equal[==] constant[0]] begin[:]
pass
call[name[buf].write, parameter[call[name[formatter].format, parameter[name[item_format], name[item]]]]]
return[call[name[buf].getvalue, parameter[]]] | keyword[def] identifier[list_] ( identifier[formatter] , identifier[value] , identifier[name] , identifier[option] , identifier[format] ):
literal[string]
keyword[if] keyword[not] identifier[format] :
keyword[return]
keyword[if] keyword[not] identifier[hasattr] ( identifier[value] , literal[string] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[string_types] ):
keyword[return]
identifier[words] = identifier[format] . identifier[split] ( literal[string] , literal[int] )
identifier[num_words] = identifier[len] ( identifier[words] )
keyword[if] identifier[num_words] < literal[int] :
keyword[return]
identifier[num_items] = identifier[len] ( identifier[value] )
identifier[item_format] = identifier[words] [ literal[int] ]
identifier[spacer] = literal[string] keyword[if] identifier[num_words] < literal[int] keyword[else] identifier[words] [ literal[int] ]
identifier[final_spacer] = identifier[spacer] keyword[if] identifier[num_words] < literal[int] keyword[else] identifier[words] [ literal[int] ]
identifier[two_spacer] = identifier[final_spacer] keyword[if] identifier[num_words] < literal[int] keyword[else] identifier[words] [ literal[int] ]
identifier[buf] = identifier[io] . identifier[StringIO] ()
keyword[for] identifier[x] , identifier[item] keyword[in] identifier[enumerate] ( identifier[value] ):
keyword[if] identifier[x] == literal[int] :
keyword[pass]
keyword[elif] identifier[x] < identifier[num_items] - literal[int] :
identifier[buf] . identifier[write] ( identifier[spacer] )
keyword[elif] identifier[x] == literal[int] :
identifier[buf] . identifier[write] ( identifier[two_spacer] )
keyword[else] :
identifier[buf] . identifier[write] ( identifier[final_spacer] )
identifier[buf] . identifier[write] ( identifier[formatter] . identifier[format] ( identifier[item_format] , identifier[item] , identifier[index] = identifier[x] ))
keyword[return] identifier[buf] . identifier[getvalue] () | def list_(formatter, value, name, option, format):
"""Repeats the items of an array.
Spec: `{:[l[ist]:]item|spacer[|final_spacer[|two_spacer]]}`
Example::
>>> fruits = [u'apple', u'banana', u'coconut']
>>> smart.format(u'{fruits:list:{}|, |, and | and }', fruits=fruits)
u'apple, banana, and coconut'
>>> smart.format(u'{fruits:list:{}|, |, and | and }', fruits=fruits[:2])
u'apple and banana'
"""
if not format:
return # depends on [control=['if'], data=[]]
if not hasattr(value, '__getitem__') or isinstance(value, string_types):
return # depends on [control=['if'], data=[]]
words = format.split(u'|', 4)
num_words = len(words)
if num_words < 2:
# Require at least two words for item format and spacer.
return # depends on [control=['if'], data=[]]
num_items = len(value)
item_format = words[0]
# NOTE: SmartFormat.NET treats a not nested item format as the format
# string to format each items. For example, `x` will be treated as `{:x}`.
# But the original tells us this behavior has been deprecated so that
# should be removed. So SmartFormat for Python doesn't implement the
# behavior.
spacer = u'' if num_words < 2 else words[1]
final_spacer = spacer if num_words < 3 else words[2]
two_spacer = final_spacer if num_words < 4 else words[3]
buf = io.StringIO()
for (x, item) in enumerate(value):
if x == 0:
pass # depends on [control=['if'], data=[]]
elif x < num_items - 1:
buf.write(spacer) # depends on [control=['if'], data=[]]
elif x == 1:
buf.write(two_spacer) # depends on [control=['if'], data=[]]
else:
buf.write(final_spacer)
buf.write(formatter.format(item_format, item, index=x)) # depends on [control=['for'], data=[]]
return buf.getvalue() |
def sample(self, frame):
"""Samples the given frame."""
frames = self.frame_stack(frame)
if frames:
frames.pop()
parent_stats = self.stats
for f in frames:
parent_stats = parent_stats.ensure_child(f.f_code, void)
stats = parent_stats.ensure_child(frame.f_code, RecordingStatistics)
stats.own_hits += 1 | def function[sample, parameter[self, frame]]:
constant[Samples the given frame.]
variable[frames] assign[=] call[name[self].frame_stack, parameter[name[frame]]]
if name[frames] begin[:]
call[name[frames].pop, parameter[]]
variable[parent_stats] assign[=] name[self].stats
for taget[name[f]] in starred[name[frames]] begin[:]
variable[parent_stats] assign[=] call[name[parent_stats].ensure_child, parameter[name[f].f_code, name[void]]]
variable[stats] assign[=] call[name[parent_stats].ensure_child, parameter[name[frame].f_code, name[RecordingStatistics]]]
<ast.AugAssign object at 0x7da1b12a84c0> | keyword[def] identifier[sample] ( identifier[self] , identifier[frame] ):
literal[string]
identifier[frames] = identifier[self] . identifier[frame_stack] ( identifier[frame] )
keyword[if] identifier[frames] :
identifier[frames] . identifier[pop] ()
identifier[parent_stats] = identifier[self] . identifier[stats]
keyword[for] identifier[f] keyword[in] identifier[frames] :
identifier[parent_stats] = identifier[parent_stats] . identifier[ensure_child] ( identifier[f] . identifier[f_code] , identifier[void] )
identifier[stats] = identifier[parent_stats] . identifier[ensure_child] ( identifier[frame] . identifier[f_code] , identifier[RecordingStatistics] )
identifier[stats] . identifier[own_hits] += literal[int] | def sample(self, frame):
"""Samples the given frame."""
frames = self.frame_stack(frame)
if frames:
frames.pop() # depends on [control=['if'], data=[]]
parent_stats = self.stats
for f in frames:
parent_stats = parent_stats.ensure_child(f.f_code, void) # depends on [control=['for'], data=['f']]
stats = parent_stats.ensure_child(frame.f_code, RecordingStatistics)
stats.own_hits += 1 |
def render(self, name, value, attrs=None, renderer=None):
    """
    Render the default widget and initialize select2.
    """
    # Default widget markup first, then an inline script that turns the
    # field into a select2 tag input. The emitted text is unchanged.
    widget = super(TagAutoComplete, self).render(name, value, attrs)
    script = '\n'.join([
        '<script type="text/javascript">',
        '(function($) {',
        '  $(document).ready(function() {',
        '    $("#id_%s").select2({' % name,
        '      width: "element",',
        '      maximumInputLength: 50,',
        '      tokenSeparators: [",", " "],',
        '      tags: %s' % json.dumps(self.get_tags()),
        '    });',
        '  });',
        '}(django.jQuery));',
        '</script>',
    ])
    return mark_safe(widget + '\n' + script)
constant[
Render the default widget and initialize select2.
]
variable[output] assign[=] list[[<ast.Call object at 0x7da1b1d766b0>]]
call[name[output].append, parameter[constant[<script type="text/javascript">]]]
call[name[output].append, parameter[constant[(function($) {]]]
call[name[output].append, parameter[constant[ $(document).ready(function() {]]]
call[name[output].append, parameter[binary_operation[constant[ $("#id_%s").select2({] <ast.Mod object at 0x7da2590d6920> name[name]]]]
call[name[output].append, parameter[constant[ width: "element",]]]
call[name[output].append, parameter[constant[ maximumInputLength: 50,]]]
call[name[output].append, parameter[constant[ tokenSeparators: [",", " "],]]]
call[name[output].append, parameter[binary_operation[constant[ tags: %s] <ast.Mod object at 0x7da2590d6920> call[name[json].dumps, parameter[call[name[self].get_tags, parameter[]]]]]]]
call[name[output].append, parameter[constant[ });]]]
call[name[output].append, parameter[constant[ });]]]
call[name[output].append, parameter[constant[}(django.jQuery));]]]
call[name[output].append, parameter[constant[</script>]]]
return[call[name[mark_safe], parameter[call[constant[
].join, parameter[name[output]]]]]] | keyword[def] identifier[render] ( identifier[self] , identifier[name] , identifier[value] , identifier[attrs] = keyword[None] , identifier[renderer] = keyword[None] ):
literal[string]
identifier[output] =[ identifier[super] ( identifier[TagAutoComplete] , identifier[self] ). identifier[render] ( identifier[name] , identifier[value] , identifier[attrs] )]
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] % identifier[name] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] % identifier[json] . identifier[dumps] ( identifier[self] . identifier[get_tags] ()))
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[return] identifier[mark_safe] ( literal[string] . identifier[join] ( identifier[output] )) | def render(self, name, value, attrs=None, renderer=None):
"""
Render the default widget and initialize select2.
"""
output = [super(TagAutoComplete, self).render(name, value, attrs)]
output.append('<script type="text/javascript">')
output.append('(function($) {')
output.append(' $(document).ready(function() {')
output.append(' $("#id_%s").select2({' % name)
output.append(' width: "element",')
output.append(' maximumInputLength: 50,')
output.append(' tokenSeparators: [",", " "],')
output.append(' tags: %s' % json.dumps(self.get_tags()))
output.append(' });')
output.append(' });')
output.append('}(django.jQuery));')
output.append('</script>')
return mark_safe('\n'.join(output)) |
def get_blockdata(self, x, z):
    """
    Return the decompressed binary data representing a chunk.

    May raise a RegionFileFormatError().
    If decompression of the data succeeds, all available data is returned,
    even if it is shorter than what is specified in the header (e.g. in case
    of a truncated file and non-compressed data).

    Raises:
        InconceivedChunk: the chunk is not present in the region.
        RegionHeaderError: the region header places the chunk somewhere
            impossible (inside the header, outside the file, zero length).
        ChunkHeaderError: the chunk's own header is inconsistent.
        ChunkDataError: the payload could not be decompressed/parsed.
    """
    # read metadata block
    m = self.metadata[x, z]
    # Reject chunks whose headers already prove they cannot be read.
    if m.status == STATUS_CHUNK_NOT_CREATED:
        raise InconceivedChunk("Chunk %d,%d is not present in region" % (x,z))
    elif m.status == STATUS_CHUNK_IN_HEADER:
        raise RegionHeaderError('Chunk %d,%d is in the region header' % (x,z))
    elif m.status == STATUS_CHUNK_OUT_OF_FILE and (m.length <= 1 or m.compression == None):
        # Chunk header is outside of the file.
        raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x,z))
    elif m.status == STATUS_CHUNK_ZERO_LENGTH:
        # Same message, but the error class reflects which header lied.
        if m.blocklength == 0:
            raise RegionHeaderError('Chunk %d,%d has zero length' % (x,z))
        else:
            raise ChunkHeaderError('Chunk %d,%d has zero length' % (x,z))
    elif m.blockstart * SECTOR_LENGTH + 5 >= self.size:
        raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x,z))
    # status is STATUS_CHUNK_OK, STATUS_CHUNK_MISMATCHED_LENGTHS, STATUS_CHUNK_OVERLAPPING
    # or STATUS_CHUNK_OUT_OF_FILE.
    # The chunk is always read, but in case of an error, the exception may be different
    # based on the status.
    err = None
    try:
        # offset comes in sectors of 4096 bytes + length bytes + compression byte
        self.file.seek(m.blockstart * SECTOR_LENGTH + 5)
        # Do not read past the length of the file.
        # The length in the file includes the compression byte, hence the -1.
        length = min(m.length - 1, self.size - (m.blockstart * SECTOR_LENGTH + 5))
        chunk = self.file.read(length)
        if (m.compression == COMPRESSION_GZIP):
            # Python 3.1 and earlier do not yet support gzip.decompress(chunk)
            f = gzip.GzipFile(fileobj=BytesIO(chunk))
            chunk = bytes(f.read())
            f.close()
        elif (m.compression == COMPRESSION_ZLIB):
            chunk = zlib.decompress(chunk)
        elif m.compression != COMPRESSION_NONE:
            raise ChunkDataError('Unknown chunk compression/format (%s)' % m.compression)
        return chunk
    except RegionFileFormatError:
        raise
    except Exception as e:
        # Deliberately catch the Exception and re-raise.
        # The details in gzip/zlib/nbt are irrelevant, just that the data is garbled.
        err = '%s' % e # avoid str(e) due to Unicode issues in Python 2.
    if err:
        # don't raise during exception handling to avoid the warning
        # "During handling of the above exception, another exception occurred".
        # Python 3.3 solution (see PEP 409 & 415): "raise ChunkDataError(str(e)) from None"
        if m.status == STATUS_CHUNK_MISMATCHED_LENGTHS:
            raise ChunkHeaderError('The length in region header and the length in the header of chunk %d,%d are incompatible' % (x,z))
        elif m.status == STATUS_CHUNK_OVERLAPPING:
            raise ChunkHeaderError('Chunk %d,%d is overlapping with another chunk' % (x,z))
        else:
            raise ChunkDataError(err)
constant[
Return the decompressed binary data representing a chunk.
May raise a RegionFileFormatError().
If decompression of the data succeeds, all available data is returned,
even if it is shorter than what is specified in the header (e.g. in case
of a truncated while and non-compressed data).
]
variable[m] assign[=] call[name[self].metadata][tuple[[<ast.Name object at 0x7da2101f50c0>, <ast.Name object at 0x7da2101f4f40>]]]
if compare[name[m].status equal[==] name[STATUS_CHUNK_NOT_CREATED]] begin[:]
<ast.Raise object at 0x7da204961e40>
variable[err] assign[=] constant[None]
<ast.Try object at 0x7da204961600>
if name[err] begin[:]
if compare[name[m].status equal[==] name[STATUS_CHUNK_MISMATCHED_LENGTHS]] begin[:]
<ast.Raise object at 0x7da18eb57bb0> | keyword[def] identifier[get_blockdata] ( identifier[self] , identifier[x] , identifier[z] ):
literal[string]
identifier[m] = identifier[self] . identifier[metadata] [ identifier[x] , identifier[z] ]
keyword[if] identifier[m] . identifier[status] == identifier[STATUS_CHUNK_NOT_CREATED] :
keyword[raise] identifier[InconceivedChunk] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[elif] identifier[m] . identifier[status] == identifier[STATUS_CHUNK_IN_HEADER] :
keyword[raise] identifier[RegionHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[elif] identifier[m] . identifier[status] == identifier[STATUS_CHUNK_OUT_OF_FILE] keyword[and] ( identifier[m] . identifier[length] <= literal[int] keyword[or] identifier[m] . identifier[compression] == keyword[None] ):
keyword[raise] identifier[RegionHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[elif] identifier[m] . identifier[status] == identifier[STATUS_CHUNK_ZERO_LENGTH] :
keyword[if] identifier[m] . identifier[blocklength] == literal[int] :
keyword[raise] identifier[RegionHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[else] :
keyword[raise] identifier[ChunkHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[elif] identifier[m] . identifier[blockstart] * identifier[SECTOR_LENGTH] + literal[int] >= identifier[self] . identifier[size] :
keyword[raise] identifier[RegionHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
identifier[err] = keyword[None]
keyword[try] :
identifier[self] . identifier[file] . identifier[seek] ( identifier[m] . identifier[blockstart] * identifier[SECTOR_LENGTH] + literal[int] )
identifier[length] = identifier[min] ( identifier[m] . identifier[length] - literal[int] , identifier[self] . identifier[size] -( identifier[m] . identifier[blockstart] * identifier[SECTOR_LENGTH] + literal[int] ))
identifier[chunk] = identifier[self] . identifier[file] . identifier[read] ( identifier[length] )
keyword[if] ( identifier[m] . identifier[compression] == identifier[COMPRESSION_GZIP] ):
identifier[f] = identifier[gzip] . identifier[GzipFile] ( identifier[fileobj] = identifier[BytesIO] ( identifier[chunk] ))
identifier[chunk] = identifier[bytes] ( identifier[f] . identifier[read] ())
identifier[f] . identifier[close] ()
keyword[elif] ( identifier[m] . identifier[compression] == identifier[COMPRESSION_ZLIB] ):
identifier[chunk] = identifier[zlib] . identifier[decompress] ( identifier[chunk] )
keyword[elif] identifier[m] . identifier[compression] != identifier[COMPRESSION_NONE] :
keyword[raise] identifier[ChunkDataError] ( literal[string] % identifier[m] . identifier[compression] )
keyword[return] identifier[chunk]
keyword[except] identifier[RegionFileFormatError] :
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[err] = literal[string] % identifier[e]
keyword[if] identifier[err] :
keyword[if] identifier[m] . identifier[status] == identifier[STATUS_CHUNK_MISMATCHED_LENGTHS] :
keyword[raise] identifier[ChunkHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[elif] identifier[m] . identifier[status] == identifier[STATUS_CHUNK_OVERLAPPING] :
keyword[raise] identifier[ChunkHeaderError] ( literal[string] %( identifier[x] , identifier[z] ))
keyword[else] :
keyword[raise] identifier[ChunkDataError] ( identifier[err] ) | def get_blockdata(self, x, z):
"""
Return the decompressed binary data representing a chunk.
May raise a RegionFileFormatError().
If decompression of the data succeeds, all available data is returned,
even if it is shorter than what is specified in the header (e.g. in case
of a truncated while and non-compressed data).
"""
# read metadata block
m = self.metadata[x, z]
if m.status == STATUS_CHUNK_NOT_CREATED:
raise InconceivedChunk('Chunk %d,%d is not present in region' % (x, z)) # depends on [control=['if'], data=[]]
elif m.status == STATUS_CHUNK_IN_HEADER:
raise RegionHeaderError('Chunk %d,%d is in the region header' % (x, z)) # depends on [control=['if'], data=[]]
elif m.status == STATUS_CHUNK_OUT_OF_FILE and (m.length <= 1 or m.compression == None):
# Chunk header is outside of the file.
raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x, z)) # depends on [control=['if'], data=[]]
elif m.status == STATUS_CHUNK_ZERO_LENGTH:
if m.blocklength == 0:
raise RegionHeaderError('Chunk %d,%d has zero length' % (x, z)) # depends on [control=['if'], data=[]]
else:
raise ChunkHeaderError('Chunk %d,%d has zero length' % (x, z)) # depends on [control=['if'], data=[]]
elif m.blockstart * SECTOR_LENGTH + 5 >= self.size:
raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x, z)) # depends on [control=['if'], data=[]]
# status is STATUS_CHUNK_OK, STATUS_CHUNK_MISMATCHED_LENGTHS, STATUS_CHUNK_OVERLAPPING
# or STATUS_CHUNK_OUT_OF_FILE.
# The chunk is always read, but in case of an error, the exception may be different
# based on the status.
err = None
try:
# offset comes in sectors of 4096 bytes + length bytes + compression byte
self.file.seek(m.blockstart * SECTOR_LENGTH + 5)
# Do not read past the length of the file.
# The length in the file includes the compression byte, hence the -1.
length = min(m.length - 1, self.size - (m.blockstart * SECTOR_LENGTH + 5))
chunk = self.file.read(length)
if m.compression == COMPRESSION_GZIP:
# Python 3.1 and earlier do not yet support gzip.decompress(chunk)
f = gzip.GzipFile(fileobj=BytesIO(chunk))
chunk = bytes(f.read())
f.close() # depends on [control=['if'], data=[]]
elif m.compression == COMPRESSION_ZLIB:
chunk = zlib.decompress(chunk) # depends on [control=['if'], data=[]]
elif m.compression != COMPRESSION_NONE:
raise ChunkDataError('Unknown chunk compression/format (%s)' % m.compression) # depends on [control=['if'], data=[]]
return chunk # depends on [control=['try'], data=[]]
except RegionFileFormatError:
raise # depends on [control=['except'], data=[]]
except Exception as e:
# Deliberately catch the Exception and re-raise.
# The details in gzip/zlib/nbt are irrelevant, just that the data is garbled.
err = '%s' % e # avoid str(e) due to Unicode issues in Python 2. # depends on [control=['except'], data=['e']]
if err: # don't raise during exception handling to avoid the warning
# "During handling of the above exception, another exception occurred".
# Python 3.3 solution (see PEP 409 & 415): "raise ChunkDataError(str(e)) from None"
if m.status == STATUS_CHUNK_MISMATCHED_LENGTHS:
raise ChunkHeaderError('The length in region header and the length in the header of chunk %d,%d are incompatible' % (x, z)) # depends on [control=['if'], data=[]]
elif m.status == STATUS_CHUNK_OVERLAPPING:
raise ChunkHeaderError('Chunk %d,%d is overlapping with another chunk' % (x, z)) # depends on [control=['if'], data=[]]
else:
raise ChunkDataError(err) # depends on [control=['if'], data=[]] |
def get_interpolated_value(self, energy, integrated=False):
    """
    Returns the COHP for a particular energy.
    Args:
        energy: Energy to return the COHP value for.
        integrated: If True, interpolate the ICOHP instead of the COHP.
    """
    result = {}
    for spin in self.cohp:
        # Pick the curve to interpolate: COHP by default, ICOHP on request.
        if not integrated:
            curve = self.cohp[spin]
        elif self.icohp is not None:
            curve = self.icohp[spin]
        else:
            raise ValueError("ICOHP is empty.")
        result[spin] = get_linear_interpolated_value(self.energies,
                                                    curve, energy)
    return result
constant[
Returns the COHP for a particular energy.
Args:
energy: Energy to return the COHP value for.
]
variable[inter] assign[=] dictionary[[], []]
for taget[name[spin]] in starred[name[self].cohp] begin[:]
if <ast.UnaryOp object at 0x7da1b21ae7d0> begin[:]
call[name[inter]][name[spin]] assign[=] call[name[get_linear_interpolated_value], parameter[name[self].energies, call[name[self].cohp][name[spin]], name[energy]]]
return[name[inter]] | keyword[def] identifier[get_interpolated_value] ( identifier[self] , identifier[energy] , identifier[integrated] = keyword[False] ):
literal[string]
identifier[inter] ={}
keyword[for] identifier[spin] keyword[in] identifier[self] . identifier[cohp] :
keyword[if] keyword[not] identifier[integrated] :
identifier[inter] [ identifier[spin] ]= identifier[get_linear_interpolated_value] ( identifier[self] . identifier[energies] ,
identifier[self] . identifier[cohp] [ identifier[spin] ],
identifier[energy] )
keyword[elif] identifier[self] . identifier[icohp] keyword[is] keyword[not] keyword[None] :
identifier[inter] [ identifier[spin] ]= identifier[get_linear_interpolated_value] ( identifier[self] . identifier[energies] ,
identifier[self] . identifier[icohp] [ identifier[spin] ],
identifier[energy] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[inter] | def get_interpolated_value(self, energy, integrated=False):
"""
Returns the COHP for a particular energy.
Args:
energy: Energy to return the COHP value for.
"""
inter = {}
for spin in self.cohp:
if not integrated:
inter[spin] = get_linear_interpolated_value(self.energies, self.cohp[spin], energy) # depends on [control=['if'], data=[]]
elif self.icohp is not None:
inter[spin] = get_linear_interpolated_value(self.energies, self.icohp[spin], energy) # depends on [control=['if'], data=[]]
else:
raise ValueError('ICOHP is empty.') # depends on [control=['for'], data=['spin']]
return inter |
def get_items_from_uuid(uuid, enrich_backend, ocean_backend):
    """Get all items that include uuid.

    Search the enriched index for every enriched item whose uuid fields
    contain `uuid`, collect the ids of the raw items that produced them,
    and fetch those raw items back from the ocean index.

    :param uuid: identity uuid to look for
    :param enrich_backend: backend giving access to the enriched index
    :param ocean_backend: backend giving access to the raw (ocean) index
    :returns: list of raw item documents (possibly empty)
    """
    import json  # local import: file's import block is elsewhere

    # logger.debug("Getting items for merged uuid %s " % (uuid))
    uuid_fields = enrich_backend.get_fields_uuid()

    # Build the query with json.dumps so that uuids containing special
    # characters (quotes, backslashes) are escaped correctly instead of
    # being spliced into a hand-built JSON string.
    should_terms = [{"term": {field: {"value": uuid}}} for field in uuid_fields]
    query = json.dumps({"query": {"bool": {"should": should_terms}}})

    url_search = enrich_backend.elastic.index_url + "/_search"
    url_search += "?size=1000"  # TODO get all items

    r = requests_ses.post(url_search, data=query)
    eitems = r.json()['hits']['hits']

    if not eitems:
        # logger.warning("No enriched items found for uuid: %s " % (uuid))
        return []

    # For one raw item several enriched items could be generated;
    # deduplicate the ids while preserving order.
    items_ids = []
    for eitem in eitems:
        item_id = enrich_backend.get_item_id(eitem)
        if item_id not in items_ids:
            items_ids.append(item_id)

    # Time to get the items
    logger.debug("Items to be enriched for merged uuids: %s", ",".join(items_ids))
    url_mget = ocean_backend.elastic.index_url + "/_mget"
    query = json.dumps({"docs": [{"_id": item_id} for item_id in items_ids]})

    r = requests_ses.post(url_mget, data=query)
    res_items = r.json()['docs']

    # Only documents actually found in the ocean index are returned.
    return [res_item["_source"] for res_item in res_items if res_item['found']]
constant[ Get all items that include uuid ]
variable[uuid_fields] assign[=] call[name[enrich_backend].get_fields_uuid, parameter[]]
variable[terms] assign[=] constant[]
for taget[name[field]] in starred[name[uuid_fields]] begin[:]
<ast.AugAssign object at 0x7da1b0f07940>
<ast.AugAssign object at 0x7da1b0f045b0>
variable[terms] assign[=] call[name[terms]][<ast.Slice object at 0x7da1b0f065c0>]
variable[query] assign[=] binary_operation[constant[
{"query": { "bool": { "should": [%s] }}}
] <ast.Mod object at 0x7da2590d6920> name[terms]]
variable[url_search] assign[=] binary_operation[name[enrich_backend].elastic.index_url + constant[/_search]]
<ast.AugAssign object at 0x7da1b0f07e50>
variable[r] assign[=] call[name[requests_ses].post, parameter[name[url_search]]]
variable[eitems] assign[=] call[call[call[name[r].json, parameter[]]][constant[hits]]][constant[hits]]
if compare[call[name[len], parameter[name[eitems]]] equal[==] constant[0]] begin[:]
return[list[[]]]
variable[items_ids] assign[=] list[[]]
for taget[name[eitem]] in starred[name[eitems]] begin[:]
variable[item_id] assign[=] call[name[enrich_backend].get_item_id, parameter[name[eitem]]]
if compare[name[item_id] <ast.NotIn object at 0x7da2590d7190> name[items_ids]] begin[:]
call[name[items_ids].append, parameter[name[item_id]]]
call[name[logger].debug, parameter[binary_operation[constant[Items to be renriched for merged uuids: %s] <ast.Mod object at 0x7da2590d6920> call[constant[,].join, parameter[name[items_ids]]]]]]
variable[url_mget] assign[=] binary_operation[name[ocean_backend].elastic.index_url + constant[/_mget]]
variable[items_ids_query] assign[=] constant[]
for taget[name[item_id]] in starred[name[items_ids]] begin[:]
<ast.AugAssign object at 0x7da1b0f04790>
<ast.AugAssign object at 0x7da1b0f049a0>
variable[items_ids_query] assign[=] call[name[items_ids_query]][<ast.Slice object at 0x7da1b0f07e20>]
variable[query] assign[=] binary_operation[constant[{"docs" : [%s]}] <ast.Mod object at 0x7da2590d6920> name[items_ids_query]]
variable[r] assign[=] call[name[requests_ses].post, parameter[name[url_mget]]]
variable[res_items] assign[=] call[call[name[r].json, parameter[]]][constant[docs]]
variable[items] assign[=] list[[]]
for taget[name[res_item]] in starred[name[res_items]] begin[:]
if call[name[res_item]][constant[found]] begin[:]
call[name[items].append, parameter[call[name[res_item]][constant[_source]]]]
return[name[items]] | keyword[def] identifier[get_items_from_uuid] ( identifier[uuid] , identifier[enrich_backend] , identifier[ocean_backend] ):
literal[string]
identifier[uuid_fields] = identifier[enrich_backend] . identifier[get_fields_uuid] ()
identifier[terms] = literal[string]
keyword[for] identifier[field] keyword[in] identifier[uuid_fields] :
identifier[terms] += literal[string] %( identifier[field] , identifier[uuid] )
identifier[terms] += literal[string]
identifier[terms] = identifier[terms] [:- literal[int] ]
identifier[query] = literal[string] %( identifier[terms] )
identifier[url_search] = identifier[enrich_backend] . identifier[elastic] . identifier[index_url] + literal[string]
identifier[url_search] += literal[string]
identifier[r] = identifier[requests_ses] . identifier[post] ( identifier[url_search] , identifier[data] = identifier[query] )
identifier[eitems] = identifier[r] . identifier[json] ()[ literal[string] ][ literal[string] ]
keyword[if] identifier[len] ( identifier[eitems] )== literal[int] :
keyword[return] []
identifier[items_ids] =[]
keyword[for] identifier[eitem] keyword[in] identifier[eitems] :
identifier[item_id] = identifier[enrich_backend] . identifier[get_item_id] ( identifier[eitem] )
keyword[if] identifier[item_id] keyword[not] keyword[in] identifier[items_ids] :
identifier[items_ids] . identifier[append] ( identifier[item_id] )
identifier[logger] . identifier[debug] ( literal[string] %( literal[string] . identifier[join] ( identifier[items_ids] )))
identifier[url_mget] = identifier[ocean_backend] . identifier[elastic] . identifier[index_url] + literal[string]
identifier[items_ids_query] = literal[string]
keyword[for] identifier[item_id] keyword[in] identifier[items_ids] :
identifier[items_ids_query] += literal[string] %( identifier[item_id] )
identifier[items_ids_query] += literal[string]
identifier[items_ids_query] = identifier[items_ids_query] [:- literal[int] ]
identifier[query] = literal[string] %( identifier[items_ids_query] )
identifier[r] = identifier[requests_ses] . identifier[post] ( identifier[url_mget] , identifier[data] = identifier[query] )
identifier[res_items] = identifier[r] . identifier[json] ()[ literal[string] ]
identifier[items] =[]
keyword[for] identifier[res_item] keyword[in] identifier[res_items] :
keyword[if] identifier[res_item] [ literal[string] ]:
identifier[items] . identifier[append] ( identifier[res_item] [ literal[string] ])
keyword[return] identifier[items] | def get_items_from_uuid(uuid, enrich_backend, ocean_backend):
""" Get all items that include uuid """
# logger.debug("Getting items for merged uuid %s " % (uuid))
uuid_fields = enrich_backend.get_fields_uuid()
terms = '' # all terms with uuids in the enriched item
for field in uuid_fields:
terms += '\n {"term": {\n "%s": {\n "value": "%s"\n }\n }}\n ' % (field, uuid)
terms += ',' # depends on [control=['for'], data=['field']]
terms = terms[:-1] # remove last , for last item
query = '\n {"query": { "bool": { "should": [%s] }}}\n ' % terms
url_search = enrich_backend.elastic.index_url + '/_search'
url_search += '?size=1000' # TODO get all items
r = requests_ses.post(url_search, data=query)
eitems = r.json()['hits']['hits']
if len(eitems) == 0:
# logger.warning("No enriched items found for uuid: %s " % (uuid))
return [] # depends on [control=['if'], data=[]]
items_ids = []
for eitem in eitems:
item_id = enrich_backend.get_item_id(eitem)
# For one item several eitems could be generated
if item_id not in items_ids:
items_ids.append(item_id) # depends on [control=['if'], data=['item_id', 'items_ids']] # depends on [control=['for'], data=['eitem']]
# Time to get the items
logger.debug('Items to be renriched for merged uuids: %s' % ','.join(items_ids))
url_mget = ocean_backend.elastic.index_url + '/_mget'
items_ids_query = ''
for item_id in items_ids:
items_ids_query += '{"_id" : "%s"}' % item_id
items_ids_query += ',' # depends on [control=['for'], data=['item_id']]
items_ids_query = items_ids_query[:-1] # remove last , for last item
query = '{"docs" : [%s]}' % items_ids_query
r = requests_ses.post(url_mget, data=query)
res_items = r.json()['docs']
items = []
for res_item in res_items:
if res_item['found']:
items.append(res_item['_source']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['res_item']]
return items |
def getDMCparam(fn: Path, xyPix, xyBin,
                FrameIndReq=None, ut1req=None, kineticsec=None, startUTC=None, nHeadBytes=4, verbose=0):
    """
    nHeadBytes=4 for 2013-2016 data
    nHeadBytes=0 for 2011 data
    """
    nmeta = nHeadBytes // 2  # FIXME for DMCdata version 1 only

    # leave this here, getsize() doesn't fail on directory
    if not fn.is_file():
        raise ValueError(f'{fn} is not a file!')

    print(f'reading {fn}')

    # int() in case we are fed a float or int
    super_x = int(xyPix[0] // xyBin[0])
    super_y = int(xyPix[1] // xyBin[1])

    pix_per_img, bytes_per_img, bytes_per_frame = howbig(super_x, super_y,
                                                         nHeadBytes)

    first_raw_ind, last_raw_ind = getRawInd(fn, bytes_per_img,
                                            nHeadBytes, nmeta)

    frame_ind_rel = whichframes(fn, FrameIndReq, kineticsec, ut1req,
                                startUTC, first_raw_ind, last_raw_ind,
                                bytes_per_img, bytes_per_frame, verbose)

    return {'superx': super_x,
            'supery': super_y,
            'nmetadata': nmeta,
            'bytesperframe': bytes_per_frame,
            'pixelsperimage': pix_per_img,
            'nframeextract': frame_ind_rel.size,
            'frameindrel': frame_ind_rel}
constant[
nHeadBytes=4 for 2013-2016 data
nHeadBytes=0 for 2011 data
]
variable[Nmetadata] assign[=] binary_operation[name[nHeadBytes] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
if <ast.UnaryOp object at 0x7da20e963760> begin[:]
<ast.Raise object at 0x7da20e961600>
call[name[print], parameter[<ast.JoinedStr object at 0x7da20e960ee0>]]
variable[SuperX] assign[=] call[name[int], parameter[binary_operation[call[name[xyPix]][constant[0]] <ast.FloorDiv object at 0x7da2590d6bc0> call[name[xyBin]][constant[0]]]]]
variable[SuperY] assign[=] call[name[int], parameter[binary_operation[call[name[xyPix]][constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> call[name[xyBin]][constant[1]]]]]
<ast.Tuple object at 0x7da1b26ad5a0> assign[=] call[name[howbig], parameter[name[SuperX], name[SuperY], name[nHeadBytes]]]
<ast.Tuple object at 0x7da1b26aec20> assign[=] call[name[getRawInd], parameter[name[fn], name[BytesPerImage], name[nHeadBytes], name[Nmetadata]]]
variable[FrameIndRel] assign[=] call[name[whichframes], parameter[name[fn], name[FrameIndReq], name[kineticsec], name[ut1req], name[startUTC], name[firstRawInd], name[lastRawInd], name[BytesPerImage], name[BytesPerFrame], name[verbose]]]
return[dictionary[[<ast.Constant object at 0x7da1b26ac340>, <ast.Constant object at 0x7da1b26aed40>, <ast.Constant object at 0x7da1b26ad990>, <ast.Constant object at 0x7da1b26ae230>, <ast.Constant object at 0x7da1b26aefb0>, <ast.Constant object at 0x7da1b26ac820>, <ast.Constant object at 0x7da1b26af910>], [<ast.Name object at 0x7da1b26ad630>, <ast.Name object at 0x7da1b26ac2b0>, <ast.Name object at 0x7da1b26ae7a0>, <ast.Name object at 0x7da1b26ae920>, <ast.Name object at 0x7da1b26af100>, <ast.Attribute object at 0x7da1b26ada50>, <ast.Name object at 0x7da1b26aeb30>]]] | keyword[def] identifier[getDMCparam] ( identifier[fn] : identifier[Path] , identifier[xyPix] , identifier[xyBin] ,
identifier[FrameIndReq] = keyword[None] , identifier[ut1req] = keyword[None] , identifier[kineticsec] = keyword[None] , identifier[startUTC] = keyword[None] , identifier[nHeadBytes] = literal[int] , identifier[verbose] = literal[int] ):
literal[string]
identifier[Nmetadata] = identifier[nHeadBytes] // literal[int]
keyword[if] keyword[not] identifier[fn] . identifier[is_file] ():
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[print] ( literal[string] )
identifier[SuperX] = identifier[int] ( identifier[xyPix] [ literal[int] ]// identifier[xyBin] [ literal[int] ])
identifier[SuperY] = identifier[int] ( identifier[xyPix] [ literal[int] ]// identifier[xyBin] [ literal[int] ])
identifier[PixelsPerImage] , identifier[BytesPerImage] , identifier[BytesPerFrame] = identifier[howbig] (
identifier[SuperX] , identifier[SuperY] , identifier[nHeadBytes] )
( identifier[firstRawInd] , identifier[lastRawInd] )= identifier[getRawInd] (
identifier[fn] , identifier[BytesPerImage] , identifier[nHeadBytes] , identifier[Nmetadata] )
identifier[FrameIndRel] = identifier[whichframes] ( identifier[fn] , identifier[FrameIndReq] , identifier[kineticsec] , identifier[ut1req] , identifier[startUTC] , identifier[firstRawInd] , identifier[lastRawInd] ,
identifier[BytesPerImage] , identifier[BytesPerFrame] , identifier[verbose] )
keyword[return] { literal[string] : identifier[SuperX] , literal[string] : identifier[SuperY] , literal[string] : identifier[Nmetadata] ,
literal[string] : identifier[BytesPerFrame] , literal[string] : identifier[PixelsPerImage] ,
literal[string] : identifier[FrameIndRel] . identifier[size] ,
literal[string] : identifier[FrameIndRel] } | def getDMCparam(fn: Path, xyPix, xyBin, FrameIndReq=None, ut1req=None, kineticsec=None, startUTC=None, nHeadBytes=4, verbose=0):
"""
nHeadBytes=4 for 2013-2016 data
nHeadBytes=0 for 2011 data
"""
Nmetadata = nHeadBytes // 2 # FIXME for DMCdata version 1 only
if not fn.is_file(): # leave this here, getsize() doesn't fail on directory
raise ValueError(f'{fn} is not a file!') # depends on [control=['if'], data=[]]
print(f'reading {fn}')
# int() in case we are fed a float or int
SuperX = int(xyPix[0] // xyBin[0])
SuperY = int(xyPix[1] // xyBin[1])
(PixelsPerImage, BytesPerImage, BytesPerFrame) = howbig(SuperX, SuperY, nHeadBytes)
(firstRawInd, lastRawInd) = getRawInd(fn, BytesPerImage, nHeadBytes, Nmetadata)
FrameIndRel = whichframes(fn, FrameIndReq, kineticsec, ut1req, startUTC, firstRawInd, lastRawInd, BytesPerImage, BytesPerFrame, verbose)
return {'superx': SuperX, 'supery': SuperY, 'nmetadata': Nmetadata, 'bytesperframe': BytesPerFrame, 'pixelsperimage': PixelsPerImage, 'nframeextract': FrameIndRel.size, 'frameindrel': FrameIndRel} |
def planck(wave, temp, wavelength=True):
    """The Planck radiation or Blackbody radiation as a function of wavelength
    or wavenumber. SI units.
    _planck(wave, temperature, wavelength=True)
    wave = Wavelength/wavenumber or a sequence of wavelengths/wavenumbers (m or m^-1)
    temp = Temperature (scalar) or a sequence of temperatures (K)
    Output: Wavelength space: The spectral radiance per meter (not micron!)
            Unit = W/m^2 sr^-1 m^-1
    Wavenumber space: The spectral radiance in Watts per square meter
    per steradian per m-1:
    Unit = W/m^2 sr^-1 (m^-1)^-1 = W/m sr^-1
    Converting from SI units to mW/m^2 sr^-1 (cm^-1)^-1:
    1.0 W/m^2 sr^-1 (m^-1)^-1 = 0.1 mW/m^2 sr^-1 (cm^-1)^-1
    """
    units = ['wavelengths', 'wavenumbers']
    # NOTE(review): this debug message is only emitted when `wavelength`
    # is truthy, so the 'wavenumbers' case is never logged; and the
    # index (wavelength == True) - 1 is 0 here, i.e. always
    # 'wavelengths'. Confirm whether logging both cases was intended.
    if wavelength:
        LOG.debug("Using {0} when calculating the Blackbody radiance".format(
            units[(wavelength == True) - 1]))
    # Promote scalars to 1-element arrays so the matrix math below is uniform.
    if np.isscalar(temp):
        temperature = np.array([temp, ], dtype='float64')
    else:
        temperature = np.array(temp, dtype='float64')
    shape = temperature.shape  # remembered to restore the caller's shape
    if np.isscalar(wave):
        wln = np.array([wave, ], dtype='float64')
    else:
        wln = np.array(wave, dtype='float64')
    # Planck's law: numerator `nom` and exponent factor `arg1` differ
    # between wavelength space and wavenumber space.
    if wavelength:
        const = 2 * H_PLANCK * C_SPEED ** 2
        nom = const / wln ** 5
        arg1 = H_PLANCK * C_SPEED / (K_BOLTZMANN * wln)
    else:
        nom = 2 * H_PLANCK * (C_SPEED ** 2) * (wln ** 3)
        arg1 = H_PLANCK * C_SPEED * wln / K_BOLTZMANN
    # 1/T per temperature, with -9 as a sentinel for |T| <= EPSILON;
    # those entries are masked out immediately below. reshape(-1, 1)
    # makes arg2 a column so arg1 * arg2 broadcasts to (ntemp, nwln).
    arg2 = np.where(np.greater(np.abs(temperature), EPSILON),
                    np.array(1. / temperature), -9).reshape(-1, 1)
    arg2 = np.ma.masked_array(arg2, mask=arg2 == -9)
    LOG.debug("Max and min - arg1: %s %s", str(arg1.max()), str(arg1.min()))
    LOG.debug("Max and min - arg2: %s %s", str(arg2.max()), str(arg2.min()))
    try:
        # float32 to halve the memory of the (ntemp, nwln) product.
        exp_arg = np.multiply(arg1.astype('float32'), arg2.astype('float32'))
    except MemoryError:
        LOG.warning(("Dimensions used in numpy.multiply probably reached "
                     "limit!\n"
                     "Make sure the Radiance<->Tb table has been created "
                     "and try running again"))
        raise
    LOG.debug("Max and min before exp: %s %s", str(exp_arg.max()),
              str(exp_arg.min()))
    # A negative exponent argument implies a non-physical (negative)
    # temperature slipped through; warn, but continue the computation.
    if exp_arg.min() < 0:
        LOG.warning("Something is fishy: \n" +
                    "\tDenominator might be zero or negative in radiance derivation:")
        dubious = np.where(exp_arg < 0)[0]
        LOG.warning(
            "Number of items having dubious values: " + str(dubious.shape[0]))
    denom = np.exp(exp_arg) - 1
    rad = nom / denom
    # Masked (near-zero temperature) entries become NaN in the output.
    rad = np.where(rad.mask, np.nan, rad.data)
    radshape = rad.shape
    # Collapse the (ntemp, nwln) result back to the shape implied by the
    # caller's inputs: scalar, 1-D, or multi-dimensional temperature.
    if wln.shape[0] == 1:
        if temperature.shape[0] == 1:
            return rad[0, 0]
        else:
            return rad[:, 0].reshape(shape)
    else:
        if temperature.shape[0] == 1:
            return rad[0, :]
        else:
            if len(shape) == 1:
                return np.reshape(rad, (shape[0], radshape[1]))
            else:
                return np.reshape(rad, (shape[0], shape[1], radshape[1]))
constant[The Planck radiation or Blackbody radiation as a function of wavelength
or wavenumber. SI units.
_planck(wave, temperature, wavelength=True)
wave = Wavelength/wavenumber or a sequence of wavelengths/wavenumbers (m or m^-1)
temp = Temperature (scalar) or a sequence of temperatures (K)
Output: Wavelength space: The spectral radiance per meter (not micron!)
Unit = W/m^2 sr^-1 m^-1
Wavenumber space: The spectral radiance in Watts per square meter
per steradian per m-1:
Unit = W/m^2 sr^-1 (m^-1)^-1 = W/m sr^-1
Converting from SI units to mW/m^2 sr^-1 (cm^-1)^-1:
1.0 W/m^2 sr^-1 (m^-1)^-1 = 0.1 mW/m^2 sr^-1 (cm^-1)^-1
]
variable[units] assign[=] list[[<ast.Constant object at 0x7da1b0ea1060>, <ast.Constant object at 0x7da1b0ea2c20>]]
if name[wavelength] begin[:]
call[name[LOG].debug, parameter[call[constant[Using {0} when calculating the Blackbody radiance].format, parameter[call[name[units]][binary_operation[compare[name[wavelength] equal[==] constant[True]] - constant[1]]]]]]]
if call[name[np].isscalar, parameter[name[temp]]] begin[:]
variable[temperature] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0ea2b90>]]]]
variable[shape] assign[=] name[temperature].shape
if call[name[np].isscalar, parameter[name[wave]]] begin[:]
variable[wln] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0ea1240>]]]]
if name[wavelength] begin[:]
variable[const] assign[=] binary_operation[binary_operation[constant[2] * name[H_PLANCK]] * binary_operation[name[C_SPEED] ** constant[2]]]
variable[nom] assign[=] binary_operation[name[const] / binary_operation[name[wln] ** constant[5]]]
variable[arg1] assign[=] binary_operation[binary_operation[name[H_PLANCK] * name[C_SPEED]] / binary_operation[name[K_BOLTZMANN] * name[wln]]]
variable[arg2] assign[=] call[call[name[np].where, parameter[call[name[np].greater, parameter[call[name[np].abs, parameter[name[temperature]]], name[EPSILON]]], call[name[np].array, parameter[binary_operation[constant[1.0] / name[temperature]]]], <ast.UnaryOp object at 0x7da1b0e463b0>]].reshape, parameter[<ast.UnaryOp object at 0x7da1b0e440a0>, constant[1]]]
variable[arg2] assign[=] call[name[np].ma.masked_array, parameter[name[arg2]]]
call[name[LOG].debug, parameter[constant[Max and min - arg1: %s %s], call[name[str], parameter[call[name[arg1].max, parameter[]]]], call[name[str], parameter[call[name[arg1].min, parameter[]]]]]]
call[name[LOG].debug, parameter[constant[Max and min - arg2: %s %s], call[name[str], parameter[call[name[arg2].max, parameter[]]]], call[name[str], parameter[call[name[arg2].min, parameter[]]]]]]
<ast.Try object at 0x7da1b0e472b0>
call[name[LOG].debug, parameter[constant[Max and min before exp: %s %s], call[name[str], parameter[call[name[exp_arg].max, parameter[]]]], call[name[str], parameter[call[name[exp_arg].min, parameter[]]]]]]
if compare[call[name[exp_arg].min, parameter[]] less[<] constant[0]] begin[:]
call[name[LOG].warning, parameter[binary_operation[constant[Something is fishy:
] + constant[ Denominator might be zero or negative in radiance derivation:]]]]
variable[dubious] assign[=] call[call[name[np].where, parameter[compare[name[exp_arg] less[<] constant[0]]]]][constant[0]]
call[name[LOG].warning, parameter[binary_operation[constant[Number of items having dubious values: ] + call[name[str], parameter[call[name[dubious].shape][constant[0]]]]]]]
variable[denom] assign[=] binary_operation[call[name[np].exp, parameter[name[exp_arg]]] - constant[1]]
variable[rad] assign[=] binary_operation[name[nom] / name[denom]]
variable[rad] assign[=] call[name[np].where, parameter[name[rad].mask, name[np].nan, name[rad].data]]
variable[radshape] assign[=] name[rad].shape
if compare[call[name[wln].shape][constant[0]] equal[==] constant[1]] begin[:]
if compare[call[name[temperature].shape][constant[0]] equal[==] constant[1]] begin[:]
return[call[name[rad]][tuple[[<ast.Constant object at 0x7da20e961f00>, <ast.Constant object at 0x7da20e963550>]]]] | keyword[def] identifier[planck] ( identifier[wave] , identifier[temp] , identifier[wavelength] = keyword[True] ):
literal[string]
identifier[units] =[ literal[string] , literal[string] ]
keyword[if] identifier[wavelength] :
identifier[LOG] . identifier[debug] ( literal[string] . identifier[format] (
identifier[units] [( identifier[wavelength] == keyword[True] )- literal[int] ]))
keyword[if] identifier[np] . identifier[isscalar] ( identifier[temp] ):
identifier[temperature] = identifier[np] . identifier[array] ([ identifier[temp] ,], identifier[dtype] = literal[string] )
keyword[else] :
identifier[temperature] = identifier[np] . identifier[array] ( identifier[temp] , identifier[dtype] = literal[string] )
identifier[shape] = identifier[temperature] . identifier[shape]
keyword[if] identifier[np] . identifier[isscalar] ( identifier[wave] ):
identifier[wln] = identifier[np] . identifier[array] ([ identifier[wave] ,], identifier[dtype] = literal[string] )
keyword[else] :
identifier[wln] = identifier[np] . identifier[array] ( identifier[wave] , identifier[dtype] = literal[string] )
keyword[if] identifier[wavelength] :
identifier[const] = literal[int] * identifier[H_PLANCK] * identifier[C_SPEED] ** literal[int]
identifier[nom] = identifier[const] / identifier[wln] ** literal[int]
identifier[arg1] = identifier[H_PLANCK] * identifier[C_SPEED] /( identifier[K_BOLTZMANN] * identifier[wln] )
keyword[else] :
identifier[nom] = literal[int] * identifier[H_PLANCK] *( identifier[C_SPEED] ** literal[int] )*( identifier[wln] ** literal[int] )
identifier[arg1] = identifier[H_PLANCK] * identifier[C_SPEED] * identifier[wln] / identifier[K_BOLTZMANN]
identifier[arg2] = identifier[np] . identifier[where] ( identifier[np] . identifier[greater] ( identifier[np] . identifier[abs] ( identifier[temperature] ), identifier[EPSILON] ),
identifier[np] . identifier[array] ( literal[int] / identifier[temperature] ),- literal[int] ). identifier[reshape] (- literal[int] , literal[int] )
identifier[arg2] = identifier[np] . identifier[ma] . identifier[masked_array] ( identifier[arg2] , identifier[mask] = identifier[arg2] ==- literal[int] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[str] ( identifier[arg1] . identifier[max] ()), identifier[str] ( identifier[arg1] . identifier[min] ()))
identifier[LOG] . identifier[debug] ( literal[string] , identifier[str] ( identifier[arg2] . identifier[max] ()), identifier[str] ( identifier[arg2] . identifier[min] ()))
keyword[try] :
identifier[exp_arg] = identifier[np] . identifier[multiply] ( identifier[arg1] . identifier[astype] ( literal[string] ), identifier[arg2] . identifier[astype] ( literal[string] ))
keyword[except] identifier[MemoryError] :
identifier[LOG] . identifier[warning] (( literal[string]
literal[string]
literal[string]
literal[string] ))
keyword[raise]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[str] ( identifier[exp_arg] . identifier[max] ()),
identifier[str] ( identifier[exp_arg] . identifier[min] ()))
keyword[if] identifier[exp_arg] . identifier[min] ()< literal[int] :
identifier[LOG] . identifier[warning] ( literal[string] +
literal[string] )
identifier[dubious] = identifier[np] . identifier[where] ( identifier[exp_arg] < literal[int] )[ literal[int] ]
identifier[LOG] . identifier[warning] (
literal[string] + identifier[str] ( identifier[dubious] . identifier[shape] [ literal[int] ]))
identifier[denom] = identifier[np] . identifier[exp] ( identifier[exp_arg] )- literal[int]
identifier[rad] = identifier[nom] / identifier[denom]
identifier[rad] = identifier[np] . identifier[where] ( identifier[rad] . identifier[mask] , identifier[np] . identifier[nan] , identifier[rad] . identifier[data] )
identifier[radshape] = identifier[rad] . identifier[shape]
keyword[if] identifier[wln] . identifier[shape] [ literal[int] ]== literal[int] :
keyword[if] identifier[temperature] . identifier[shape] [ literal[int] ]== literal[int] :
keyword[return] identifier[rad] [ literal[int] , literal[int] ]
keyword[else] :
keyword[return] identifier[rad] [:, literal[int] ]. identifier[reshape] ( identifier[shape] )
keyword[else] :
keyword[if] identifier[temperature] . identifier[shape] [ literal[int] ]== literal[int] :
keyword[return] identifier[rad] [ literal[int] ,:]
keyword[else] :
keyword[if] identifier[len] ( identifier[shape] )== literal[int] :
keyword[return] identifier[np] . identifier[reshape] ( identifier[rad] ,( identifier[shape] [ literal[int] ], identifier[radshape] [ literal[int] ]))
keyword[else] :
keyword[return] identifier[np] . identifier[reshape] ( identifier[rad] ,( identifier[shape] [ literal[int] ], identifier[shape] [ literal[int] ], identifier[radshape] [ literal[int] ])) | def planck(wave, temp, wavelength=True):
"""The Planck radiation or Blackbody radiation as a function of wavelength
or wavenumber. SI units.
_planck(wave, temperature, wavelength=True)
wave = Wavelength/wavenumber or a sequence of wavelengths/wavenumbers (m or m^-1)
temp = Temperature (scalar) or a sequence of temperatures (K)
Output: Wavelength space: The spectral radiance per meter (not micron!)
Unit = W/m^2 sr^-1 m^-1
Wavenumber space: The spectral radiance in Watts per square meter
per steradian per m-1:
Unit = W/m^2 sr^-1 (m^-1)^-1 = W/m sr^-1
Converting from SI units to mW/m^2 sr^-1 (cm^-1)^-1:
1.0 W/m^2 sr^-1 (m^-1)^-1 = 0.1 mW/m^2 sr^-1 (cm^-1)^-1
"""
units = ['wavelengths', 'wavenumbers']
if wavelength:
LOG.debug('Using {0} when calculating the Blackbody radiance'.format(units[(wavelength == True) - 1])) # depends on [control=['if'], data=[]]
if np.isscalar(temp):
temperature = np.array([temp], dtype='float64') # depends on [control=['if'], data=[]]
else:
temperature = np.array(temp, dtype='float64')
shape = temperature.shape
if np.isscalar(wave):
wln = np.array([wave], dtype='float64') # depends on [control=['if'], data=[]]
else:
wln = np.array(wave, dtype='float64')
if wavelength:
const = 2 * H_PLANCK * C_SPEED ** 2
nom = const / wln ** 5
arg1 = H_PLANCK * C_SPEED / (K_BOLTZMANN * wln) # depends on [control=['if'], data=[]]
else:
nom = 2 * H_PLANCK * C_SPEED ** 2 * wln ** 3
arg1 = H_PLANCK * C_SPEED * wln / K_BOLTZMANN
arg2 = np.where(np.greater(np.abs(temperature), EPSILON), np.array(1.0 / temperature), -9).reshape(-1, 1)
arg2 = np.ma.masked_array(arg2, mask=arg2 == -9)
LOG.debug('Max and min - arg1: %s %s', str(arg1.max()), str(arg1.min()))
LOG.debug('Max and min - arg2: %s %s', str(arg2.max()), str(arg2.min()))
try:
exp_arg = np.multiply(arg1.astype('float32'), arg2.astype('float32')) # depends on [control=['try'], data=[]]
except MemoryError:
LOG.warning('Dimensions used in numpy.multiply probably reached limit!\nMake sure the Radiance<->Tb table has been created and try running again')
raise # depends on [control=['except'], data=[]]
LOG.debug('Max and min before exp: %s %s', str(exp_arg.max()), str(exp_arg.min()))
if exp_arg.min() < 0:
LOG.warning('Something is fishy: \n' + '\tDenominator might be zero or negative in radiance derivation:')
dubious = np.where(exp_arg < 0)[0]
LOG.warning('Number of items having dubious values: ' + str(dubious.shape[0])) # depends on [control=['if'], data=[]]
denom = np.exp(exp_arg) - 1
rad = nom / denom
rad = np.where(rad.mask, np.nan, rad.data)
radshape = rad.shape
if wln.shape[0] == 1:
if temperature.shape[0] == 1:
return rad[0, 0] # depends on [control=['if'], data=[]]
else:
return rad[:, 0].reshape(shape) # depends on [control=['if'], data=[]]
elif temperature.shape[0] == 1:
return rad[0, :] # depends on [control=['if'], data=[]]
elif len(shape) == 1:
return np.reshape(rad, (shape[0], radshape[1])) # depends on [control=['if'], data=[]]
else:
return np.reshape(rad, (shape[0], shape[1], radshape[1])) |
def _is_readable(self, obj):
"""Check if the argument is a readable file-like object."""
try:
read = getattr(obj, 'read')
except AttributeError:
return False
else:
return is_method(read, max_arity=1) | def function[_is_readable, parameter[self, obj]]:
constant[Check if the argument is a readable file-like object.]
<ast.Try object at 0x7da1b0e63010> | keyword[def] identifier[_is_readable] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[try] :
identifier[read] = identifier[getattr] ( identifier[obj] , literal[string] )
keyword[except] identifier[AttributeError] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] identifier[is_method] ( identifier[read] , identifier[max_arity] = literal[int] ) | def _is_readable(self, obj):
"""Check if the argument is a readable file-like object."""
try:
read = getattr(obj, 'read') # depends on [control=['try'], data=[]]
except AttributeError:
return False # depends on [control=['except'], data=[]]
else:
return is_method(read, max_arity=1) |
def init_hmm(observations, nstates, lag=1, output=None, reversible=True):
"""Use a heuristic scheme to generate an initial model.
Parameters
----------
observations : list of ndarray((T_i))
list of arrays of length T_i with observation data
nstates : int
The number of states.
output : str, optional, default=None
Output model type from [None, 'gaussian', 'discrete']. If None, will automatically select an output
model type based on the format of observations.
Examples
--------
Generate initial model for a gaussian output model.
>>> import bhmm
>>> [model, observations, states] = bhmm.testsystems.generate_synthetic_observations(output='gaussian')
>>> initial_model = init_hmm(observations, model.nstates, output='gaussian')
Generate initial model for a discrete output model.
>>> import bhmm
>>> [model, observations, states] = bhmm.testsystems.generate_synthetic_observations(output='discrete')
>>> initial_model = init_hmm(observations, model.nstates, output='discrete')
"""
# select output model type
if output is None:
output = _guess_output_type(observations)
if output == 'discrete':
return init_discrete_hmm(observations, nstates, lag=lag, reversible=reversible)
elif output == 'gaussian':
return init_gaussian_hmm(observations, nstates, lag=lag, reversible=reversible)
else:
raise NotImplementedError('output model type '+str(output)+' not yet implemented.') | def function[init_hmm, parameter[observations, nstates, lag, output, reversible]]:
constant[Use a heuristic scheme to generate an initial model.
Parameters
----------
observations : list of ndarray((T_i))
list of arrays of length T_i with observation data
nstates : int
The number of states.
output : str, optional, default=None
Output model type from [None, 'gaussian', 'discrete']. If None, will automatically select an output
model type based on the format of observations.
Examples
--------
Generate initial model for a gaussian output model.
>>> import bhmm
>>> [model, observations, states] = bhmm.testsystems.generate_synthetic_observations(output='gaussian')
>>> initial_model = init_hmm(observations, model.nstates, output='gaussian')
Generate initial model for a discrete output model.
>>> import bhmm
>>> [model, observations, states] = bhmm.testsystems.generate_synthetic_observations(output='discrete')
>>> initial_model = init_hmm(observations, model.nstates, output='discrete')
]
if compare[name[output] is constant[None]] begin[:]
variable[output] assign[=] call[name[_guess_output_type], parameter[name[observations]]]
if compare[name[output] equal[==] constant[discrete]] begin[:]
return[call[name[init_discrete_hmm], parameter[name[observations], name[nstates]]]] | keyword[def] identifier[init_hmm] ( identifier[observations] , identifier[nstates] , identifier[lag] = literal[int] , identifier[output] = keyword[None] , identifier[reversible] = keyword[True] ):
literal[string]
keyword[if] identifier[output] keyword[is] keyword[None] :
identifier[output] = identifier[_guess_output_type] ( identifier[observations] )
keyword[if] identifier[output] == literal[string] :
keyword[return] identifier[init_discrete_hmm] ( identifier[observations] , identifier[nstates] , identifier[lag] = identifier[lag] , identifier[reversible] = identifier[reversible] )
keyword[elif] identifier[output] == literal[string] :
keyword[return] identifier[init_gaussian_hmm] ( identifier[observations] , identifier[nstates] , identifier[lag] = identifier[lag] , identifier[reversible] = identifier[reversible] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] + identifier[str] ( identifier[output] )+ literal[string] ) | def init_hmm(observations, nstates, lag=1, output=None, reversible=True):
"""Use a heuristic scheme to generate an initial model.
Parameters
----------
observations : list of ndarray((T_i))
list of arrays of length T_i with observation data
nstates : int
The number of states.
output : str, optional, default=None
Output model type from [None, 'gaussian', 'discrete']. If None, will automatically select an output
model type based on the format of observations.
Examples
--------
Generate initial model for a gaussian output model.
>>> import bhmm
>>> [model, observations, states] = bhmm.testsystems.generate_synthetic_observations(output='gaussian')
>>> initial_model = init_hmm(observations, model.nstates, output='gaussian')
Generate initial model for a discrete output model.
>>> import bhmm
>>> [model, observations, states] = bhmm.testsystems.generate_synthetic_observations(output='discrete')
>>> initial_model = init_hmm(observations, model.nstates, output='discrete')
"""
# select output model type
if output is None:
output = _guess_output_type(observations) # depends on [control=['if'], data=['output']]
if output == 'discrete':
return init_discrete_hmm(observations, nstates, lag=lag, reversible=reversible) # depends on [control=['if'], data=[]]
elif output == 'gaussian':
return init_gaussian_hmm(observations, nstates, lag=lag, reversible=reversible) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('output model type ' + str(output) + ' not yet implemented.') |
def from_archive(archive_filename, py_interpreter=sys.executable):
    """Extract metadata from a given sdist archive file.

    The archive is unpacked into a temporary directory (cleaned up
    automatically) and its ``setup.py`` is interrogated there.

    :param archive_filename: a sdist archive file
    :param py_interpreter: The full path to the used python interpreter
    :returns: a json blob with metadata
    """
    with _extract_to_tempdir(archive_filename) as unpacked_dir:
        return _setup_py_run_from_dir(unpacked_dir, py_interpreter)
return data | def function[from_archive, parameter[archive_filename, py_interpreter]]:
constant[extract metadata from a given sdist archive file
:param archive_filename: a sdist archive file
:param py_interpreter: The full path to the used python interpreter
:returns: a json blob with metadata
]
with call[name[_extract_to_tempdir], parameter[name[archive_filename]]] begin[:]
variable[data] assign[=] call[name[_setup_py_run_from_dir], parameter[name[root_dir], name[py_interpreter]]]
return[name[data]] | keyword[def] identifier[from_archive] ( identifier[archive_filename] , identifier[py_interpreter] = identifier[sys] . identifier[executable] ):
literal[string]
keyword[with] identifier[_extract_to_tempdir] ( identifier[archive_filename] ) keyword[as] identifier[root_dir] :
identifier[data] = identifier[_setup_py_run_from_dir] ( identifier[root_dir] , identifier[py_interpreter] )
keyword[return] identifier[data] | def from_archive(archive_filename, py_interpreter=sys.executable):
"""extract metadata from a given sdist archive file
:param archive_filename: a sdist archive file
:param py_interpreter: The full path to the used python interpreter
:returns: a json blob with metadata
"""
with _extract_to_tempdir(archive_filename) as root_dir:
data = _setup_py_run_from_dir(root_dir, py_interpreter) # depends on [control=['with'], data=['root_dir']]
return data |
def update(self, is_reserved=values.unset):
    """
    Update the ShortCodeInstance

    :param bool is_reserved: Whether the short code should be reserved for manual assignment to participants only

    :returns: Updated ShortCodeInstance
    :rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeInstance
    """
    # POST the (possibly unset) flag and wrap the response payload in a
    # fresh instance bound to the same service/sid solution.
    payload = self._version.update(
        'POST',
        self._uri,
        data=values.of({'IsReserved': is_reserved, }),
    )
    return ShortCodeInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
        sid=self._solution['sid'],
    )
constant[
Update the ShortCodeInstance
:param bool is_reserved: Whether the short code should be reserved for manual assignment to participants only
:returns: Updated ShortCodeInstance
:rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da20c6ab0a0>], [<ast.Name object at 0x7da20c6aaaa0>]]]]
variable[payload] assign[=] call[name[self]._version.update, parameter[constant[POST], name[self]._uri]]
return[call[name[ShortCodeInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[update] ( identifier[self] , identifier[is_reserved] = identifier[values] . identifier[unset] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({ literal[string] : identifier[is_reserved] ,})
identifier[payload] = identifier[self] . identifier[_version] . identifier[update] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[ShortCodeInstance] (
identifier[self] . identifier[_version] ,
identifier[payload] ,
identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
) | def update(self, is_reserved=values.unset):
"""
Update the ShortCodeInstance
:param bool is_reserved: Whether the short code should be reserved for manual assignment to participants only
:returns: Updated ShortCodeInstance
:rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeInstance
"""
data = values.of({'IsReserved': is_reserved})
payload = self._version.update('POST', self._uri, data=data)
return ShortCodeInstance(self._version, payload, service_sid=self._solution['service_sid'], sid=self._solution['sid']) |
def get(self, record_id):
    """
    Retrieves a record by its id

    >>> record = airtable.get('recwPQIfs4wKPyc9D')

    Args:
        record_id(``str``): Airtable record id

    Returns:
        record (``dict``): Record
    """
    # Resolve the record's URL and fetch it in one expression.
    return self._get(self.record_url(record_id))
constant[
Retrieves a record by its id
>>> record = airtable.get('recwPQIfs4wKPyc9D')
Args:
record_id(``str``): Airtable record id
Returns:
record (``dict``): Record
]
variable[record_url] assign[=] call[name[self].record_url, parameter[name[record_id]]]
return[call[name[self]._get, parameter[name[record_url]]]] | keyword[def] identifier[get] ( identifier[self] , identifier[record_id] ):
literal[string]
identifier[record_url] = identifier[self] . identifier[record_url] ( identifier[record_id] )
keyword[return] identifier[self] . identifier[_get] ( identifier[record_url] ) | def get(self, record_id):
"""
Retrieves a record by its id
>>> record = airtable.get('recwPQIfs4wKPyc9D')
Args:
record_id(``str``): Airtable record id
Returns:
record (``dict``): Record
"""
record_url = self.record_url(record_id)
return self._get(record_url) |
def qos_map_dscp_mutation_dscp_mutation_map_name(self, **kwargs):
    """Build the netconf payload for qos/map/dscp-mutation/dscp-mutation-map-name.

    Pops 'dscp_mutation_map_name' (required) and 'callback' (defaulting to
    self._callback) from kwargs, then invokes the callback on the XML tree.
    (Auto generated code, restyled.)
    """
    root = ET.Element("config")
    qos_node = ET.SubElement(root, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos")
    map_node = ET.SubElement(qos_node, "map")
    mutation_node = ET.SubElement(map_node, "dscp-mutation")
    name_node = ET.SubElement(mutation_node, "dscp-mutation-map-name")
    name_node.text = kwargs.pop('dscp_mutation_map_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[qos] assign[=] call[name[ET].SubElement, parameter[name[config], constant[qos]]]
variable[map] assign[=] call[name[ET].SubElement, parameter[name[qos], constant[map]]]
variable[dscp_mutation] assign[=] call[name[ET].SubElement, parameter[name[map], constant[dscp-mutation]]]
variable[dscp_mutation_map_name] assign[=] call[name[ET].SubElement, parameter[name[dscp_mutation], constant[dscp-mutation-map-name]]]
name[dscp_mutation_map_name].text assign[=] call[name[kwargs].pop, parameter[constant[dscp_mutation_map_name]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[qos_map_dscp_mutation_dscp_mutation_map_name] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[qos] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[map] = identifier[ET] . identifier[SubElement] ( identifier[qos] , literal[string] )
identifier[dscp_mutation] = identifier[ET] . identifier[SubElement] ( identifier[map] , literal[string] )
identifier[dscp_mutation_map_name] = identifier[ET] . identifier[SubElement] ( identifier[dscp_mutation] , literal[string] )
identifier[dscp_mutation_map_name] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def qos_map_dscp_mutation_dscp_mutation_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
qos = ET.SubElement(config, 'qos', xmlns='urn:brocade.com:mgmt:brocade-qos')
map = ET.SubElement(qos, 'map')
dscp_mutation = ET.SubElement(map, 'dscp-mutation')
dscp_mutation_map_name = ET.SubElement(dscp_mutation, 'dscp-mutation-map-name')
dscp_mutation_map_name.text = kwargs.pop('dscp_mutation_map_name')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def handle(self, *arguments, **options):
    """
    Parses arguments and options, runs validate_<action> for each action
    named by self.get_actions(), then runs handle_<action> for each action
    named by self.get_actions().

    NOTE(review): validate_<action> hooks receive the *raw* arguments and
    options, while handle_<action> hooks receive the parsed
    self.arguments/self.options -- this mirrors the original behavior;
    confirm it is intentional.
    """
    # Raw values are stored first, in case parse_arguments/parse_options
    # read self.arguments/self.options, then overwritten with parsed ones.
    self.arguments = arguments
    self.options = options
    self.arguments = self.parse_arguments(arguments)
    self.options = self.parse_options(options)
    for name in self.get_actions():
        validate = getattr(self, "validate_{name:s}".format(
            name=name), None)
        # callable() replaces isinstance(..., collections.Callable):
        # collections.Callable moved to collections.abc in Python 3.3 and
        # was removed in 3.10, so the original check raises AttributeError.
        if validate is not None and callable(validate):
            validate(*arguments, **options)
    for name in self.get_actions():
        handle = getattr(self, "handle_{name:s}".format(
            name=name), None)
        if handle is not None and callable(handle):
            handle(*self.arguments, **self.options)
constant[
Parses arguments and options, runs validate_<action> for each action
named by self.get_actions(), then runs handle_<action> for each action
named by self.get_actions().
]
name[self].arguments assign[=] name[arguments]
name[self].options assign[=] name[options]
name[self].arguments assign[=] call[name[self].parse_arguments, parameter[name[arguments]]]
name[self].options assign[=] call[name[self].parse_options, parameter[name[options]]]
for taget[name[name]] in starred[call[name[self].get_actions, parameter[]]] begin[:]
variable[validate] assign[=] call[name[getattr], parameter[name[self], call[constant[validate_{name:s}].format, parameter[]], constant[None]]]
if <ast.BoolOp object at 0x7da1b14da350> begin[:]
call[name[validate], parameter[<ast.Starred object at 0x7da1b14d8730>]]
for taget[name[name]] in starred[call[name[self].get_actions, parameter[]]] begin[:]
variable[handle] assign[=] call[name[getattr], parameter[name[self], call[constant[handle_{name:s}].format, parameter[]], constant[None]]]
if <ast.BoolOp object at 0x7da1b14db340> begin[:]
call[name[handle], parameter[<ast.Starred object at 0x7da1b14dab00>]] | keyword[def] identifier[handle] ( identifier[self] ,* identifier[arguments] ,** identifier[options] ):
literal[string]
identifier[self] . identifier[arguments] = identifier[arguments]
identifier[self] . identifier[options] = identifier[options]
identifier[self] . identifier[arguments] = identifier[self] . identifier[parse_arguments] ( identifier[arguments] )
identifier[self] . identifier[options] = identifier[self] . identifier[parse_options] ( identifier[options] )
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[get_actions] ():
identifier[validate] = identifier[getattr] ( identifier[self] , literal[string] . identifier[format] (
identifier[name] = identifier[name] ), keyword[None] )
keyword[if] identifier[validate] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[validate] , identifier[collections] . identifier[Callable] ):
identifier[validate] (* identifier[arguments] ,** identifier[options] )
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[get_actions] ():
identifier[handle] = identifier[getattr] ( identifier[self] , literal[string] . identifier[format] (
identifier[name] = identifier[name] ), keyword[None] )
keyword[if] identifier[handle] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[handle] , identifier[collections] . identifier[Callable] ):
identifier[handle] (* identifier[self] . identifier[arguments] ,** identifier[self] . identifier[options] ) | def handle(self, *arguments, **options):
"""
Parses arguments and options, runs validate_<action> for each action
named by self.get_actions(), then runs handle_<action> for each action
named by self.get_actions().
"""
self.arguments = arguments
self.options = options
self.arguments = self.parse_arguments(arguments)
self.options = self.parse_options(options)
for name in self.get_actions():
validate = getattr(self, 'validate_{name:s}'.format(name=name), None)
if validate is not None and isinstance(validate, collections.Callable):
validate(*arguments, **options) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
for name in self.get_actions():
handle = getattr(self, 'handle_{name:s}'.format(name=name), None)
if handle is not None and isinstance(handle, collections.Callable):
handle(*self.arguments, **self.options) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] |
def find_srvs_by_hostname(self, host_name):
        """Return the services attached to the host named ``host_name``.

        :param host_name: the host name we want services
        :type host_name: str
        :return: list of services, or None when there is no ``hosts``
            collection or no host with that name
        :rtype: list[alignak.objects.service.Service]
        """
        # Guard: nothing to look into if this object carries no hosts.
        if not hasattr(self, 'hosts'):
            return None
        found = self.hosts.find_by_name(host_name)
        if found is None:
            return None
        return found.get_services()
return None | def function[find_srvs_by_hostname, parameter[self, host_name]]:
constant[Get all services from a host based on a host_name
:param host_name: the host name we want services
:type host_name: str
:return: list of services
:rtype: list[alignak.objects.service.Service]
]
if call[name[hasattr], parameter[name[self], constant[hosts]]] begin[:]
variable[host] assign[=] call[name[self].hosts.find_by_name, parameter[name[host_name]]]
if compare[name[host] is constant[None]] begin[:]
return[constant[None]]
return[call[name[host].get_services, parameter[]]]
return[constant[None]] | keyword[def] identifier[find_srvs_by_hostname] ( identifier[self] , identifier[host_name] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[host] = identifier[self] . identifier[hosts] . identifier[find_by_name] ( identifier[host_name] )
keyword[if] identifier[host] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[host] . identifier[get_services] ()
keyword[return] keyword[None] | def find_srvs_by_hostname(self, host_name):
"""Get all services from a host based on a host_name
:param host_name: the host name we want services
:type host_name: str
:return: list of services
:rtype: list[alignak.objects.service.Service]
"""
if hasattr(self, 'hosts'):
host = self.hosts.find_by_name(host_name)
if host is None:
return None # depends on [control=['if'], data=[]]
return host.get_services() # depends on [control=['if'], data=[]]
return None |
def diff_files(left, right, diff_options=None, formatter=None):
    """Takes two filenames or streams, and diffs the XML in those files"""
    # Delegate to the shared diff driver, using etree.parse as the loader
    # so each side may be either a filename or an open stream.
    loader = etree.parse
    return _diff(loader, left, right, diff_options=diff_options,
                 formatter=formatter)
constant[Takes two filenames or streams, and diffs the XML in those files]
return[call[name[_diff], parameter[name[etree].parse, name[left], name[right]]]] | keyword[def] identifier[diff_files] ( identifier[left] , identifier[right] , identifier[diff_options] = keyword[None] , identifier[formatter] = keyword[None] ):
literal[string]
keyword[return] identifier[_diff] ( identifier[etree] . identifier[parse] , identifier[left] , identifier[right] ,
identifier[diff_options] = identifier[diff_options] , identifier[formatter] = identifier[formatter] ) | def diff_files(left, right, diff_options=None, formatter=None):
"""Takes two filenames or streams, and diffs the XML in those files"""
return _diff(etree.parse, left, right, diff_options=diff_options, formatter=formatter) |
def get_gpu_requirements(gpus_reqs):
    """
    Extracts the GPU requirements as a list of GPURequirements.

    :param gpus_reqs: Either a dictionary {'count': <count>} requesting
                      <count> unconstrained GPUs, or a list
                      [{'minVram': <min_vram>}, ...] constraining the minimum
                      VRAM of each GPU. May be None or empty.
    :return: A list of GPURequirements (empty if no requirements are supplied)
    """
    # isinstance (instead of the original ``type(x) is ...``) also accepts
    # dict/list subclasses such as OrderedDict; falsy or unrecognized input
    # simply yields no requirements, matching the previous behavior.
    requirements = []
    if isinstance(gpus_reqs, dict):
        count = gpus_reqs.get('count')
        if count:
            requirements.extend(GPURequirement() for _ in range(count))
    elif isinstance(gpus_reqs, list):
        # NOTE: the JSON key is camelCase ('minVram'), not min_vram.
        requirements.extend(
            GPURequirement(min_vram=gpu_req['minVram']) for gpu_req in gpus_reqs
        )
    return requirements
constant[
Extracts the GPU from a dictionary requirements as list of GPURequirements.
:param gpus_reqs: A dictionary {'count': <count>} or a list [{min_vram: <min_vram>}, {min_vram: <min_vram>}, ...]
:return: A list of GPURequirements
]
variable[requirements] assign[=] list[[]]
if name[gpus_reqs] begin[:]
if compare[call[name[type], parameter[name[gpus_reqs]]] is name[dict]] begin[:]
variable[count] assign[=] call[name[gpus_reqs].get, parameter[constant[count]]]
if name[count] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[count]]]] begin[:]
call[name[requirements].append, parameter[call[name[GPURequirement], parameter[]]]]
return[name[requirements]] | keyword[def] identifier[get_gpu_requirements] ( identifier[gpus_reqs] ):
literal[string]
identifier[requirements] =[]
keyword[if] identifier[gpus_reqs] :
keyword[if] identifier[type] ( identifier[gpus_reqs] ) keyword[is] identifier[dict] :
identifier[count] = identifier[gpus_reqs] . identifier[get] ( literal[string] )
keyword[if] identifier[count] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[count] ):
identifier[requirements] . identifier[append] ( identifier[GPURequirement] ())
keyword[elif] identifier[type] ( identifier[gpus_reqs] ) keyword[is] identifier[list] :
keyword[for] identifier[gpu_req] keyword[in] identifier[gpus_reqs] :
identifier[requirements] . identifier[append] ( identifier[GPURequirement] ( identifier[min_vram] = identifier[gpu_req] [ literal[string] ]))
keyword[return] identifier[requirements]
keyword[else] :
keyword[return] [] | def get_gpu_requirements(gpus_reqs):
"""
Extracts the GPU from a dictionary requirements as list of GPURequirements.
:param gpus_reqs: A dictionary {'count': <count>} or a list [{min_vram: <min_vram>}, {min_vram: <min_vram>}, ...]
:return: A list of GPURequirements
"""
requirements = []
if gpus_reqs:
if type(gpus_reqs) is dict:
count = gpus_reqs.get('count')
if count:
for i in range(count):
requirements.append(GPURequirement()) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif type(gpus_reqs) is list:
for gpu_req in gpus_reqs:
requirements.append(GPURequirement(min_vram=gpu_req['minVram'])) # depends on [control=['for'], data=['gpu_req']] # depends on [control=['if'], data=[]]
return requirements # depends on [control=['if'], data=[]]
else:
# If no requirements are supplied
return [] |
def deep_exponential_family(data_size, feature_size, units, shape):
  """A multi-layered topic model over a documents-by-terms matrix.

  Builds a three-layer deep exponential family: Gamma-distributed weights
  ``w2, w1, w0``, Gamma latent layers ``z2 -> z1 -> z0``, and observed
  counts ``x`` drawn from a Poisson whose rate is ``matmul(z0, w0)``.

  Args:
    data_size: Number of documents (rows of the counts matrix).
    feature_size: Vocabulary size (columns of the counts matrix).
    units: Sequence of at least three layer widths; ``units[2]`` is the
      top (coarsest) layer and ``units[0]`` the bottom one.
    shape: Gamma shape parameter shared by the lower latent layers.

  Returns:
    x: An ``ed.Poisson`` random variable of shape [data_size, feature_size].
  """
  # Per-layer weights with a fixed Gamma(0.1, 0.3) prior.
  w2 = ed.Gamma(0.1, 0.3, sample_shape=[units[2], units[1]], name="w2")
  w1 = ed.Gamma(0.1, 0.3, sample_shape=[units[1], units[0]], name="w1")
  w0 = ed.Gamma(0.1, 0.3, sample_shape=[units[0], feature_size], name="w0")
  # Top-level latent activations, one row per document.
  z2 = ed.Gamma(0.1, 0.1, sample_shape=[data_size, units[2]], name="z2")
  # Lower layers use rate = shape / matmul(z_above, w); assuming ed.Gamma is
  # parameterized as (concentration, rate), this keeps each layer's
  # conditional mean equal to the matmul of the layer above.
  z1 = ed.Gamma(shape, shape / tf.matmul(z2, w2), name="z1")
  z0 = ed.Gamma(shape, shape / tf.matmul(z1, w1), name="z0")
  x = ed.Poisson(tf.matmul(z0, w0), name="x")
  return x
constant[A multi-layered topic model over a documents-by-terms matrix.]
variable[w2] assign[=] call[name[ed].Gamma, parameter[constant[0.1], constant[0.3]]]
variable[w1] assign[=] call[name[ed].Gamma, parameter[constant[0.1], constant[0.3]]]
variable[w0] assign[=] call[name[ed].Gamma, parameter[constant[0.1], constant[0.3]]]
variable[z2] assign[=] call[name[ed].Gamma, parameter[constant[0.1], constant[0.1]]]
variable[z1] assign[=] call[name[ed].Gamma, parameter[name[shape], binary_operation[name[shape] / call[name[tf].matmul, parameter[name[z2], name[w2]]]]]]
variable[z0] assign[=] call[name[ed].Gamma, parameter[name[shape], binary_operation[name[shape] / call[name[tf].matmul, parameter[name[z1], name[w1]]]]]]
variable[x] assign[=] call[name[ed].Poisson, parameter[call[name[tf].matmul, parameter[name[z0], name[w0]]]]]
return[name[x]] | keyword[def] identifier[deep_exponential_family] ( identifier[data_size] , identifier[feature_size] , identifier[units] , identifier[shape] ):
literal[string]
identifier[w2] = identifier[ed] . identifier[Gamma] ( literal[int] , literal[int] , identifier[sample_shape] =[ identifier[units] [ literal[int] ], identifier[units] [ literal[int] ]], identifier[name] = literal[string] )
identifier[w1] = identifier[ed] . identifier[Gamma] ( literal[int] , literal[int] , identifier[sample_shape] =[ identifier[units] [ literal[int] ], identifier[units] [ literal[int] ]], identifier[name] = literal[string] )
identifier[w0] = identifier[ed] . identifier[Gamma] ( literal[int] , literal[int] , identifier[sample_shape] =[ identifier[units] [ literal[int] ], identifier[feature_size] ], identifier[name] = literal[string] )
identifier[z2] = identifier[ed] . identifier[Gamma] ( literal[int] , literal[int] , identifier[sample_shape] =[ identifier[data_size] , identifier[units] [ literal[int] ]], identifier[name] = literal[string] )
identifier[z1] = identifier[ed] . identifier[Gamma] ( identifier[shape] , identifier[shape] / identifier[tf] . identifier[matmul] ( identifier[z2] , identifier[w2] ), identifier[name] = literal[string] )
identifier[z0] = identifier[ed] . identifier[Gamma] ( identifier[shape] , identifier[shape] / identifier[tf] . identifier[matmul] ( identifier[z1] , identifier[w1] ), identifier[name] = literal[string] )
identifier[x] = identifier[ed] . identifier[Poisson] ( identifier[tf] . identifier[matmul] ( identifier[z0] , identifier[w0] ), identifier[name] = literal[string] )
keyword[return] identifier[x] | def deep_exponential_family(data_size, feature_size, units, shape):
"""A multi-layered topic model over a documents-by-terms matrix."""
w2 = ed.Gamma(0.1, 0.3, sample_shape=[units[2], units[1]], name='w2')
w1 = ed.Gamma(0.1, 0.3, sample_shape=[units[1], units[0]], name='w1')
w0 = ed.Gamma(0.1, 0.3, sample_shape=[units[0], feature_size], name='w0')
z2 = ed.Gamma(0.1, 0.1, sample_shape=[data_size, units[2]], name='z2')
z1 = ed.Gamma(shape, shape / tf.matmul(z2, w2), name='z1')
z0 = ed.Gamma(shape, shape / tf.matmul(z1, w1), name='z0')
x = ed.Poisson(tf.matmul(z0, w0), name='x')
return x |
def replace_suffixes_2(self, word):
        """
        Find the longest suffix among the ones specified
        and perform the required action.
        """
        # 'eed' / 'eedly' become 'ee' when the word is long enough to
        # reach R1; these suffixes take no further post-processing.
        if word.endswith('eed'):
            if len(word) >= self.r1:
                word = word[:-3] + 'ee'
            return word
        if word.endswith('eedly'):
            if len(word) >= self.r1:
                word = word[:-5] + 'ee'
            return word
        # The remaining suffixes are only stripped when the stem still
        # contains a vowel; otherwise the word is returned unchanged.
        # (The four endswith tests are mutually exclusive, so checking
        # them in any order is equivalent to the original elif chain.)
        stem = None
        for suffix in ('ed', 'edly', 'ing', 'ingly'):
            if word.endswith(suffix):
                candidate = word[:-len(suffix)]
                if any(vowel in candidate for vowel in self.vowels):
                    stem = candidate
                break
        if stem is None:
            return word
        # Exactly one of these adjustments is applied to the new stem.
        if stem[-2:] in ('at', 'bl', 'iz'):
            stem += 'e'
        elif stem[-2:] in self.doubles:
            stem = stem[:-1]
        elif self.is_short(stem):
            stem += 'e'
        return stem
constant[
Find the longest suffix among the ones specified
and perform the required action.
]
variable[has_vowel] assign[=] constant[False]
if call[name[word].endswith, parameter[constant[eed]]] begin[:]
if compare[call[name[len], parameter[name[word]]] greater_or_equal[>=] name[self].r1] begin[:]
variable[word] assign[=] binary_operation[call[name[word]][<ast.Slice object at 0x7da1b0a4fee0>] + constant[ee]]
return[name[word]]
if name[has_vowel] begin[:]
variable[length] assign[=] call[name[len], parameter[name[word]]]
if compare[call[name[word]][<ast.Slice object at 0x7da1b0a48670>] in list[[<ast.Constant object at 0x7da1b0a48550>, <ast.Constant object at 0x7da1b0a4bb20>, <ast.Constant object at 0x7da1b0a4aef0>]]] begin[:]
<ast.AugAssign object at 0x7da1b0a48cd0>
return[name[word]] | keyword[def] identifier[replace_suffixes_2] ( identifier[self] , identifier[word] ):
literal[string]
identifier[has_vowel] = keyword[False]
keyword[if] identifier[word] . identifier[endswith] ( literal[string] ):
keyword[if] identifier[len] ( identifier[word] )>= identifier[self] . identifier[r1] :
identifier[word] = identifier[word] [:- literal[int] ]+ literal[string]
keyword[return] identifier[word]
keyword[elif] identifier[word] . identifier[endswith] ( literal[string] ):
keyword[if] identifier[len] ( identifier[word] )>= identifier[self] . identifier[r1] :
identifier[word] = identifier[word] [:- literal[int] ]+ literal[string]
keyword[return] identifier[word]
keyword[elif] identifier[word] . identifier[endswith] ( literal[string] ):
keyword[for] identifier[vowel] keyword[in] identifier[self] . identifier[vowels] :
keyword[if] identifier[vowel] keyword[in] identifier[word] [:- literal[int] ]:
identifier[has_vowel] = keyword[True]
identifier[word] = identifier[word] [:- literal[int] ]
keyword[break]
keyword[elif] identifier[word] . identifier[endswith] ( literal[string] ):
keyword[for] identifier[vowel] keyword[in] identifier[self] . identifier[vowels] :
keyword[if] identifier[vowel] keyword[in] identifier[word] [:- literal[int] ]:
identifier[has_vowel] = keyword[True]
identifier[word] = identifier[word] [:- literal[int] ]
keyword[break]
keyword[elif] identifier[word] . identifier[endswith] ( literal[string] ):
keyword[for] identifier[vowel] keyword[in] identifier[self] . identifier[vowels] :
keyword[if] identifier[vowel] keyword[in] identifier[word] [:- literal[int] ]:
identifier[has_vowel] = keyword[True]
identifier[word] = identifier[word] [:- literal[int] ]
keyword[break]
keyword[elif] identifier[word] . identifier[endswith] ( literal[string] ):
keyword[for] identifier[vowel] keyword[in] identifier[self] . identifier[vowels] :
keyword[if] identifier[vowel] keyword[in] identifier[word] [:- literal[int] ]:
identifier[has_vowel] = keyword[True]
identifier[word] = identifier[word] [:- literal[int] ]
keyword[break]
keyword[if] identifier[has_vowel] :
identifier[length] = identifier[len] ( identifier[word] )
keyword[if] identifier[word] [ identifier[length] - literal[int] :] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[word] += literal[string]
keyword[elif] identifier[word] [ identifier[length] - literal[int] :] keyword[in] identifier[self] . identifier[doubles] :
identifier[word] = identifier[word] [:- literal[int] ]
keyword[elif] identifier[self] . identifier[is_short] ( identifier[word] ):
identifier[word] += literal[string]
keyword[return] identifier[word] | def replace_suffixes_2(self, word):
"""
Find the longest suffix among the ones specified
and perform the required action.
"""
has_vowel = False
if word.endswith('eed'):
if len(word) >= self.r1:
word = word[:-3] + 'ee' # depends on [control=['if'], data=[]]
return word # depends on [control=['if'], data=[]]
elif word.endswith('eedly'):
if len(word) >= self.r1:
word = word[:-5] + 'ee' # depends on [control=['if'], data=[]]
return word # depends on [control=['if'], data=[]]
elif word.endswith('ed'):
for vowel in self.vowels:
if vowel in word[:-2]:
has_vowel = True
word = word[:-2]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vowel']] # depends on [control=['if'], data=[]]
elif word.endswith('edly'):
for vowel in self.vowels:
if vowel in word[:-4]:
has_vowel = True
word = word[:-4]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vowel']] # depends on [control=['if'], data=[]]
elif word.endswith('ing'):
for vowel in self.vowels:
if vowel in word[:-3]:
has_vowel = True
word = word[:-3]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vowel']] # depends on [control=['if'], data=[]]
elif word.endswith('ingly'):
for vowel in self.vowels:
if vowel in word[:-5]:
has_vowel = True
word = word[:-5]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vowel']] # depends on [control=['if'], data=[]]
# Be sure to only perform one of these.
if has_vowel:
length = len(word)
if word[length - 2:] in ['at', 'bl', 'iz']:
word += 'e' # depends on [control=['if'], data=[]]
elif word[length - 2:] in self.doubles:
word = word[:-1] # depends on [control=['if'], data=[]]
elif self.is_short(word):
word += 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return word |
def pages_admin_menu(context, page):
    """Render the admin table of pages.

    Marks ``page`` as expanded when its id appears in the
    ``tree_expanded`` cookie, a comma-separated list of page ids
    maintained by the admin tree UI.

    :param context: template context (must support ``get`` and ``update``).
    :param page: the page whose row is being rendered.
    :return: the context, updated with ``expanded`` and ``page``.
    """
    request = context.get('request', None)
    expanded = False
    if request and "tree_expanded" in request.COOKIES:
        cookie_string = urllib.unquote(request.COOKIES['tree_expanded'])
        if cookie_string:
            # The cookie was already unquoted into cookie_string above;
            # the original code unquoted the raw cookie a second time here.
            ids = [int(page_id) for page_id in cookie_string.split(',')]
            if page.id in ids:
                expanded = True
    context.update({'expanded': expanded, 'page': page})
    return context
constant[Render the admin table of pages.]
variable[request] assign[=] call[name[context].get, parameter[constant[request], constant[None]]]
variable[expanded] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b26ac160> begin[:]
variable[cookie_string] assign[=] call[name[urllib].unquote, parameter[call[name[request].COOKIES][constant[tree_expanded]]]]
if name[cookie_string] begin[:]
variable[ids] assign[=] <ast.ListComp object at 0x7da20c7964d0>
if compare[name[page].id in name[ids]] begin[:]
variable[expanded] assign[=] constant[True]
call[name[context].update, parameter[dictionary[[<ast.Constant object at 0x7da2044c2020>, <ast.Constant object at 0x7da2044c3400>], [<ast.Name object at 0x7da2044c3430>, <ast.Name object at 0x7da2044c0eb0>]]]]
return[name[context]] | keyword[def] identifier[pages_admin_menu] ( identifier[context] , identifier[page] ):
literal[string]
identifier[request] = identifier[context] . identifier[get] ( literal[string] , keyword[None] )
identifier[expanded] = keyword[False]
keyword[if] identifier[request] keyword[and] literal[string] keyword[in] identifier[request] . identifier[COOKIES] :
identifier[cookie_string] = identifier[urllib] . identifier[unquote] ( identifier[request] . identifier[COOKIES] [ literal[string] ])
keyword[if] identifier[cookie_string] :
identifier[ids] =[ identifier[int] ( identifier[id] ) keyword[for] identifier[id] keyword[in]
identifier[urllib] . identifier[unquote] ( identifier[request] . identifier[COOKIES] [ literal[string] ]). identifier[split] ( literal[string] )]
keyword[if] identifier[page] . identifier[id] keyword[in] identifier[ids] :
identifier[expanded] = keyword[True]
identifier[context] . identifier[update] ({ literal[string] : identifier[expanded] , literal[string] : identifier[page] })
keyword[return] identifier[context] | def pages_admin_menu(context, page):
"""Render the admin table of pages."""
request = context.get('request', None)
expanded = False
if request and 'tree_expanded' in request.COOKIES:
cookie_string = urllib.unquote(request.COOKIES['tree_expanded'])
if cookie_string:
ids = [int(id) for id in urllib.unquote(request.COOKIES['tree_expanded']).split(',')]
if page.id in ids:
expanded = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
context.update({'expanded': expanded, 'page': page})
return context |
def decodeElem(self, bytes, index, raw=False):
        """Decodes a single element at array[index] from a sequence bytes
        that contain data for the entire array.
        """
        self._assertIndex(index)
        # Each element occupies a fixed number of bytes; slice out this
        # element's window from the whole-array buffer.
        width = self.type.nbytes
        start = index * width
        stop = start + width
        if stop > len(bytes):
            template = ('Decoding %s[%d] requires %d bytes, '
                        'but the ArrayType.decode() method received only %d bytes.')
            raise IndexError(
                template % (self.type.name, index, stop, len(bytes)))
        return self.type.decode(bytes[start:stop], raw)
constant[Decodes a single element at array[index] from a sequence bytes
that contain data for the entire array.
]
call[name[self]._assertIndex, parameter[name[index]]]
variable[start] assign[=] binary_operation[name[index] * name[self].type.nbytes]
variable[stop] assign[=] binary_operation[name[start] + name[self].type.nbytes]
if compare[name[stop] greater[>] call[name[len], parameter[name[bytes]]]] begin[:]
variable[msg] assign[=] constant[Decoding %s[%d] requires %d bytes, ]
<ast.AugAssign object at 0x7da18dc9a080>
<ast.Raise object at 0x7da18dc98370>
return[call[name[self].type.decode, parameter[call[name[bytes]][<ast.Slice object at 0x7da207f99ff0>], name[raw]]]] | keyword[def] identifier[decodeElem] ( identifier[self] , identifier[bytes] , identifier[index] , identifier[raw] = keyword[False] ):
literal[string]
identifier[self] . identifier[_assertIndex] ( identifier[index] )
identifier[start] = identifier[index] * identifier[self] . identifier[type] . identifier[nbytes]
identifier[stop] = identifier[start] + identifier[self] . identifier[type] . identifier[nbytes]
keyword[if] identifier[stop] > identifier[len] ( identifier[bytes] ):
identifier[msg] = literal[string]
identifier[msg] += literal[string]
keyword[raise] identifier[IndexError] ( identifier[msg] %( identifier[self] . identifier[type] . identifier[name] , identifier[index] , identifier[stop] , identifier[len] ( identifier[bytes] )))
keyword[return] identifier[self] . identifier[type] . identifier[decode] ( identifier[bytes] [ identifier[start] : identifier[stop] ], identifier[raw] ) | def decodeElem(self, bytes, index, raw=False):
"""Decodes a single element at array[index] from a sequence bytes
that contain data for the entire array.
"""
self._assertIndex(index)
start = index * self.type.nbytes
stop = start + self.type.nbytes
if stop > len(bytes):
msg = 'Decoding %s[%d] requires %d bytes, '
msg += 'but the ArrayType.decode() method received only %d bytes.'
raise IndexError(msg % (self.type.name, index, stop, len(bytes))) # depends on [control=['if'], data=['stop']]
return self.type.decode(bytes[start:stop], raw) |
def draw(self):
        """
        Draws the submenu and its background.
        Note that this leaves the OpenGL state set to 2d drawing and may modify the scissor settings.
        """
        # Bail out early when hidden; nothing is drawn at all.
        if not self.visible:
            return
        # Containers manage their own clipping; everything else is
        # scissored to this widget's rectangle while it draws.
        if not isinstance(self.submenu, Container):
            glEnable(GL_SCISSOR_TEST)
            glScissor(*(self.pos + self.size))
        SubMenu.draw(self)
        # Re-check rather than caching the flag, mirroring the original:
        # drawing could in principle replace self.submenu.
        if not isinstance(self.submenu, Container):
            glDisable(GL_SCISSOR_TEST)
glDisable(GL_SCISSOR_TEST) | def function[draw, parameter[self]]:
constant[
Draws the submenu and its background.
Note that this leaves the OpenGL state set to 2d drawing and may modify the scissor settings.
]
if <ast.UnaryOp object at 0x7da1b0240af0> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b012d000> begin[:]
call[name[glEnable], parameter[name[GL_SCISSOR_TEST]]]
call[name[glScissor], parameter[<ast.Starred object at 0x7da1b012d060>]]
call[name[SubMenu].draw, parameter[name[self]]]
if <ast.UnaryOp object at 0x7da1b012d150> begin[:]
call[name[glDisable], parameter[name[GL_SCISSOR_TEST]]] | keyword[def] identifier[draw] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[visible] :
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[submenu] , identifier[Container] ):
identifier[glEnable] ( identifier[GL_SCISSOR_TEST] )
identifier[glScissor] (* identifier[self] . identifier[pos] + identifier[self] . identifier[size] )
identifier[SubMenu] . identifier[draw] ( identifier[self] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[submenu] , identifier[Container] ):
identifier[glDisable] ( identifier[GL_SCISSOR_TEST] ) | def draw(self):
"""
Draws the submenu and its background.
Note that this leaves the OpenGL state set to 2d drawing and may modify the scissor settings.
"""
if not self.visible:
# Simple visibility check, has to be tested to see if it works properly
return # depends on [control=['if'], data=[]]
if not isinstance(self.submenu, Container):
glEnable(GL_SCISSOR_TEST)
glScissor(*self.pos + self.size) # depends on [control=['if'], data=[]]
SubMenu.draw(self)
if not isinstance(self.submenu, Container):
glDisable(GL_SCISSOR_TEST) # depends on [control=['if'], data=[]] |
def _send_autocommit_mode(self):
        """Set whether or not to commit after every execute()"""
        # Push the current autocommit flag to the server, then consume
        # the OK packet it answers with.
        query = "SET AUTOCOMMIT = %s" % self.escape(self.autocommit_mode)
        self._execute_command(COMMAND.COM_QUERY, query)
        self._read_ok_packet()
constant[Set whether or not to commit after every execute()]
call[name[self]._execute_command, parameter[name[COMMAND].COM_QUERY, binary_operation[constant[SET AUTOCOMMIT = %s] <ast.Mod object at 0x7da2590d6920> call[name[self].escape, parameter[name[self].autocommit_mode]]]]]
call[name[self]._read_ok_packet, parameter[]] | keyword[def] identifier[_send_autocommit_mode] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_execute_command] ( identifier[COMMAND] . identifier[COM_QUERY] , literal[string] %
identifier[self] . identifier[escape] ( identifier[self] . identifier[autocommit_mode] ))
identifier[self] . identifier[_read_ok_packet] () | def _send_autocommit_mode(self):
"""Set whether or not to commit after every execute()"""
self._execute_command(COMMAND.COM_QUERY, 'SET AUTOCOMMIT = %s' % self.escape(self.autocommit_mode))
self._read_ok_packet() |
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
        """
        See :meth:`superclass method
        <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
        for spec of input and result values.
        """
        # extracting dictionary of coefficients specific to required
        # intensity measure type.
        C = self.COEFFS[imt]
        # log10 mean from the magnitude, distance, site and
        # forearc/backarc terms of the functional form.
        imean = (self._compute_magnitude(rup, C) +
                 self._compute_distance(rup, dists, C) +
                 self._get_site_amplification(sites, C) +
                 self._compute_forearc_backarc_term(C, sites, dists, rup))
        istddevs = self._get_stddevs(C,
                                     stddev_types,
                                     num_sites=len(sites.vs30))
        # Convert units to g, but only for PGA and SA (not PGV).
        # Use explicit tuple membership: the original ``imt.name in "SA PGA"``
        # was a substring test, which would also match names such as "A" or
        # "GA"; for the supported IMTs (SA, PGA, PGV) the result is identical.
        if imt.name in ("SA", "PGA"):
            # Shift by -2 (presumably log10 cm/s^2 -> log10 m/s^2 -- confirm
            # against the source publication) and divide by g before moving
            # to natural-log scaling.
            mean = np.log((10.0 ** (imean - 2.0)) / g)
        else:
            # PGV keeps its native units, converted to natural log.
            mean = np.log(10.0 ** imean)
        # Return stddevs in terms of natural log scaling
        stddevs = np.log(10.0 ** np.array(istddevs))
        return mean, stddevs
constant[
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
]
variable[C] assign[=] call[name[self].COEFFS][name[imt]]
variable[imean] assign[=] binary_operation[binary_operation[binary_operation[call[name[self]._compute_magnitude, parameter[name[rup], name[C]]] + call[name[self]._compute_distance, parameter[name[rup], name[dists], name[C]]]] + call[name[self]._get_site_amplification, parameter[name[sites], name[C]]]] + call[name[self]._compute_forearc_backarc_term, parameter[name[C], name[sites], name[dists], name[rup]]]]
variable[istddevs] assign[=] call[name[self]._get_stddevs, parameter[name[C], name[stddev_types]]]
if compare[name[imt].name in constant[SA PGA]] begin[:]
variable[mean] assign[=] call[name[np].log, parameter[binary_operation[binary_operation[constant[10.0] ** binary_operation[name[imean] - constant[2.0]]] / name[g]]]]
variable[stddevs] assign[=] call[name[np].log, parameter[binary_operation[constant[10.0] ** call[name[np].array, parameter[name[istddevs]]]]]]
return[tuple[[<ast.Name object at 0x7da20c794d60>, <ast.Name object at 0x7da20c795660>]]] | keyword[def] identifier[get_mean_and_stddevs] ( identifier[self] , identifier[sites] , identifier[rup] , identifier[dists] , identifier[imt] , identifier[stddev_types] ):
literal[string]
identifier[C] = identifier[self] . identifier[COEFFS] [ identifier[imt] ]
identifier[imean] =( identifier[self] . identifier[_compute_magnitude] ( identifier[rup] , identifier[C] )+
identifier[self] . identifier[_compute_distance] ( identifier[rup] , identifier[dists] , identifier[C] )+
identifier[self] . identifier[_get_site_amplification] ( identifier[sites] , identifier[C] )+
identifier[self] . identifier[_compute_forearc_backarc_term] ( identifier[C] , identifier[sites] , identifier[dists] , identifier[rup] ))
identifier[istddevs] = identifier[self] . identifier[_get_stddevs] ( identifier[C] ,
identifier[stddev_types] ,
identifier[num_sites] = identifier[len] ( identifier[sites] . identifier[vs30] ))
keyword[if] identifier[imt] . identifier[name] keyword[in] literal[string] :
identifier[mean] = identifier[np] . identifier[log] (( literal[int] **( identifier[imean] - literal[int] ))/ identifier[g] )
keyword[else] :
identifier[mean] = identifier[np] . identifier[log] ( literal[int] ** identifier[imean] )
identifier[stddevs] = identifier[np] . identifier[log] ( literal[int] ** identifier[np] . identifier[array] ( identifier[istddevs] ))
keyword[return] identifier[mean] , identifier[stddevs] | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# extracting dictionary of coefficients specific to required
# intensity measure type.
C = self.COEFFS[imt]
imean = self._compute_magnitude(rup, C) + self._compute_distance(rup, dists, C) + self._get_site_amplification(sites, C) + self._compute_forearc_backarc_term(C, sites, dists, rup)
istddevs = self._get_stddevs(C, stddev_types, num_sites=len(sites.vs30))
# Convert units to g,
# but only for PGA and SA (not PGV):
if imt.name in 'SA PGA':
mean = np.log(10.0 ** (imean - 2.0) / g) # depends on [control=['if'], data=[]]
else:
# PGV:
mean = np.log(10.0 ** imean)
# Return stddevs in terms of natural log scaling
stddevs = np.log(10.0 ** np.array(istddevs))
# mean_LogNaturale = np.log((10 ** mean) * 1e-2 / g)
return (mean, stddevs) |
def sanity_check_memory_sync(self, wire_src_dict=None):
        """ Check that all memories are synchronous unless explicitly specified as async.
        While the semantics of 'm' memories reads is asynchronous, if you want your design
        to use a block ram (on an FPGA or otherwise) you want to make sure the index is
        available at the beginning of the clock edge. This check will walk the logic structure
        and throw an error on any memory if finds that has an index that is not ready at the
        beginning of the cycle.

        :param wire_src_dict: optional precomputed wire -> driving-net map
            (as returned by ``net_connections``); recomputed when None.
        :raises PyrtlError: if a non-asynchronous memory has a read index
            driven by anything other than registers, inputs, constants, or
            pass-through ops over those.
        """
        # Memory nets ('m') that were NOT explicitly declared asynchronous.
        sync_mems = set(m for m in self.logic_subset('m') if not m.op_param[1].asynchronous)
        if not len(sync_mems):
            return  # nothing to check here
        if wire_src_dict is None:
            # Map each wire to the net driving it (destination map unused).
            wire_src_dict, wdd = self.net_connections()
        # Local import -- likely to avoid a circular import at module load.
        from .wire import Input, Const
        # Register outputs ('r') are stable at the clock edge: a safe source.
        sync_src = 'r'
        # Ops a synchronous value may pass through combinationally;
        # presumably wire-copy ('w'), concat ('c') and select ('s') --
        # verify against the net op codes if this set ever changes.
        sync_prop = 'wcs'
        for net in sync_mems:
            # Worklist walk backwards from the memory's argument wires.
            wires_to_check = list(net.args)
            while len(wires_to_check):
                wire = wires_to_check.pop()
                if isinstance(wire, (Input, Const)):
                    # Inputs and constants are ready at the start of the cycle.
                    continue
                src_net = wire_src_dict[wire]
                if src_net.op == sync_src:
                    # Driven directly by a register: synchronous, stop here.
                    continue
                elif src_net.op in sync_prop:
                    # Pass-through op: keep walking its own arguments.
                    wires_to_check.extend(src_net.args)
                else:
                    raise PyrtlError(
                        'memory "%s" is not specified as asynchronous but has an index '
                        '"%s" that is not ready at the start of the cycle due to net "%s"'
                        % (net.op_param[1].name, net.args[0].name, str(src_net)))
constant[ Check that all memories are synchronous unless explicitly specified as async.
While the semantics of 'm' memories reads is asynchronous, if you want your design
to use a block ram (on an FPGA or otherwise) you want to make sure the index is
available at the beginning of the clock edge. This check will walk the logic structure
and throw an error on any memory if finds that has an index that is not ready at the
beginning of the cycle.
]
variable[sync_mems] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da2041db7c0>]]
if <ast.UnaryOp object at 0x7da2041da590> begin[:]
return[None]
if compare[name[wire_src_dict] is constant[None]] begin[:]
<ast.Tuple object at 0x7da2041d8fd0> assign[=] call[name[self].net_connections, parameter[]]
from relative_module[wire] import module[Input], module[Const]
variable[sync_src] assign[=] constant[r]
variable[sync_prop] assign[=] constant[wcs]
for taget[name[net]] in starred[name[sync_mems]] begin[:]
variable[wires_to_check] assign[=] call[name[list], parameter[name[net].args]]
while call[name[len], parameter[name[wires_to_check]]] begin[:]
variable[wire] assign[=] call[name[wires_to_check].pop, parameter[]]
if call[name[isinstance], parameter[name[wire], tuple[[<ast.Name object at 0x7da2041d8220>, <ast.Name object at 0x7da2041d82e0>]]]] begin[:]
continue
variable[src_net] assign[=] call[name[wire_src_dict]][name[wire]]
if compare[name[src_net].op equal[==] name[sync_src]] begin[:]
continue | keyword[def] identifier[sanity_check_memory_sync] ( identifier[self] , identifier[wire_src_dict] = keyword[None] ):
literal[string]
identifier[sync_mems] = identifier[set] ( identifier[m] keyword[for] identifier[m] keyword[in] identifier[self] . identifier[logic_subset] ( literal[string] ) keyword[if] keyword[not] identifier[m] . identifier[op_param] [ literal[int] ]. identifier[asynchronous] )
keyword[if] keyword[not] identifier[len] ( identifier[sync_mems] ):
keyword[return]
keyword[if] identifier[wire_src_dict] keyword[is] keyword[None] :
identifier[wire_src_dict] , identifier[wdd] = identifier[self] . identifier[net_connections] ()
keyword[from] . identifier[wire] keyword[import] identifier[Input] , identifier[Const]
identifier[sync_src] = literal[string]
identifier[sync_prop] = literal[string]
keyword[for] identifier[net] keyword[in] identifier[sync_mems] :
identifier[wires_to_check] = identifier[list] ( identifier[net] . identifier[args] )
keyword[while] identifier[len] ( identifier[wires_to_check] ):
identifier[wire] = identifier[wires_to_check] . identifier[pop] ()
keyword[if] identifier[isinstance] ( identifier[wire] ,( identifier[Input] , identifier[Const] )):
keyword[continue]
identifier[src_net] = identifier[wire_src_dict] [ identifier[wire] ]
keyword[if] identifier[src_net] . identifier[op] == identifier[sync_src] :
keyword[continue]
keyword[elif] identifier[src_net] . identifier[op] keyword[in] identifier[sync_prop] :
identifier[wires_to_check] . identifier[extend] ( identifier[src_net] . identifier[args] )
keyword[else] :
keyword[raise] identifier[PyrtlError] (
literal[string]
literal[string]
%( identifier[net] . identifier[op_param] [ literal[int] ]. identifier[name] , identifier[net] . identifier[args] [ literal[int] ]. identifier[name] , identifier[str] ( identifier[src_net] ))) | def sanity_check_memory_sync(self, wire_src_dict=None):
""" Check that all memories are synchronous unless explicitly specified as async.
While the semantics of 'm' memories reads is asynchronous, if you want your design
to use a block ram (on an FPGA or otherwise) you want to make sure the index is
available at the beginning of the clock edge. This check will walk the logic structure
and throw an error on any memory if finds that has an index that is not ready at the
beginning of the cycle.
"""
sync_mems = set((m for m in self.logic_subset('m') if not m.op_param[1].asynchronous))
if not len(sync_mems):
return # nothing to check here # depends on [control=['if'], data=[]]
if wire_src_dict is None:
(wire_src_dict, wdd) = self.net_connections() # depends on [control=['if'], data=['wire_src_dict']]
from .wire import Input, Const
sync_src = 'r'
sync_prop = 'wcs'
for net in sync_mems:
wires_to_check = list(net.args)
while len(wires_to_check):
wire = wires_to_check.pop()
if isinstance(wire, (Input, Const)):
continue # depends on [control=['if'], data=[]]
src_net = wire_src_dict[wire]
if src_net.op == sync_src:
continue # depends on [control=['if'], data=[]]
elif src_net.op in sync_prop:
wires_to_check.extend(src_net.args) # depends on [control=['if'], data=[]]
else:
raise PyrtlError('memory "%s" is not specified as asynchronous but has an index "%s" that is not ready at the start of the cycle due to net "%s"' % (net.op_param[1].name, net.args[0].name, str(src_net))) # depends on [control=['while'], data=[]] # depends on [control=['for'], data=['net']] |
def open_in_browser(doc, encoding=None):
"""
Open the HTML document in a web browser, saving it to a temporary
file to open it. Note that this does not delete the file after
use. This is mainly meant for debugging.
"""
import os
import webbrowser
import tempfile
if not isinstance(doc, etree._ElementTree):
doc = etree.ElementTree(doc)
handle, fn = tempfile.mkstemp(suffix='.html')
f = os.fdopen(handle, 'wb')
try:
doc.write(f, method="html", encoding=encoding or doc.docinfo.encoding or "UTF-8")
finally:
# we leak the file itself here, but we should at least close it
f.close()
url = 'file://' + fn.replace(os.path.sep, '/')
print(url)
webbrowser.open(url) | def function[open_in_browser, parameter[doc, encoding]]:
constant[
Open the HTML document in a web browser, saving it to a temporary
file to open it. Note that this does not delete the file after
use. This is mainly meant for debugging.
]
import module[os]
import module[webbrowser]
import module[tempfile]
if <ast.UnaryOp object at 0x7da2043462f0> begin[:]
variable[doc] assign[=] call[name[etree].ElementTree, parameter[name[doc]]]
<ast.Tuple object at 0x7da204345240> assign[=] call[name[tempfile].mkstemp, parameter[]]
variable[f] assign[=] call[name[os].fdopen, parameter[name[handle], constant[wb]]]
<ast.Try object at 0x7da204344100>
variable[url] assign[=] binary_operation[constant[file://] + call[name[fn].replace, parameter[name[os].path.sep, constant[/]]]]
call[name[print], parameter[name[url]]]
call[name[webbrowser].open, parameter[name[url]]] | keyword[def] identifier[open_in_browser] ( identifier[doc] , identifier[encoding] = keyword[None] ):
literal[string]
keyword[import] identifier[os]
keyword[import] identifier[webbrowser]
keyword[import] identifier[tempfile]
keyword[if] keyword[not] identifier[isinstance] ( identifier[doc] , identifier[etree] . identifier[_ElementTree] ):
identifier[doc] = identifier[etree] . identifier[ElementTree] ( identifier[doc] )
identifier[handle] , identifier[fn] = identifier[tempfile] . identifier[mkstemp] ( identifier[suffix] = literal[string] )
identifier[f] = identifier[os] . identifier[fdopen] ( identifier[handle] , literal[string] )
keyword[try] :
identifier[doc] . identifier[write] ( identifier[f] , identifier[method] = literal[string] , identifier[encoding] = identifier[encoding] keyword[or] identifier[doc] . identifier[docinfo] . identifier[encoding] keyword[or] literal[string] )
keyword[finally] :
identifier[f] . identifier[close] ()
identifier[url] = literal[string] + identifier[fn] . identifier[replace] ( identifier[os] . identifier[path] . identifier[sep] , literal[string] )
identifier[print] ( identifier[url] )
identifier[webbrowser] . identifier[open] ( identifier[url] ) | def open_in_browser(doc, encoding=None):
"""
Open the HTML document in a web browser, saving it to a temporary
file to open it. Note that this does not delete the file after
use. This is mainly meant for debugging.
"""
import os
import webbrowser
import tempfile
if not isinstance(doc, etree._ElementTree):
doc = etree.ElementTree(doc) # depends on [control=['if'], data=[]]
(handle, fn) = tempfile.mkstemp(suffix='.html')
f = os.fdopen(handle, 'wb')
try:
doc.write(f, method='html', encoding=encoding or doc.docinfo.encoding or 'UTF-8') # depends on [control=['try'], data=[]]
finally:
# we leak the file itself here, but we should at least close it
f.close()
url = 'file://' + fn.replace(os.path.sep, '/')
print(url)
webbrowser.open(url) |
def _uniform_correlation_like_matrix(num_rows, batch_shape, dtype, seed):
"""Returns a uniformly random `Tensor` of "correlation-like" matrices.
A "correlation-like" matrix is a symmetric square matrix with all entries
between -1 and 1 (inclusive) and 1s on the main diagonal. Of these,
the ones that are positive semi-definite are exactly the correlation
matrices.
Args:
num_rows: Python `int` dimension of the correlation-like matrices.
batch_shape: `Tensor` or Python `tuple` of `int` shape of the
batch to return.
dtype: `dtype` of the `Tensor` to return.
seed: Random seed.
Returns:
matrices: A `Tensor` of shape `batch_shape + [num_rows, num_rows]`
and dtype `dtype`. Each entry is in [-1, 1], and each matrix
along the bottom two dimensions is symmetric and has 1s on the
main diagonal.
"""
num_entries = num_rows * (num_rows + 1) / 2
ones = tf.ones(shape=[num_entries], dtype=dtype)
# It seems wasteful to generate random values for the diagonal since
# I am going to throw them away, but `fill_triangular` fills the
# diagonal, so I probably need them.
# It's not impossible that it would be more efficient to just fill
# the whole matrix with random values instead of messing with
# `fill_triangular`. Then would need to filter almost half out with
# `matrix_band_part`.
unifs = uniform.Uniform(-ones, ones).sample(batch_shape, seed=seed)
tril = util.fill_triangular(unifs)
symmetric = tril + tf.linalg.matrix_transpose(tril)
diagonal_ones = tf.ones(
shape=util.pad(batch_shape, axis=0, back=True, value=num_rows),
dtype=dtype)
return tf.linalg.set_diag(symmetric, diagonal_ones) | def function[_uniform_correlation_like_matrix, parameter[num_rows, batch_shape, dtype, seed]]:
constant[Returns a uniformly random `Tensor` of "correlation-like" matrices.
A "correlation-like" matrix is a symmetric square matrix with all entries
between -1 and 1 (inclusive) and 1s on the main diagonal. Of these,
the ones that are positive semi-definite are exactly the correlation
matrices.
Args:
num_rows: Python `int` dimension of the correlation-like matrices.
batch_shape: `Tensor` or Python `tuple` of `int` shape of the
batch to return.
dtype: `dtype` of the `Tensor` to return.
seed: Random seed.
Returns:
matrices: A `Tensor` of shape `batch_shape + [num_rows, num_rows]`
and dtype `dtype`. Each entry is in [-1, 1], and each matrix
along the bottom two dimensions is symmetric and has 1s on the
main diagonal.
]
variable[num_entries] assign[=] binary_operation[binary_operation[name[num_rows] * binary_operation[name[num_rows] + constant[1]]] / constant[2]]
variable[ones] assign[=] call[name[tf].ones, parameter[]]
variable[unifs] assign[=] call[call[name[uniform].Uniform, parameter[<ast.UnaryOp object at 0x7da1b03b9d50>, name[ones]]].sample, parameter[name[batch_shape]]]
variable[tril] assign[=] call[name[util].fill_triangular, parameter[name[unifs]]]
variable[symmetric] assign[=] binary_operation[name[tril] + call[name[tf].linalg.matrix_transpose, parameter[name[tril]]]]
variable[diagonal_ones] assign[=] call[name[tf].ones, parameter[]]
return[call[name[tf].linalg.set_diag, parameter[name[symmetric], name[diagonal_ones]]]] | keyword[def] identifier[_uniform_correlation_like_matrix] ( identifier[num_rows] , identifier[batch_shape] , identifier[dtype] , identifier[seed] ):
literal[string]
identifier[num_entries] = identifier[num_rows] *( identifier[num_rows] + literal[int] )/ literal[int]
identifier[ones] = identifier[tf] . identifier[ones] ( identifier[shape] =[ identifier[num_entries] ], identifier[dtype] = identifier[dtype] )
identifier[unifs] = identifier[uniform] . identifier[Uniform] (- identifier[ones] , identifier[ones] ). identifier[sample] ( identifier[batch_shape] , identifier[seed] = identifier[seed] )
identifier[tril] = identifier[util] . identifier[fill_triangular] ( identifier[unifs] )
identifier[symmetric] = identifier[tril] + identifier[tf] . identifier[linalg] . identifier[matrix_transpose] ( identifier[tril] )
identifier[diagonal_ones] = identifier[tf] . identifier[ones] (
identifier[shape] = identifier[util] . identifier[pad] ( identifier[batch_shape] , identifier[axis] = literal[int] , identifier[back] = keyword[True] , identifier[value] = identifier[num_rows] ),
identifier[dtype] = identifier[dtype] )
keyword[return] identifier[tf] . identifier[linalg] . identifier[set_diag] ( identifier[symmetric] , identifier[diagonal_ones] ) | def _uniform_correlation_like_matrix(num_rows, batch_shape, dtype, seed):
"""Returns a uniformly random `Tensor` of "correlation-like" matrices.
A "correlation-like" matrix is a symmetric square matrix with all entries
between -1 and 1 (inclusive) and 1s on the main diagonal. Of these,
the ones that are positive semi-definite are exactly the correlation
matrices.
Args:
num_rows: Python `int` dimension of the correlation-like matrices.
batch_shape: `Tensor` or Python `tuple` of `int` shape of the
batch to return.
dtype: `dtype` of the `Tensor` to return.
seed: Random seed.
Returns:
matrices: A `Tensor` of shape `batch_shape + [num_rows, num_rows]`
and dtype `dtype`. Each entry is in [-1, 1], and each matrix
along the bottom two dimensions is symmetric and has 1s on the
main diagonal.
"""
num_entries = num_rows * (num_rows + 1) / 2
ones = tf.ones(shape=[num_entries], dtype=dtype)
# It seems wasteful to generate random values for the diagonal since
# I am going to throw them away, but `fill_triangular` fills the
# diagonal, so I probably need them.
# It's not impossible that it would be more efficient to just fill
# the whole matrix with random values instead of messing with
# `fill_triangular`. Then would need to filter almost half out with
# `matrix_band_part`.
unifs = uniform.Uniform(-ones, ones).sample(batch_shape, seed=seed)
tril = util.fill_triangular(unifs)
symmetric = tril + tf.linalg.matrix_transpose(tril)
diagonal_ones = tf.ones(shape=util.pad(batch_shape, axis=0, back=True, value=num_rows), dtype=dtype)
return tf.linalg.set_diag(symmetric, diagonal_ones) |
def handle_exceptions(*args):
"""
| Handles exceptions.
| It's possible to specify an user defined exception handler,
if not, :func:`base_exception_handler` handler will be used.
| The decorator uses given exceptions objects
or the default Python `Exception <http://docs.python.org/library/exceptions.html#exceptions.Exception>`_ class.
Usage::
@handle_exceptions(ZeroDivisionError)
def raiseAnException(value):
'''
Raises a 'ZeroDivisionError' exception.
'''
return value / 0
:param \*args: Arguments.
:type \*args: \*
:return: Object.
:rtype: object
"""
exceptions = tuple(filter(lambda x: issubclass(x, Exception),
filter(lambda x: isinstance(x, (type, types.ClassType)), args)))
handlers = filter(lambda x: inspect.isfunction(x), args) or (base_exception_handler,)
def handle_exceptions_decorator(object):
"""
Handles exceptions.
:param object: Object to decorate.
:type object: object
:return: Object.
:rtype: object
"""
@functools.wraps(object)
def handle_exceptions_wrapper(*args, **kwargs):
"""
Handles exceptions.
:param \*args: Arguments.
:type \*args: \*
:param \*\*kwargs: Keywords arguments.
:type \*\*kwargs: \*\*
"""
_exceptions__frame__ = True
try:
return object(*args, **kwargs)
except exceptions as error:
for handler in handlers:
handler(error)
return handle_exceptions_wrapper
return handle_exceptions_decorator | def function[handle_exceptions, parameter[]]:
constant[
| Handles exceptions.
| It's possible to specify an user defined exception handler,
if not, :func:`base_exception_handler` handler will be used.
| The decorator uses given exceptions objects
or the default Python `Exception <http://docs.python.org/library/exceptions.html#exceptions.Exception>`_ class.
Usage::
@handle_exceptions(ZeroDivisionError)
def raiseAnException(value):
'''
Raises a 'ZeroDivisionError' exception.
'''
return value / 0
:param \*args: Arguments.
:type \*args: \*
:return: Object.
:rtype: object
]
variable[exceptions] assign[=] call[name[tuple], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da18f09e7a0>, call[name[filter], parameter[<ast.Lambda object at 0x7da18f09c9a0>, name[args]]]]]]]
variable[handlers] assign[=] <ast.BoolOp object at 0x7da204345480>
def function[handle_exceptions_decorator, parameter[object]]:
constant[
Handles exceptions.
:param object: Object to decorate.
:type object: object
:return: Object.
:rtype: object
]
def function[handle_exceptions_wrapper, parameter[]]:
constant[
Handles exceptions.
:param \*args: Arguments.
:type \*args: \*
:param \*\*kwargs: Keywords arguments.
:type \*\*kwargs: \*\*
]
variable[_exceptions__frame__] assign[=] constant[True]
<ast.Try object at 0x7da204346d40>
return[name[handle_exceptions_wrapper]]
return[name[handle_exceptions_decorator]] | keyword[def] identifier[handle_exceptions] (* identifier[args] ):
literal[string]
identifier[exceptions] = identifier[tuple] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[issubclass] ( identifier[x] , identifier[Exception] ),
identifier[filter] ( keyword[lambda] identifier[x] : identifier[isinstance] ( identifier[x] ,( identifier[type] , identifier[types] . identifier[ClassType] )), identifier[args] )))
identifier[handlers] = identifier[filter] ( keyword[lambda] identifier[x] : identifier[inspect] . identifier[isfunction] ( identifier[x] ), identifier[args] ) keyword[or] ( identifier[base_exception_handler] ,)
keyword[def] identifier[handle_exceptions_decorator] ( identifier[object] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[object] )
keyword[def] identifier[handle_exceptions_wrapper] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[_exceptions__frame__] = keyword[True]
keyword[try] :
keyword[return] identifier[object] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[exceptions] keyword[as] identifier[error] :
keyword[for] identifier[handler] keyword[in] identifier[handlers] :
identifier[handler] ( identifier[error] )
keyword[return] identifier[handle_exceptions_wrapper]
keyword[return] identifier[handle_exceptions_decorator] | def handle_exceptions(*args):
"""
| Handles exceptions.
| It's possible to specify an user defined exception handler,
if not, :func:`base_exception_handler` handler will be used.
| The decorator uses given exceptions objects
or the default Python `Exception <http://docs.python.org/library/exceptions.html#exceptions.Exception>`_ class.
Usage::
@handle_exceptions(ZeroDivisionError)
def raiseAnException(value):
'''
Raises a 'ZeroDivisionError' exception.
'''
return value / 0
:param \\*args: Arguments.
:type \\*args: \\*
:return: Object.
:rtype: object
"""
exceptions = tuple(filter(lambda x: issubclass(x, Exception), filter(lambda x: isinstance(x, (type, types.ClassType)), args)))
handlers = filter(lambda x: inspect.isfunction(x), args) or (base_exception_handler,)
def handle_exceptions_decorator(object):
"""
Handles exceptions.
:param object: Object to decorate.
:type object: object
:return: Object.
:rtype: object
"""
@functools.wraps(object)
def handle_exceptions_wrapper(*args, **kwargs):
"""
Handles exceptions.
:param \\*args: Arguments.
:type \\*args: \\*
:param \\*\\*kwargs: Keywords arguments.
:type \\*\\*kwargs: \\*\\*
"""
_exceptions__frame__ = True
try:
return object(*args, **kwargs) # depends on [control=['try'], data=[]]
except exceptions as error:
for handler in handlers:
handler(error) # depends on [control=['for'], data=['handler']] # depends on [control=['except'], data=['error']]
return handle_exceptions_wrapper
return handle_exceptions_decorator |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.