code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def delete_map(self, url, map=None, auth_map=None):
    """Generate an XML payload from the dictionary data and send it in an HTTP DELETE request.

    :param url: URL to send the HTTP request to.
    :param map: Dictionary with the data for the HTTP request body.
    :param auth_map: Dictionary with the authentication information for networkAPI.
    :return: Returns a tuple containing:
        (< HTTP response code >, < response body >).
    :raise ConnectionError: Failure connecting to networkAPI.
    :raise RestError: Failure accessing networkAPI.
    """
    xml = None
    # Only serialize a request body when a payload dictionary was supplied;
    # otherwise the DELETE is sent with an empty body.
    if map is not None:
        xml = dumps_networkapi(map)
    response_code, content = self.delete(url, xml, 'text/plain', auth_map)
    return response_code, content | def function[delete_map, parameter[self, url, map, auth_map]]:
constant[Gera um XML a partir dos dados do dicionário e o envia através de uma requisição DELETE.
:param url: URL para enviar a requisição HTTP.
:param map: Dicionário com os dados do corpo da requisição HTTP.
:param auth_map: Dicionário com as informações para autenticação na networkAPI.
:return: Retorna uma tupla contendo:
(< código de resposta http >, < corpo da resposta >).
:raise ConnectionError: Falha na conexão com a networkAPI.
:raise RestError: Falha no acesso à networkAPI.
]
variable[xml] assign[=] constant[None]
if compare[name[map] is_not constant[None]] begin[:]
variable[xml] assign[=] call[name[dumps_networkapi], parameter[name[map]]]
<ast.Tuple object at 0x7da1b2344a60> assign[=] call[name[self].delete, parameter[name[url], name[xml], constant[text/plain], name[auth_map]]]
return[tuple[[<ast.Name object at 0x7da1b2344490>, <ast.Name object at 0x7da1b2347be0>]]] | keyword[def] identifier[delete_map] ( identifier[self] , identifier[url] , identifier[map] = keyword[None] , identifier[auth_map] = keyword[None] ):
literal[string]
identifier[xml] = keyword[None]
keyword[if] identifier[map] keyword[is] keyword[not] keyword[None] :
identifier[xml] = identifier[dumps_networkapi] ( identifier[map] )
identifier[response_code] , identifier[content] = identifier[self] . identifier[delete] ( identifier[url] , identifier[xml] , literal[string] , identifier[auth_map] )
keyword[return] identifier[response_code] , identifier[content] | def delete_map(self, url, map=None, auth_map=None):
"""Gera um XML a partir dos dados do dicionário e o envia através de uma requisição DELETE.
:param url: URL para enviar a requisição HTTP.
:param map: Dicionário com os dados do corpo da requisição HTTP.
:param auth_map: Dicionário com as informações para autenticação na networkAPI.
:return: Retorna uma tupla contendo:
(< código de resposta http >, < corpo da resposta >).
:raise ConnectionError: Falha na conexão com a networkAPI.
:raise RestError: Falha no acesso à networkAPI.
"""
xml = None
if map is not None:
xml = dumps_networkapi(map) # depends on [control=['if'], data=['map']]
(response_code, content) = self.delete(url, xml, 'text/plain', auth_map)
return (response_code, content) |
def inverse(self):
    """
    Retrieves the additive inverse (negation) of this G2 element.

    NOTE(review): the original docstring said "G1 element", but the body
    unambiguously operates in G2 (``G2Element`` result, ``g2_neg_abi`` call).

    :return: A new G2Element holding the inverse of ``self``.
    """
    result = G2Element()
    # relic's ABI writes the negated point into the ``result`` out-parameter.
    librelic.g2_neg_abi(byref(result), byref(self))
    return result
constant[
Retrieves the inverse of a G1 element.
]
variable[result] assign[=] call[name[G2Element], parameter[]]
call[name[librelic].g2_neg_abi, parameter[call[name[byref], parameter[name[result]]], call[name[byref], parameter[name[self]]]]]
return[name[result]] | keyword[def] identifier[inverse] ( identifier[self] ):
literal[string]
identifier[result] = identifier[G2Element] ()
identifier[librelic] . identifier[g2_neg_abi] ( identifier[byref] ( identifier[result] ), identifier[byref] ( identifier[self] ))
keyword[return] identifier[result] | def inverse(self):
"""
Retrieves the inverse of a G1 element.
"""
result = G2Element()
librelic.g2_neg_abi(byref(result), byref(self))
return result |
def tsv_escape(x: Any) -> str:
    """
    Escape a value for use as a tab-separated-values (TSV) field.

    ``None`` maps to the empty string. Any other value is stringified and
    has literal tab and newline characters replaced by the two-character
    sequences ``\\t`` and ``\\n``.
    """
    if x is None:
        return ""
    text = str(x)
    for raw, escaped in (("\t", "\\t"), ("\n", "\\n")):
        text = text.replace(raw, escaped)
    return text
constant[
Escape data for tab-separated value (TSV) format.
]
if compare[name[x] is constant[None]] begin[:]
return[constant[]]
variable[x] assign[=] call[name[str], parameter[name[x]]]
return[call[call[name[x].replace, parameter[constant[ ], constant[\t]]].replace, parameter[constant[
], constant[\n]]]] | keyword[def] identifier[tsv_escape] ( identifier[x] : identifier[Any] )-> identifier[str] :
literal[string]
keyword[if] identifier[x] keyword[is] keyword[None] :
keyword[return] literal[string]
identifier[x] = identifier[str] ( identifier[x] )
keyword[return] identifier[x] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) | def tsv_escape(x: Any) -> str:
"""
Escape data for tab-separated value (TSV) format.
"""
if x is None:
return '' # depends on [control=['if'], data=[]]
x = str(x)
return x.replace('\t', '\\t').replace('\n', '\\n') |
async def initialize_stores_async(self):
    """
    Initialize the partition checkpoint and lease stores and ensure a
    checkpoint exists for every partition. Note that in this case the
    checkpoint and lease stores are the same storage manager construct.

    :return: The number of partitions.
    :rtype: int
    """
    storage = self.host.storage_manager
    await storage.create_checkpoint_store_if_not_exists_async()
    partition_ids = await self.get_partition_ids_async()
    # One retry-wrapped checkpoint-creation task per partition, run concurrently.
    pending = [
        self.retry_async(
            storage.create_checkpoint_if_not_exists_async,
            partition_id=pid,
            retry_message="Failure creating checkpoint for partition, retrying",
            final_failure_message="Out of retries creating checkpoint blob for partition",
            max_retries=5,
            host_id=self.host.host_name)
        for pid in partition_ids
    ]
    await asyncio.gather(*pending)
    return len(partition_ids)
literal[string]
keyword[await] identifier[self] . identifier[host] . identifier[storage_manager] . identifier[create_checkpoint_store_if_not_exists_async] ()
identifier[partition_ids] = keyword[await] identifier[self] . identifier[get_partition_ids_async] ()
identifier[retry_tasks] =[]
keyword[for] identifier[partition_id] keyword[in] identifier[partition_ids] :
identifier[retry_tasks] . identifier[append] (
identifier[self] . identifier[retry_async] (
identifier[self] . identifier[host] . identifier[storage_manager] . identifier[create_checkpoint_if_not_exists_async] ,
identifier[partition_id] = identifier[partition_id] ,
identifier[retry_message] = literal[string] ,
identifier[final_failure_message] = literal[string] ,
identifier[max_retries] = literal[int] ,
identifier[host_id] = identifier[self] . identifier[host] . identifier[host_name] ))
keyword[await] identifier[asyncio] . identifier[gather] (* identifier[retry_tasks] )
keyword[return] identifier[len] ( identifier[partition_ids] ) | async def initialize_stores_async(self):
"""
Intializes the partition checkpoint and lease store ensures that a checkpoint
exists for all partitions. Note in this case checkpoint and lease stores are
the same storage manager construct.
:return: Returns the number of partitions.
:rtype: int
"""
await self.host.storage_manager.create_checkpoint_store_if_not_exists_async()
partition_ids = await self.get_partition_ids_async()
retry_tasks = []
for partition_id in partition_ids:
retry_tasks.append(self.retry_async(self.host.storage_manager.create_checkpoint_if_not_exists_async, partition_id=partition_id, retry_message='Failure creating checkpoint for partition, retrying', final_failure_message='Out of retries creating checkpoint blob for partition', max_retries=5, host_id=self.host.host_name)) # depends on [control=['for'], data=['partition_id']]
await asyncio.gather(*retry_tasks)
return len(partition_ids) |
def _get_items(self):
"""Get multiple items from a Queue.
Gets at least one (blocking) and at most ``max_batch_size`` items
(non-blocking) from a given Queue. Does not mark the items as done.
:rtype: Sequence
:returns: A sequence of items retrieved from the queue.
"""
items = [self._queue.get()]
while len(items) < self._max_batch_size:
try:
items.append(self._queue.get_nowait())
except queue.Empty:
break
return items | def function[_get_items, parameter[self]]:
constant[Get multiple items from a Queue.
Gets at least one (blocking) and at most ``max_batch_size`` items
(non-blocking) from a given Queue. Does not mark the items as done.
:rtype: Sequence
:returns: A sequence of items retrieved from the queue.
]
variable[items] assign[=] list[[<ast.Call object at 0x7da204564430>]]
while compare[call[name[len], parameter[name[items]]] less[<] name[self]._max_batch_size] begin[:]
<ast.Try object at 0x7da2045641c0>
return[name[items]] | keyword[def] identifier[_get_items] ( identifier[self] ):
literal[string]
identifier[items] =[ identifier[self] . identifier[_queue] . identifier[get] ()]
keyword[while] identifier[len] ( identifier[items] )< identifier[self] . identifier[_max_batch_size] :
keyword[try] :
identifier[items] . identifier[append] ( identifier[self] . identifier[_queue] . identifier[get_nowait] ())
keyword[except] identifier[queue] . identifier[Empty] :
keyword[break]
keyword[return] identifier[items] | def _get_items(self):
"""Get multiple items from a Queue.
Gets at least one (blocking) and at most ``max_batch_size`` items
(non-blocking) from a given Queue. Does not mark the items as done.
:rtype: Sequence
:returns: A sequence of items retrieved from the queue.
"""
items = [self._queue.get()]
while len(items) < self._max_batch_size:
try:
items.append(self._queue.get_nowait()) # depends on [control=['try'], data=[]]
except queue.Empty:
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
return items |
def drawGrid(self, painter, rect, showGrid, showColumns, showRows):
    """
    Paint the grid (alternating background bands plus axis lines) using the
    given painter.

    :param painter | <QPainter>
           rect        | <QRect>
           showGrid    | <bool>
           showColumns | <bool>
           showRows    | <bool>
    """
    # Nothing to paint unless both the widget option and the caller ask for it.
    if not (self.showGrid() and showGrid):
        return

    # Protect the caller's pen/brush state while we repaint.
    painter.save()

    # Alternating row/column background bands, drawn with no outline.
    painter.setBrush(self.alternateColor())
    painter.setPen(Qt.NoPen)
    if self.alternatingRowColors():
        painter.drawRects(self._buildData.get('grid_h_alt', []))
    if self.alternatingColumnColors():
        painter.drawRects(self._buildData.get('grid_v_alt', []))

    # Axis-colored grid lines for whichever directions are enabled.
    painter.setPen(QPen(self.axisColor()))
    segments = []
    if self.showRows() and showRows:
        segments += self._buildData.get('grid_h_lines', [])
    if self.showColumns() and showColumns:
        segments += self._buildData.get('grid_v_lines', [])
    if segments:
        painter.drawLines(segments)

    painter.restore()
constant[
Draws the grid on the inputed painter
:param painter | <QPainter>
rect | <QRect>
showGrid | <bool>
showColumns | <bool>
showRows | <bool>
]
if <ast.UnaryOp object at 0x7da2054a5c30> begin[:]
return[None]
call[name[painter].save, parameter[]]
call[name[painter].setBrush, parameter[call[name[self].alternateColor, parameter[]]]]
call[name[painter].setPen, parameter[name[Qt].NoPen]]
if call[name[self].alternatingRowColors, parameter[]] begin[:]
call[name[painter].drawRects, parameter[call[name[self]._buildData.get, parameter[constant[grid_h_alt], list[[]]]]]]
if call[name[self].alternatingColumnColors, parameter[]] begin[:]
call[name[painter].drawRects, parameter[call[name[self]._buildData.get, parameter[constant[grid_v_alt], list[[]]]]]]
call[name[painter].setPen, parameter[call[name[QPen], parameter[call[name[self].axisColor, parameter[]]]]]]
variable[grid] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b24ae440> begin[:]
<ast.AugAssign object at 0x7da1b24ae920>
if <ast.BoolOp object at 0x7da1b24af3a0> begin[:]
<ast.AugAssign object at 0x7da1b24ada50>
if name[grid] begin[:]
call[name[painter].drawLines, parameter[name[grid]]]
call[name[painter].restore, parameter[]] | keyword[def] identifier[drawGrid] ( identifier[self] , identifier[painter] , identifier[rect] , identifier[showGrid] , identifier[showColumns] , identifier[showRows] ):
literal[string]
keyword[if] keyword[not] ( identifier[self] . identifier[showGrid] () keyword[and] identifier[showGrid] ):
keyword[return]
identifier[painter] . identifier[save] ()
identifier[painter] . identifier[setBrush] ( identifier[self] . identifier[alternateColor] ())
identifier[painter] . identifier[setPen] ( identifier[Qt] . identifier[NoPen] )
keyword[if] identifier[self] . identifier[alternatingRowColors] ():
identifier[painter] . identifier[drawRects] ( identifier[self] . identifier[_buildData] . identifier[get] ( literal[string] ,[]))
keyword[if] identifier[self] . identifier[alternatingColumnColors] ():
identifier[painter] . identifier[drawRects] ( identifier[self] . identifier[_buildData] . identifier[get] ( literal[string] ,[]))
identifier[painter] . identifier[setPen] ( identifier[QPen] ( identifier[self] . identifier[axisColor] ()))
identifier[grid] =[]
keyword[if] identifier[self] . identifier[showRows] () keyword[and] identifier[showRows] :
identifier[grid] += identifier[self] . identifier[_buildData] . identifier[get] ( literal[string] ,[])
keyword[if] identifier[self] . identifier[showColumns] () keyword[and] identifier[showColumns] :
identifier[grid] += identifier[self] . identifier[_buildData] . identifier[get] ( literal[string] ,[])
keyword[if] identifier[grid] :
identifier[painter] . identifier[drawLines] ( identifier[grid] )
identifier[painter] . identifier[restore] () | def drawGrid(self, painter, rect, showGrid, showColumns, showRows):
"""
Draws the grid on the inputed painter
:param painter | <QPainter>
rect | <QRect>
showGrid | <bool>
showColumns | <bool>
showRows | <bool>
"""
if not (self.showGrid() and showGrid):
return # depends on [control=['if'], data=[]] # saves the painter state before continuing
painter.save() # draw the grid data
painter.setBrush(self.alternateColor())
painter.setPen(Qt.NoPen) # draw alternating rows
if self.alternatingRowColors():
painter.drawRects(self._buildData.get('grid_h_alt', [])) # depends on [control=['if'], data=[]] # draw alternating columns
if self.alternatingColumnColors():
painter.drawRects(self._buildData.get('grid_v_alt', [])) # depends on [control=['if'], data=[]] # draws the grid lines
painter.setPen(QPen(self.axisColor()))
grid = []
if self.showRows() and showRows:
grid += self._buildData.get('grid_h_lines', []) # depends on [control=['if'], data=[]]
if self.showColumns() and showColumns:
grid += self._buildData.get('grid_v_lines', []) # depends on [control=['if'], data=[]]
if grid:
painter.drawLines(grid) # depends on [control=['if'], data=[]] # restores the painter when finished
painter.restore() |
def loop(self, value):
    """Indicates whether the playback should loop.

    Parameters
    ----------
    value : bool
        True if playback should loop, False if not.

    Raises
    ------
    TypeError
        If ``value`` is not a bool.
    """
    # isinstance is the idiomatic type check; since bool cannot be
    # subclassed, this accepts exactly the same values as the original
    # ``type(value) == bool`` comparison.
    if not isinstance(value, bool):
        raise TypeError("can only be True or False")
    self._loop = value
constant[ Indicates whether the playback should loop.
Parameters
----------
value : bool
True if playback should loop, False if not.
]
if <ast.UnaryOp object at 0x7da1b253d210> begin[:]
<ast.Raise object at 0x7da1b253ec80>
name[self]._loop assign[=] name[value] | keyword[def] identifier[loop] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[type] ( identifier[value] )== identifier[bool] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[_loop] = identifier[value] | def loop(self, value):
""" Indicates whether the playback should loop.
Parameters
----------
value : bool
True if playback should loop, False if not.
"""
if not type(value) == bool:
raise TypeError('can only be True or False') # depends on [control=['if'], data=[]]
self._loop = value |
def to_distance_maps(self, inverted=False):
    """
    Generates a ``(H,W,K)`` output containing ``K`` distance maps for ``K`` keypoints.

    The k-th distance map contains at every location ``(y, x)`` the euclidean
    distance to the k-th keypoint. This function can be used as a helper when
    augmenting keypoints with a method that only supports the augmentation of
    images.

    Parameters
    ----------
    inverted : bool, optional
        If True, inverted distance maps are returned where each distance
        value ``d`` is replaced by ``1/(d+1)``, i.e. the distance maps have
        values in the range ``(0.0, 1.0]`` with 1.0 denoting exactly the
        position of the respective keypoint.
        (Doc fix: the original docstring said ``d/(d+1)``, but the code
        computes ``1/(d+1)`` — which is also the only formula consistent
        with 1.0 marking the keypoint position.)

    Returns
    -------
    distance_maps : (H,W,K) ndarray
        A ``float32`` array containing ``K`` distance maps for ``K``
        keypoints. Each location ``(y, x, k)`` in the array denotes the
        euclidean distance at ``(y, x)`` to the ``k``-th keypoint. In
        inverted mode the distance ``d`` is replaced by ``1/(d+1)``. The
        height and width of the array match the height and width in
        ``KeypointsOnImage.shape``.
    """
    ia.do_assert(len(self.keypoints) > 0)
    height, width = self.shape[0:2]
    distance_maps = np.zeros((height, width, len(self.keypoints)), dtype=np.float32)

    yy = np.arange(0, height)
    xx = np.arange(0, width)
    grid_xx, grid_yy = np.meshgrid(xx, yy)

    # Accumulate squared distances per keypoint; one sqrt over the whole
    # stack afterwards is cheaper than a sqrt per channel.
    for i, keypoint in enumerate(self.keypoints):
        y, x = keypoint.y, keypoint.x
        distance_maps[:, :, i] = (grid_xx - x) ** 2 + (grid_yy - y) ** 2
    distance_maps = np.sqrt(distance_maps)
    if inverted:
        return 1/(distance_maps+1)
    return distance_maps
constant[
Generates a ``(H,W,K)`` output containing ``K`` distance maps for ``K`` keypoints.
The k-th distance map contains at every location ``(y, x)`` the euclidean distance to the k-th keypoint.
This function can be used as a helper when augmenting keypoints with a method that only supports
the augmentation of images.
Parameters
-------
inverted : bool, optional
If True, inverted distance maps are returned where each distance value d is replaced
by ``d/(d+1)``, i.e. the distance maps have values in the range ``(0.0, 1.0]`` with 1.0
denoting exactly the position of the respective keypoint.
Returns
-------
distance_maps : (H,W,K) ndarray
A ``float32`` array containing ``K`` distance maps for ``K`` keypoints. Each location
``(y, x, k)`` in the array denotes the euclidean distance at ``(y, x)`` to the ``k``-th keypoint.
In inverted mode the distance ``d`` is replaced by ``d/(d+1)``. The height and width
of the array match the height and width in ``KeypointsOnImage.shape``.
]
call[name[ia].do_assert, parameter[compare[call[name[len], parameter[name[self].keypoints]] greater[>] constant[0]]]]
<ast.Tuple object at 0x7da207f9b010> assign[=] call[name[self].shape][<ast.Slice object at 0x7da207f9b700>]
variable[distance_maps] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da207f9b0a0>, <ast.Name object at 0x7da207f9b460>, <ast.Call object at 0x7da207f990c0>]]]]
variable[yy] assign[=] call[name[np].arange, parameter[constant[0], name[height]]]
variable[xx] assign[=] call[name[np].arange, parameter[constant[0], name[width]]]
<ast.Tuple object at 0x7da207f9a9b0> assign[=] call[name[np].meshgrid, parameter[name[xx], name[yy]]]
for taget[tuple[[<ast.Name object at 0x7da1b0213730>, <ast.Name object at 0x7da1b0213be0>]]] in starred[call[name[enumerate], parameter[name[self].keypoints]]] begin[:]
<ast.Tuple object at 0x7da1b0211f90> assign[=] tuple[[<ast.Attribute object at 0x7da1b0210130>, <ast.Attribute object at 0x7da1b02102e0>]]
call[name[distance_maps]][tuple[[<ast.Slice object at 0x7da1b0213880>, <ast.Slice object at 0x7da1b0210ca0>, <ast.Name object at 0x7da1b0212ad0>]]] assign[=] binary_operation[binary_operation[binary_operation[name[grid_xx] - name[x]] ** constant[2]] + binary_operation[binary_operation[name[grid_yy] - name[y]] ** constant[2]]]
variable[distance_maps] assign[=] call[name[np].sqrt, parameter[name[distance_maps]]]
if name[inverted] begin[:]
return[binary_operation[constant[1] / binary_operation[name[distance_maps] + constant[1]]]]
return[name[distance_maps]] | keyword[def] identifier[to_distance_maps] ( identifier[self] , identifier[inverted] = keyword[False] ):
literal[string]
identifier[ia] . identifier[do_assert] ( identifier[len] ( identifier[self] . identifier[keypoints] )> literal[int] )
identifier[height] , identifier[width] = identifier[self] . identifier[shape] [ literal[int] : literal[int] ]
identifier[distance_maps] = identifier[np] . identifier[zeros] (( identifier[height] , identifier[width] , identifier[len] ( identifier[self] . identifier[keypoints] )), identifier[dtype] = identifier[np] . identifier[float32] )
identifier[yy] = identifier[np] . identifier[arange] ( literal[int] , identifier[height] )
identifier[xx] = identifier[np] . identifier[arange] ( literal[int] , identifier[width] )
identifier[grid_xx] , identifier[grid_yy] = identifier[np] . identifier[meshgrid] ( identifier[xx] , identifier[yy] )
keyword[for] identifier[i] , identifier[keypoint] keyword[in] identifier[enumerate] ( identifier[self] . identifier[keypoints] ):
identifier[y] , identifier[x] = identifier[keypoint] . identifier[y] , identifier[keypoint] . identifier[x]
identifier[distance_maps] [:,:, identifier[i] ]=( identifier[grid_xx] - identifier[x] )** literal[int] +( identifier[grid_yy] - identifier[y] )** literal[int]
identifier[distance_maps] = identifier[np] . identifier[sqrt] ( identifier[distance_maps] )
keyword[if] identifier[inverted] :
keyword[return] literal[int] /( identifier[distance_maps] + literal[int] )
keyword[return] identifier[distance_maps] | def to_distance_maps(self, inverted=False):
"""
Generates a ``(H,W,K)`` output containing ``K`` distance maps for ``K`` keypoints.
The k-th distance map contains at every location ``(y, x)`` the euclidean distance to the k-th keypoint.
This function can be used as a helper when augmenting keypoints with a method that only supports
the augmentation of images.
Parameters
-------
inverted : bool, optional
If True, inverted distance maps are returned where each distance value d is replaced
by ``d/(d+1)``, i.e. the distance maps have values in the range ``(0.0, 1.0]`` with 1.0
denoting exactly the position of the respective keypoint.
Returns
-------
distance_maps : (H,W,K) ndarray
A ``float32`` array containing ``K`` distance maps for ``K`` keypoints. Each location
``(y, x, k)`` in the array denotes the euclidean distance at ``(y, x)`` to the ``k``-th keypoint.
In inverted mode the distance ``d`` is replaced by ``d/(d+1)``. The height and width
of the array match the height and width in ``KeypointsOnImage.shape``.
"""
ia.do_assert(len(self.keypoints) > 0)
(height, width) = self.shape[0:2]
distance_maps = np.zeros((height, width, len(self.keypoints)), dtype=np.float32)
yy = np.arange(0, height)
xx = np.arange(0, width)
(grid_xx, grid_yy) = np.meshgrid(xx, yy)
for (i, keypoint) in enumerate(self.keypoints):
(y, x) = (keypoint.y, keypoint.x)
distance_maps[:, :, i] = (grid_xx - x) ** 2 + (grid_yy - y) ** 2 # depends on [control=['for'], data=[]]
distance_maps = np.sqrt(distance_maps)
if inverted:
return 1 / (distance_maps + 1) # depends on [control=['if'], data=[]]
return distance_maps |
def load_from_file(module_path):
    """
    Load a python module from its absolute filesystem path.

    Borrowed from django-cms; modernized here because the ``imp`` module the
    original relied on was deprecated since Python 3.4 and removed in 3.12.
    As before, the module is loaded under the throwaway name ``'mod'``.

    :param module_path: absolute path to a ``.py`` source file; falsy values
        skip loading entirely.
    :return: the loaded module object, or ``None`` if no path was given.
    """
    imported = None
    if module_path:
        import importlib.util
        spec = importlib.util.spec_from_file_location('mod', module_path)
        imported = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(imported)
    return imported
constant[
Load a python module from its absolute filesystem path
Borrowed from django-cms
]
from relative_module[imp] import module[load_module], module[PY_SOURCE]
variable[imported] assign[=] constant[None]
if name[module_path] begin[:]
with call[name[open], parameter[name[module_path], constant[r]]] begin[:]
variable[imported] assign[=] call[name[load_module], parameter[constant[mod], name[openfile], name[module_path], tuple[[<ast.Constant object at 0x7da18f00e4a0>, <ast.Constant object at 0x7da18f00e470>, <ast.Name object at 0x7da18f00faf0>]]]]
return[name[imported]] | keyword[def] identifier[load_from_file] ( identifier[module_path] ):
literal[string]
keyword[from] identifier[imp] keyword[import] identifier[load_module] , identifier[PY_SOURCE]
identifier[imported] = keyword[None]
keyword[if] identifier[module_path] :
keyword[with] identifier[open] ( identifier[module_path] , literal[string] ) keyword[as] identifier[openfile] :
identifier[imported] = identifier[load_module] ( literal[string] , identifier[openfile] , identifier[module_path] ,( literal[string] , literal[string] , identifier[PY_SOURCE] ))
keyword[return] identifier[imported] | def load_from_file(module_path):
"""
Load a python module from its absolute filesystem path
Borrowed from django-cms
"""
from imp import load_module, PY_SOURCE
imported = None
if module_path:
with open(module_path, 'r') as openfile:
imported = load_module('mod', openfile, module_path, ('imported', 'r', PY_SOURCE)) # depends on [control=['with'], data=['openfile']] # depends on [control=['if'], data=[]]
return imported |
def render(self, name=None, value=None, attrs=None):
    """Outputs a <ul> for this set of choice fields.

    If an id was given to the field, it is applied to the <ul> (each
    item in the list will get an id of `$id_$i`).
    """
    # BUG FIX: the original read ``attrs = {} or attrs``; the empty dict is
    # falsy, so the expression always evaluated to ``attrs`` unchanged and a
    # ``None`` value crashed below on ``self.attrs.get('id')``. The intended
    # default is an empty dict.
    attrs = attrs or {}
    self.attrs = attrs
    self.name = name
    self.value = value
    id_ = self.attrs.get('id')
    output = []
    for i, choice in enumerate(self.choices):
        choice_value, choice_label = choice
        if isinstance(choice_label, (tuple, list)):
            # Nested choice group: render it as a sub-<ul> via a child
            # renderer of the same class, suffixing the id per group index.
            attrs_plus = self.attrs.copy()
            if id_:
                attrs_plus['id'] += '_{}'.format(i)
            sub_ul_renderer = self.__class__(
                attrs=attrs_plus,
                choices=choice_label,
            )
            sub_ul_renderer.choice_input_class = self.choice_input_class
            output.append(html.format_html(
                self.inner_html, choice_value=choice_value,
                sub_widgets=sub_ul_renderer.render(),
            ))
        else:
            # Plain choice: render a single input widget.
            w = self.choice_input_class(
                self.name, self.value, self.attrs.copy(), choice, i)
            output.append(html.format_html(
                self.inner_html,
                choice_value=force_text(w),
                sub_widgets=''))
    return html.format_html(
        self.outer_html,
        id_attr=html.format_html(' id="{}"', id_) if id_ else '',
        content=mark_safe('\n'.join(output)),
    )
constant[Outputs a <ul> for this set of choice fields.
If an id was given to the field, it is applied to the <ul> (each
item in the list will get an id of `$id_$i`).
]
variable[attrs] assign[=] <ast.BoolOp object at 0x7da1b1906b90>
name[self].attrs assign[=] name[attrs]
name[self].name assign[=] name[name]
name[self].value assign[=] name[value]
variable[id_] assign[=] call[name[self].attrs.get, parameter[constant[id]]]
variable[output] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1907370>, <ast.Name object at 0x7da1b1906cb0>]]] in starred[call[name[enumerate], parameter[name[self].choices]]] begin[:]
<ast.Tuple object at 0x7da1b1906200> assign[=] name[choice]
if call[name[isinstance], parameter[name[choice_label], tuple[[<ast.Name object at 0x7da1b19077f0>, <ast.Name object at 0x7da1b19063b0>]]]] begin[:]
variable[attrs_plus] assign[=] call[name[self].attrs.copy, parameter[]]
if name[id_] begin[:]
<ast.AugAssign object at 0x7da1b1907670>
variable[sub_ul_renderer] assign[=] call[name[self].__class__, parameter[]]
name[sub_ul_renderer].choice_input_class assign[=] name[self].choice_input_class
call[name[output].append, parameter[call[name[html].format_html, parameter[name[self].inner_html]]]]
return[call[name[html].format_html, parameter[name[self].outer_html]]] | keyword[def] identifier[render] ( identifier[self] , identifier[name] = keyword[None] , identifier[value] = keyword[None] , identifier[attrs] = keyword[None] ):
literal[string]
identifier[attrs] ={} keyword[or] identifier[attrs]
identifier[self] . identifier[attrs] = identifier[attrs]
identifier[self] . identifier[name] = identifier[name]
identifier[self] . identifier[value] = identifier[value]
identifier[id_] = identifier[self] . identifier[attrs] . identifier[get] ( literal[string] )
identifier[output] =[]
keyword[for] identifier[i] , identifier[choice] keyword[in] identifier[enumerate] ( identifier[self] . identifier[choices] ):
identifier[choice_value] , identifier[choice_label] = identifier[choice]
keyword[if] identifier[isinstance] ( identifier[choice_label] ,( identifier[tuple] , identifier[list] )):
identifier[attrs_plus] = identifier[self] . identifier[attrs] . identifier[copy] ()
keyword[if] identifier[id_] :
identifier[attrs_plus] [ literal[string] ]+= literal[string] . identifier[format] ( identifier[i] )
identifier[sub_ul_renderer] = identifier[self] . identifier[__class__] (
identifier[attrs] = identifier[attrs_plus] ,
identifier[choices] = identifier[choice_label] ,
)
identifier[sub_ul_renderer] . identifier[choice_input_class] = identifier[self] . identifier[choice_input_class]
identifier[output] . identifier[append] ( identifier[html] . identifier[format_html] (
identifier[self] . identifier[inner_html] , identifier[choice_value] = identifier[choice_value] ,
identifier[sub_widgets] = identifier[sub_ul_renderer] . identifier[render] (),
))
keyword[else] :
identifier[w] = identifier[self] . identifier[choice_input_class] (
identifier[self] . identifier[name] , identifier[self] . identifier[value] , identifier[self] . identifier[attrs] . identifier[copy] (), identifier[choice] , identifier[i] )
identifier[output] . identifier[append] ( identifier[html] . identifier[format_html] (
identifier[self] . identifier[inner_html] ,
identifier[choice_value] = identifier[force_text] ( identifier[w] ),
identifier[sub_widgets] = literal[string] ))
keyword[return] identifier[html] . identifier[format_html] (
identifier[self] . identifier[outer_html] ,
identifier[id_attr] = identifier[html] . identifier[format_html] ( literal[string] , identifier[id_] ) keyword[if] identifier[id_] keyword[else] literal[string] ,
identifier[content] = identifier[mark_safe] ( literal[string] . identifier[join] ( identifier[output] )),
) | def render(self, name=None, value=None, attrs=None):
"""Outputs a <ul> for this set of choice fields.
If an id was given to the field, it is applied to the <ul> (each
item in the list will get an id of `$id_$i`).
"""
attrs = {} or attrs
self.attrs = attrs
self.name = name
self.value = value
id_ = self.attrs.get('id')
output = []
for (i, choice) in enumerate(self.choices):
(choice_value, choice_label) = choice
if isinstance(choice_label, (tuple, list)):
attrs_plus = self.attrs.copy()
if id_:
attrs_plus['id'] += '_{}'.format(i) # depends on [control=['if'], data=[]]
sub_ul_renderer = self.__class__(attrs=attrs_plus, choices=choice_label)
sub_ul_renderer.choice_input_class = self.choice_input_class
output.append(html.format_html(self.inner_html, choice_value=choice_value, sub_widgets=sub_ul_renderer.render())) # depends on [control=['if'], data=[]]
else:
w = self.choice_input_class(self.name, self.value, self.attrs.copy(), choice, i)
output.append(html.format_html(self.inner_html, choice_value=force_text(w), sub_widgets='')) # depends on [control=['for'], data=[]]
return html.format_html(self.outer_html, id_attr=html.format_html(' id="{}"', id_) if id_ else '', content=mark_safe('\n'.join(output))) |
def get_subtree(self, name):  # noqa: D302
    r"""
    Return the name of every node in the sub-tree rooted at *name*.

    :param name: Sub-tree root node name
    :type name: :ref:`NodeName`

    :rtype: list of :ref:`NodeName`

    :raises:
     * RuntimeError (Argument \`name\` is not valid)

     * RuntimeError (Node *[name]* not in tree)

    Using the same example tree created in
    :py:meth:`ptrie.Trie.add_nodes`::

        >>> from __future__ import print_function
        >>> import docs.support.ptrie_example, pprint
        >>> tobj = docs.support.ptrie_example.create_tree()
        >>> print(tobj)
        root
        ├branch1 (*)
        │├leaf1
        ││└subleaf1 (*)
        │└leaf2 (*)
        │ └subleaf2
        └branch2
        >>> pprint.pprint(tobj.get_subtree('root.branch1'))
        ['root.branch1',
         'root.branch1.leaf1',
         'root.branch1.leaf1.subleaf1',
         'root.branch1.leaf2',
         'root.branch1.leaf2.subleaf2']
    """
    # A truthy return value from the validator signals a malformed name.
    name_is_invalid = self._validate_node_name(name)
    if name_is_invalid:
        raise RuntimeError("Argument `name` is not valid")
    # Membership check raises RuntimeError for nodes not in the tree.
    self._node_in_tree(name)
    return self._get_subtree(name)
constant[
Get all node names in a sub-tree.
:param name: Sub-tree root node name
:type name: :ref:`NodeName`
:rtype: list of :ref:`NodeName`
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
Using the same example tree created in
:py:meth:`ptrie.Trie.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.ptrie_example, pprint
>>> tobj = docs.support.ptrie_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> pprint.pprint(tobj.get_subtree('root.branch1'))
['root.branch1',
'root.branch1.leaf1',
'root.branch1.leaf1.subleaf1',
'root.branch1.leaf2',
'root.branch1.leaf2.subleaf2']
]
if call[name[self]._validate_node_name, parameter[name[name]]] begin[:]
<ast.Raise object at 0x7da1b10d5f60>
call[name[self]._node_in_tree, parameter[name[name]]]
return[call[name[self]._get_subtree, parameter[name[name]]]] | keyword[def] identifier[get_subtree] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] identifier[self] . identifier[_validate_node_name] ( identifier[name] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[_node_in_tree] ( identifier[name] )
keyword[return] identifier[self] . identifier[_get_subtree] ( identifier[name] ) | def get_subtree(self, name): # noqa: D302
"\n Get all node names in a sub-tree.\n\n :param name: Sub-tree root node name\n :type name: :ref:`NodeName`\n\n :rtype: list of :ref:`NodeName`\n\n :raises:\n * RuntimeError (Argument \\`name\\` is not valid)\n\n * RuntimeError (Node *[name]* not in tree)\n\n Using the same example tree created in\n :py:meth:`ptrie.Trie.add_nodes`::\n\n >>> from __future__ import print_function\n >>> import docs.support.ptrie_example, pprint\n >>> tobj = docs.support.ptrie_example.create_tree()\n >>> print(tobj)\n root\n ├branch1 (*)\n │├leaf1\n ││└subleaf1 (*)\n │└leaf2 (*)\n │ └subleaf2\n └branch2\n >>> pprint.pprint(tobj.get_subtree('root.branch1'))\n ['root.branch1',\n 'root.branch1.leaf1',\n 'root.branch1.leaf1.subleaf1',\n 'root.branch1.leaf2',\n 'root.branch1.leaf2.subleaf2']\n "
if self._validate_node_name(name):
raise RuntimeError('Argument `name` is not valid') # depends on [control=['if'], data=[]]
self._node_in_tree(name)
return self._get_subtree(name) |
def close(self):
    """Close the stream, replacing the current token with an EOF marker."""
    # Synthesize an EOF token at the line where the stream was closed so
    # consumers see a well-formed terminator rather than raw exhaustion.
    eof_token = Token(self.current.lineno, TOKEN_EOF, '')
    self.current = eof_token
    self._next = None
    self.closed = True
constant[Close the stream.]
name[self].current assign[=] call[name[Token], parameter[name[self].current.lineno, name[TOKEN_EOF], constant[]]]
name[self]._next assign[=] constant[None]
name[self].closed assign[=] constant[True] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[current] = identifier[Token] ( identifier[self] . identifier[current] . identifier[lineno] , identifier[TOKEN_EOF] , literal[string] )
identifier[self] . identifier[_next] = keyword[None]
identifier[self] . identifier[closed] = keyword[True] | def close(self):
"""Close the stream."""
self.current = Token(self.current.lineno, TOKEN_EOF, '')
self._next = None
self.closed = True |
def readObject(self):
    """Unmarshal and return one Java object from ``self.object_stream``.

    Any bytes remaining in the stream after the object are reported (and
    hexdumped at debug level) but not consumed: the stream position is
    restored to just after the object before returning.  On failure the
    internal parser state is dumped and the exception is re-raised.
    """
    try:
        _, res = self._read_and_exec_opcode(ident=0)
        # Remember where the object ended so the position can be restored
        # after peeking at whatever trails it.
        position_bak = self.object_stream.tell()
        the_rest = self.object_stream.read()
        if len(the_rest):
            log_error("Warning!!!!: Stream still has %s bytes left.\
                    Enable debug mode of logging to see the hexdump." % len(the_rest))
            log_debug(self._create_hexdump(the_rest))
        else:
            log_debug("Java Object unmarshalled succesfully!")
        # read() moved the position to EOF; put it back where the object ended.
        self.object_stream.seek(position_bak)
        return res
    except Exception:
        # Dump parser state to the log for debugging, then propagate the
        # original exception unchanged.
        self._oops_dump_state()
        raise
constant[read object]
<ast.Try object at 0x7da2054a69b0> | keyword[def] identifier[readObject] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[_] , identifier[res] = identifier[self] . identifier[_read_and_exec_opcode] ( identifier[ident] = literal[int] )
identifier[position_bak] = identifier[self] . identifier[object_stream] . identifier[tell] ()
identifier[the_rest] = identifier[self] . identifier[object_stream] . identifier[read] ()
keyword[if] identifier[len] ( identifier[the_rest] ):
identifier[log_error] ( literal[string] % identifier[len] ( identifier[the_rest] ))
identifier[log_debug] ( identifier[self] . identifier[_create_hexdump] ( identifier[the_rest] ))
keyword[else] :
identifier[log_debug] ( literal[string] )
identifier[self] . identifier[object_stream] . identifier[seek] ( identifier[position_bak] )
keyword[return] identifier[res]
keyword[except] identifier[Exception] :
identifier[self] . identifier[_oops_dump_state] ()
keyword[raise] | def readObject(self):
"""read object"""
try:
(_, res) = self._read_and_exec_opcode(ident=0)
position_bak = self.object_stream.tell()
the_rest = self.object_stream.read()
if len(the_rest):
log_error('Warning!!!!: Stream still has %s bytes left.Enable debug mode of logging to see the hexdump.' % len(the_rest))
log_debug(self._create_hexdump(the_rest)) # depends on [control=['if'], data=[]]
else:
log_debug('Java Object unmarshalled succesfully!')
self.object_stream.seek(position_bak)
return res # depends on [control=['try'], data=[]]
except Exception:
self._oops_dump_state()
raise # depends on [control=['except'], data=[]] |
def stream(queryset_or_adapter, basename=None):
    """Stream a csv file from an object list,
    a queryset or an instantiated adapter.

    :param queryset_or_adapter: an :class:`Adapter` instance, a non-empty
        list/tuple of documents, or a mongoengine queryset.
    :param basename: optional basename for the attachment filename
        (defaults to ``'export'``).
    :returns: a :class:`Response` streaming the CSV rows as an attachment.
    :raises ValueError: if the list is empty, the input type is not
        supported, or no adapter is registered for the detected document
        class.
    """
    if isinstance(queryset_or_adapter, Adapter):
        adapter = queryset_or_adapter
    elif isinstance(queryset_or_adapter, (list, tuple)):
        if not queryset_or_adapter:
            raise ValueError(
                'Type detection is not possible with an empty list')
        cls = _adapters.get(queryset_or_adapter[0].__class__)
        if cls is None:
            # Fail with a clear error instead of "'NoneType' object is not
            # callable" when no adapter is registered for this class.
            raise ValueError('Unsupported object type')
        adapter = cls(queryset_or_adapter)
    elif isinstance(queryset_or_adapter, db.BaseQuerySet):
        cls = _adapters.get(queryset_or_adapter._document)
        if cls is None:
            raise ValueError('Unsupported object type')
        adapter = cls(queryset_or_adapter)
    else:
        raise ValueError('Unsupported object type')
    timestamp = datetime.now().strftime('%Y-%m-%d-%H-%M')
    headers = {
        # NOTE(review): bytes key kept as-is for backward compatibility;
        # a str key is the usual form on Python 3 -- confirm before changing.
        b'Content-Disposition': 'attachment; filename={0}-{1}.csv'.format(
            basename or 'export', timestamp),
    }
    # stream_with_context keeps the request context alive while the
    # generator produced by yield_rows is consumed.
    streamer = stream_with_context(yield_rows(adapter))
    return Response(streamer, mimetype="text/csv", headers=headers)
constant[Stream a csv file from an object list,
a queryset or an instanciated adapter.
]
if call[name[isinstance], parameter[name[queryset_or_adapter], name[Adapter]]] begin[:]
variable[adapter] assign[=] name[queryset_or_adapter]
variable[timestamp] assign[=] call[call[name[datetime].now, parameter[]].strftime, parameter[constant[%Y-%m-%d-%H-%M]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f09e140>], [<ast.Call object at 0x7da18f09dbd0>]]
variable[streamer] assign[=] call[name[stream_with_context], parameter[call[name[yield_rows], parameter[name[adapter]]]]]
return[call[name[Response], parameter[name[streamer]]]] | keyword[def] identifier[stream] ( identifier[queryset_or_adapter] , identifier[basename] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[queryset_or_adapter] , identifier[Adapter] ):
identifier[adapter] = identifier[queryset_or_adapter]
keyword[elif] identifier[isinstance] ( identifier[queryset_or_adapter] ,( identifier[list] , identifier[tuple] )):
keyword[if] keyword[not] identifier[queryset_or_adapter] :
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[cls] = identifier[_adapters] . identifier[get] ( identifier[queryset_or_adapter] [ literal[int] ]. identifier[__class__] )
identifier[adapter] = identifier[cls] ( identifier[queryset_or_adapter] )
keyword[elif] identifier[isinstance] ( identifier[queryset_or_adapter] , identifier[db] . identifier[BaseQuerySet] ):
identifier[cls] = identifier[_adapters] . identifier[get] ( identifier[queryset_or_adapter] . identifier[_document] )
identifier[adapter] = identifier[cls] ( identifier[queryset_or_adapter] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[timestamp] = identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] )
identifier[headers] ={
literal[string] : literal[string] . identifier[format] (
identifier[basename] keyword[or] literal[string] , identifier[timestamp] ),
}
identifier[streamer] = identifier[stream_with_context] ( identifier[yield_rows] ( identifier[adapter] ))
keyword[return] identifier[Response] ( identifier[streamer] , identifier[mimetype] = literal[string] , identifier[headers] = identifier[headers] ) | def stream(queryset_or_adapter, basename=None):
"""Stream a csv file from an object list,
a queryset or an instanciated adapter.
"""
if isinstance(queryset_or_adapter, Adapter):
adapter = queryset_or_adapter # depends on [control=['if'], data=[]]
elif isinstance(queryset_or_adapter, (list, tuple)):
if not queryset_or_adapter:
raise ValueError('Type detection is not possible with an empty list') # depends on [control=['if'], data=[]]
cls = _adapters.get(queryset_or_adapter[0].__class__)
adapter = cls(queryset_or_adapter) # depends on [control=['if'], data=[]]
elif isinstance(queryset_or_adapter, db.BaseQuerySet):
cls = _adapters.get(queryset_or_adapter._document)
adapter = cls(queryset_or_adapter) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unsupported object type')
timestamp = datetime.now().strftime('%Y-%m-%d-%H-%M')
headers = {b'Content-Disposition': 'attachment; filename={0}-{1}.csv'.format(basename or 'export', timestamp)}
streamer = stream_with_context(yield_rows(adapter))
return Response(streamer, mimetype='text/csv', headers=headers) |
def get_rows(self, startrow=3, worksheet=None):
    """Yield one dict per spreadsheet row, keyed by the header row.

    The first row of the sheet supplies the dictionary keys; data rows up
    to and including ``startrow`` are skipped.  Cell values are stripped
    of surrounding whitespace and unicode values are encoded to UTF-8
    bytes (Python 2 semantics -- ``unicode`` is undefined on Python 3).
    For each of the 'Physical', 'Postal' and 'Billing' address groups, a
    nested dict of the address sub-fields is added under that group name.

    :param startrow: last row number to skip before yielding data rows
    :param worksheet: worksheet to read; defaults to ``self.worksheet``
        -- presumably an openpyxl worksheet (TODO confirm)
    """
    headers = []
    row_nr = 0
    worksheet = worksheet if worksheet else self.worksheet
    for row in worksheet.rows:  # .iter_rows():
        row_nr += 1
        if row_nr == 1:
            # First row holds the column names used as dict keys below.
            # headers = [cell.internal_value for cell in row]
            headers = [cell.value for cell in row]
            continue
        if row_nr % 1000 == 0:
            # Periodic savepoint so very large imports do not accumulate
            # one giant uncommitted transaction.
            transaction.savepoint()
        if row_nr <= startrow:
            continue
        # row = [_c(cell.internal_value).decode('utf-8') for cell in row]
        new_row = []
        for cell in row:
            value = cell.value
            if value is None:
                value = ''
            # NOTE(review): Python 2 only -- `unicode` is a builtin there.
            if isinstance(value, unicode):
                value = value.encode('utf-8')
            # Strip any space, \t, \n, or \r characters from the left-hand
            # side, right-hand side, or both sides of the string
            if isinstance(value, str):
                value = value.strip(' \t\n\r')
            new_row.append(value)
        row = dict(zip(headers, new_row))
        # Group flat "<Type>_<Field>" columns into a nested dict per
        # address type, e.g. row['Postal']['City'].
        for add_type in ['Physical', 'Postal', 'Billing']:
            row[add_type] = {}
            if add_type + "_Address" in row:
                for key in ['Address', 'City', 'State', 'District', 'Zip', 'Country']:
                    row[add_type][key] = str(row.get("%s_%s" % (add_type, key), ''))
        yield row
constant[Returns a generator for all rows in a sheet.
Each row contains a dictionary where the key is the value of the
first row of the sheet for each column.
The data values are returned in utf-8 format.
Starts to consume data from startrow
]
variable[headers] assign[=] list[[]]
variable[row_nr] assign[=] constant[0]
variable[worksheet] assign[=] <ast.IfExp object at 0x7da1b1d66410>
for taget[name[row]] in starred[name[worksheet].rows] begin[:]
<ast.AugAssign object at 0x7da1b1d64310>
if compare[name[row_nr] equal[==] constant[1]] begin[:]
variable[headers] assign[=] <ast.ListComp object at 0x7da1b1d66110>
continue
if compare[binary_operation[name[row_nr] <ast.Mod object at 0x7da2590d6920> constant[1000]] equal[==] constant[0]] begin[:]
call[name[transaction].savepoint, parameter[]]
if compare[name[row_nr] less_or_equal[<=] name[startrow]] begin[:]
continue
variable[new_row] assign[=] list[[]]
for taget[name[cell]] in starred[name[row]] begin[:]
variable[value] assign[=] name[cell].value
if compare[name[value] is constant[None]] begin[:]
variable[value] assign[=] constant[]
if call[name[isinstance], parameter[name[value], name[unicode]]] begin[:]
variable[value] assign[=] call[name[value].encode, parameter[constant[utf-8]]]
if call[name[isinstance], parameter[name[value], name[str]]] begin[:]
variable[value] assign[=] call[name[value].strip, parameter[constant[
]]]
call[name[new_row].append, parameter[name[value]]]
variable[row] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[headers], name[new_row]]]]]
for taget[name[add_type]] in starred[list[[<ast.Constant object at 0x7da1b1d64f70>, <ast.Constant object at 0x7da1b1d642e0>, <ast.Constant object at 0x7da1b1d64e80>]]] begin[:]
call[name[row]][name[add_type]] assign[=] dictionary[[], []]
if compare[binary_operation[name[add_type] + constant[_Address]] in name[row]] begin[:]
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da1b1d672e0>, <ast.Constant object at 0x7da1b1d66890>, <ast.Constant object at 0x7da1b1d64fa0>, <ast.Constant object at 0x7da1b1d657e0>, <ast.Constant object at 0x7da1b1d655a0>, <ast.Constant object at 0x7da1b1d65db0>]]] begin[:]
call[call[name[row]][name[add_type]]][name[key]] assign[=] call[name[str], parameter[call[name[row].get, parameter[binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1d668c0>, <ast.Name object at 0x7da1b1d65990>]]], constant[]]]]]
<ast.Yield object at 0x7da1b1d39180> | keyword[def] identifier[get_rows] ( identifier[self] , identifier[startrow] = literal[int] , identifier[worksheet] = keyword[None] ):
literal[string]
identifier[headers] =[]
identifier[row_nr] = literal[int]
identifier[worksheet] = identifier[worksheet] keyword[if] identifier[worksheet] keyword[else] identifier[self] . identifier[worksheet]
keyword[for] identifier[row] keyword[in] identifier[worksheet] . identifier[rows] :
identifier[row_nr] += literal[int]
keyword[if] identifier[row_nr] == literal[int] :
identifier[headers] =[ identifier[cell] . identifier[value] keyword[for] identifier[cell] keyword[in] identifier[row] ]
keyword[continue]
keyword[if] identifier[row_nr] % literal[int] == literal[int] :
identifier[transaction] . identifier[savepoint] ()
keyword[if] identifier[row_nr] <= identifier[startrow] :
keyword[continue]
identifier[new_row] =[]
keyword[for] identifier[cell] keyword[in] identifier[row] :
identifier[value] = identifier[cell] . identifier[value]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[value] = literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[unicode] ):
identifier[value] = identifier[value] . identifier[encode] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ):
identifier[value] = identifier[value] . identifier[strip] ( literal[string] )
identifier[new_row] . identifier[append] ( identifier[value] )
identifier[row] = identifier[dict] ( identifier[zip] ( identifier[headers] , identifier[new_row] ))
keyword[for] identifier[add_type] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[row] [ identifier[add_type] ]={}
keyword[if] identifier[add_type] + literal[string] keyword[in] identifier[row] :
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[row] [ identifier[add_type] ][ identifier[key] ]= identifier[str] ( identifier[row] . identifier[get] ( literal[string] %( identifier[add_type] , identifier[key] ), literal[string] ))
keyword[yield] identifier[row] | def get_rows(self, startrow=3, worksheet=None):
"""Returns a generator for all rows in a sheet.
Each row contains a dictionary where the key is the value of the
first row of the sheet for each column.
The data values are returned in utf-8 format.
Starts to consume data from startrow
"""
headers = []
row_nr = 0
worksheet = worksheet if worksheet else self.worksheet
for row in worksheet.rows: # .iter_rows():
row_nr += 1
if row_nr == 1:
# headers = [cell.internal_value for cell in row]
headers = [cell.value for cell in row]
continue # depends on [control=['if'], data=[]]
if row_nr % 1000 == 0:
transaction.savepoint() # depends on [control=['if'], data=[]]
if row_nr <= startrow:
continue # depends on [control=['if'], data=[]]
# row = [_c(cell.internal_value).decode('utf-8') for cell in row]
new_row = []
for cell in row:
value = cell.value
if value is None:
value = '' # depends on [control=['if'], data=['value']]
if isinstance(value, unicode):
value = value.encode('utf-8') # depends on [control=['if'], data=[]]
# Strip any space, \t, \n, or \r characters from the left-hand
# side, right-hand side, or both sides of the string
if isinstance(value, str):
value = value.strip(' \t\n\r') # depends on [control=['if'], data=[]]
new_row.append(value) # depends on [control=['for'], data=['cell']]
row = dict(zip(headers, new_row))
# parse out addresses
for add_type in ['Physical', 'Postal', 'Billing']:
row[add_type] = {}
if add_type + '_Address' in row:
for key in ['Address', 'City', 'State', 'District', 'Zip', 'Country']:
row[add_type][key] = str(row.get('%s_%s' % (add_type, key), '')) # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=['row']] # depends on [control=['for'], data=['add_type']]
yield row # depends on [control=['for'], data=['row']] |
def sixteensreporter(self, analysistype='sixteens_full'):
    """
    Create a CSV report and a FASTA file of the best 16S hit per sample.

    Writes ``<analysistype>.csv`` (Strain, Gene, PercentIdentity, Genus,
    FoldCoverage) and ``<analysistype>_sequences.fa`` into
    ``self.reportpath``.  Samples without results still get a row with
    only their name (KeyError/IndexError path below).

    :param analysistype: The variable to use when accessing attributes in the metadata object
    """
    # Create the path in which the reports are stored
    make_path(self.reportpath)
    # Initialise the header and data strings
    header = 'Strain,Gene,PercentIdentity,Genus,FoldCoverage\n'
    data = ''
    with open(os.path.join(self.reportpath, analysistype + '.csv'), 'w') as report:
        with open(os.path.join(self.reportpath, analysistype + '_sequences.fa'), 'w') as sequences:
            for sample in self.runmetadata.samples:
                try:
                    # Select the best hit of all the full-length 16S genes mapped
                    # (highest percent identity wins).
                    sample[analysistype].besthit = sorted(sample[analysistype].results.items(),
                                                          key=operator.itemgetter(1), reverse=True)[0][0]
                    # Add the sample name to the data string
                    data += sample.name + ','
                    # Find the record that matches the best hit, and extract the necessary values to be place in the
                    # data string
                    for name, identity in sample[analysistype].results.items():
                        if name == sample[analysistype].besthit:
                            data += '{},{},{},{}\n'.format(name, identity, sample[analysistype].genus,
                                                           sample[analysistype].avgdepth[name])
                            # Create a FASTA-formatted sequence output of the 16S sequence
                            record = SeqRecord(Seq(sample[analysistype].sequences[name],
                                                   IUPAC.unambiguous_dna),
                                               id='{}_{}'.format(sample.name, '16S'),
                                               description='')
                            SeqIO.write(record, sequences, 'fasta')
                except (KeyError, IndexError):
                    # Sample had no usable results: emit a name-only row.
                    data += '{}\n'.format(sample.name)
        # Write the results to the report
        report.write(header)
        report.write(data)
constant[
Creates a report of the results
:param analysistype: The variable to use when accessing attributes in the metadata object
]
call[name[make_path], parameter[name[self].reportpath]]
variable[header] assign[=] constant[Strain,Gene,PercentIdentity,Genus,FoldCoverage
]
variable[data] assign[=] constant[]
with call[name[open], parameter[call[name[os].path.join, parameter[name[self].reportpath, binary_operation[name[analysistype] + constant[.csv]]]], constant[w]]] begin[:]
with call[name[open], parameter[call[name[os].path.join, parameter[name[self].reportpath, binary_operation[name[analysistype] + constant[_sequences.fa]]]], constant[w]]] begin[:]
for taget[name[sample]] in starred[name[self].runmetadata.samples] begin[:]
<ast.Try object at 0x7da1b1112bf0>
call[name[report].write, parameter[name[header]]]
call[name[report].write, parameter[name[data]]] | keyword[def] identifier[sixteensreporter] ( identifier[self] , identifier[analysistype] = literal[string] ):
literal[string]
identifier[make_path] ( identifier[self] . identifier[reportpath] )
identifier[header] = literal[string]
identifier[data] = literal[string]
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[reportpath] , identifier[analysistype] + literal[string] ), literal[string] ) keyword[as] identifier[report] :
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[reportpath] , identifier[analysistype] + literal[string] ), literal[string] ) keyword[as] identifier[sequences] :
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[runmetadata] . identifier[samples] :
keyword[try] :
identifier[sample] [ identifier[analysistype] ]. identifier[besthit] = identifier[sorted] ( identifier[sample] [ identifier[analysistype] ]. identifier[results] . identifier[items] (),
identifier[key] = identifier[operator] . identifier[itemgetter] ( literal[int] ), identifier[reverse] = keyword[True] )[ literal[int] ][ literal[int] ]
identifier[data] += identifier[sample] . identifier[name] + literal[string]
keyword[for] identifier[name] , identifier[identity] keyword[in] identifier[sample] [ identifier[analysistype] ]. identifier[results] . identifier[items] ():
keyword[if] identifier[name] == identifier[sample] [ identifier[analysistype] ]. identifier[besthit] :
identifier[data] += literal[string] . identifier[format] ( identifier[name] , identifier[identity] , identifier[sample] [ identifier[analysistype] ]. identifier[genus] ,
identifier[sample] [ identifier[analysistype] ]. identifier[avgdepth] [ identifier[name] ])
identifier[record] = identifier[SeqRecord] ( identifier[Seq] ( identifier[sample] [ identifier[analysistype] ]. identifier[sequences] [ identifier[name] ],
identifier[IUPAC] . identifier[unambiguous_dna] ),
identifier[id] = literal[string] . identifier[format] ( identifier[sample] . identifier[name] , literal[string] ),
identifier[description] = literal[string] )
identifier[SeqIO] . identifier[write] ( identifier[record] , identifier[sequences] , literal[string] )
keyword[except] ( identifier[KeyError] , identifier[IndexError] ):
identifier[data] += literal[string] . identifier[format] ( identifier[sample] . identifier[name] )
identifier[report] . identifier[write] ( identifier[header] )
identifier[report] . identifier[write] ( identifier[data] ) | def sixteensreporter(self, analysistype='sixteens_full'):
"""
Creates a report of the results
:param analysistype: The variable to use when accessing attributes in the metadata object
"""
# Create the path in which the reports are stored
make_path(self.reportpath)
# Initialise the header and data strings
header = 'Strain,Gene,PercentIdentity,Genus,FoldCoverage\n'
data = ''
with open(os.path.join(self.reportpath, analysistype + '.csv'), 'w') as report:
with open(os.path.join(self.reportpath, analysistype + '_sequences.fa'), 'w') as sequences:
for sample in self.runmetadata.samples:
try:
# Select the best hit of all the full-length 16S genes mapped
sample[analysistype].besthit = sorted(sample[analysistype].results.items(), key=operator.itemgetter(1), reverse=True)[0][0]
# Add the sample name to the data string
data += sample.name + ','
# Find the record that matches the best hit, and extract the necessary values to be place in the
# data string
for (name, identity) in sample[analysistype].results.items():
if name == sample[analysistype].besthit:
data += '{},{},{},{}\n'.format(name, identity, sample[analysistype].genus, sample[analysistype].avgdepth[name])
# Create a FASTA-formatted sequence output of the 16S sequence
record = SeqRecord(Seq(sample[analysistype].sequences[name], IUPAC.unambiguous_dna), id='{}_{}'.format(sample.name, '16S'), description='')
SeqIO.write(record, sequences, 'fasta') # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except (KeyError, IndexError):
data += '{}\n'.format(sample.name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['sample']] # depends on [control=['with'], data=['sequences']]
# Write the results to the report
report.write(header)
report.write(data) # depends on [control=['with'], data=['open', 'report']] |
def create_proteinquant_lookup(fns, pqdb, poolnames, protacc_colnr,
                               ms1_qcolpattern=None, isobqcolpattern=None,
                               psmnrpattern=None, probcolpattern=None,
                               fdrcolpattern=None, pepcolpattern=None):
    """Calls lower level function to create a protein quant lookup"""
    # Pair each optional column pattern with the DB method that stores
    # the corresponding quantity; order must stay in sync.
    pattern_store_pairs = [
        (ms1_qcolpattern, pqdb.store_precursor_quants),
        (probcolpattern, pqdb.store_probability),
        (fdrcolpattern, pqdb.store_fdr),
        (pepcolpattern, pqdb.store_pep),
    ]
    patterns = [pattern for pattern, _ in pattern_store_pairs]
    storefuns = [storefun for _, storefun in pattern_store_pairs]
    create_pep_protein_quant_lookup(fns, pqdb, poolnames, protacc_colnr,
                                    patterns, storefuns, isobqcolpattern,
                                    psmnrpattern)
constant[Calls lower level function to create a protein quant lookup]
variable[patterns] assign[=] list[[<ast.Name object at 0x7da1b24e21d0>, <ast.Name object at 0x7da1b24e1510>, <ast.Name object at 0x7da1b24e29e0>, <ast.Name object at 0x7da1b24e1540>]]
variable[storefuns] assign[=] list[[<ast.Attribute object at 0x7da1b24e0bb0>, <ast.Attribute object at 0x7da1b24e0640>, <ast.Attribute object at 0x7da1b24e2d70>, <ast.Attribute object at 0x7da1b24e29b0>]]
call[name[create_pep_protein_quant_lookup], parameter[name[fns], name[pqdb], name[poolnames], name[protacc_colnr], name[patterns], name[storefuns], name[isobqcolpattern], name[psmnrpattern]]] | keyword[def] identifier[create_proteinquant_lookup] ( identifier[fns] , identifier[pqdb] , identifier[poolnames] , identifier[protacc_colnr] ,
identifier[ms1_qcolpattern] = keyword[None] , identifier[isobqcolpattern] = keyword[None] ,
identifier[psmnrpattern] = keyword[None] , identifier[probcolpattern] = keyword[None] ,
identifier[fdrcolpattern] = keyword[None] , identifier[pepcolpattern] = keyword[None] ):
literal[string]
identifier[patterns] =[ identifier[ms1_qcolpattern] , identifier[probcolpattern] , identifier[fdrcolpattern] , identifier[pepcolpattern] ]
identifier[storefuns] =[ identifier[pqdb] . identifier[store_precursor_quants] , identifier[pqdb] . identifier[store_probability] ,
identifier[pqdb] . identifier[store_fdr] , identifier[pqdb] . identifier[store_pep] ]
identifier[create_pep_protein_quant_lookup] ( identifier[fns] , identifier[pqdb] , identifier[poolnames] , identifier[protacc_colnr] ,
identifier[patterns] , identifier[storefuns] , identifier[isobqcolpattern] ,
identifier[psmnrpattern] ) | def create_proteinquant_lookup(fns, pqdb, poolnames, protacc_colnr, ms1_qcolpattern=None, isobqcolpattern=None, psmnrpattern=None, probcolpattern=None, fdrcolpattern=None, pepcolpattern=None):
"""Calls lower level function to create a protein quant lookup"""
patterns = [ms1_qcolpattern, probcolpattern, fdrcolpattern, pepcolpattern]
storefuns = [pqdb.store_precursor_quants, pqdb.store_probability, pqdb.store_fdr, pqdb.store_pep]
create_pep_protein_quant_lookup(fns, pqdb, poolnames, protacc_colnr, patterns, storefuns, isobqcolpattern, psmnrpattern) |
def _update(self):
    """Update status text."""
    # Collect the first names of everyone (other than us) whose typing
    # status is currently STARTED.
    names = []
    for user_id, status in self._typing_statuses.items():
        if status != hangups.TYPING_TYPE_STARTED:
            continue
        user = self._conversation.get_user(user_id)
        if not user.is_self:
            names.append(user.first_name)
    if names:
        verb = 'is' if len(names) == 1 else 'are'
        typing_message = '{} {} typing...'.format(', '.join(sorted(names)), verb)
    else:
        typing_message = ''
    # Connection problems and explicit messages take priority over the
    # typing indicator.
    if not self._is_connected:
        self._widget.set_text("RECONNECTING...")
    elif self._message is not None:
        self._widget.set_text(self._message)
    else:
        self._widget.set_text(typing_message)
constant[Update status text.]
variable[typing_users] assign[=] <ast.ListComp object at 0x7da2047e8400>
variable[displayed_names] assign[=] <ast.ListComp object at 0x7da2047eb7f0>
if name[displayed_names] begin[:]
variable[typing_message] assign[=] call[constant[{} {} typing...].format, parameter[call[constant[, ].join, parameter[call[name[sorted], parameter[name[displayed_names]]]]], <ast.IfExp object at 0x7da20c796290>]]
if <ast.UnaryOp object at 0x7da20c7946a0> begin[:]
call[name[self]._widget.set_text, parameter[constant[RECONNECTING...]]] | keyword[def] identifier[_update] ( identifier[self] ):
literal[string]
identifier[typing_users] =[ identifier[self] . identifier[_conversation] . identifier[get_user] ( identifier[user_id] )
keyword[for] identifier[user_id] , identifier[status] keyword[in] identifier[self] . identifier[_typing_statuses] . identifier[items] ()
keyword[if] identifier[status] == identifier[hangups] . identifier[TYPING_TYPE_STARTED] ]
identifier[displayed_names] =[ identifier[user] . identifier[first_name] keyword[for] identifier[user] keyword[in] identifier[typing_users]
keyword[if] keyword[not] identifier[user] . identifier[is_self] ]
keyword[if] identifier[displayed_names] :
identifier[typing_message] = literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[sorted] ( identifier[displayed_names] )),
literal[string] keyword[if] identifier[len] ( identifier[displayed_names] )== literal[int] keyword[else] literal[string]
)
keyword[else] :
identifier[typing_message] = literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_is_connected] :
identifier[self] . identifier[_widget] . identifier[set_text] ( literal[string] )
keyword[elif] identifier[self] . identifier[_message] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_widget] . identifier[set_text] ( identifier[self] . identifier[_message] )
keyword[else] :
identifier[self] . identifier[_widget] . identifier[set_text] ( identifier[typing_message] ) | def _update(self):
"""Update status text."""
typing_users = [self._conversation.get_user(user_id) for (user_id, status) in self._typing_statuses.items() if status == hangups.TYPING_TYPE_STARTED]
displayed_names = [user.first_name for user in typing_users if not user.is_self]
if displayed_names:
typing_message = '{} {} typing...'.format(', '.join(sorted(displayed_names)), 'is' if len(displayed_names) == 1 else 'are') # depends on [control=['if'], data=[]]
else:
typing_message = ''
if not self._is_connected:
self._widget.set_text('RECONNECTING...') # depends on [control=['if'], data=[]]
elif self._message is not None:
self._widget.set_text(self._message) # depends on [control=['if'], data=[]]
else:
self._widget.set_text(typing_message) |
def perform_permissions_check(self, user, obj, perms):
""" Performs the permissions check. """
return self.request.forum_permission_handler.can_update_topics_to_sticky_topics(obj, user) | def function[perform_permissions_check, parameter[self, user, obj, perms]]:
constant[ Performs the permissions check. ]
return[call[name[self].request.forum_permission_handler.can_update_topics_to_sticky_topics, parameter[name[obj], name[user]]]] | keyword[def] identifier[perform_permissions_check] ( identifier[self] , identifier[user] , identifier[obj] , identifier[perms] ):
literal[string]
keyword[return] identifier[self] . identifier[request] . identifier[forum_permission_handler] . identifier[can_update_topics_to_sticky_topics] ( identifier[obj] , identifier[user] ) | def perform_permissions_check(self, user, obj, perms):
""" Performs the permissions check. """
return self.request.forum_permission_handler.can_update_topics_to_sticky_topics(obj, user) |
def get_typename(x):
'''Returns the name of the type of x, if x is an object. Otherwise, returns the name of x.
'''
if isinstance(x, type):
ret = x.__name__
else:
ret = x.__class__.__name__
return ret | def function[get_typename, parameter[x]]:
constant[Returns the name of the type of x, if x is an object. Otherwise, returns the name of x.
]
if call[name[isinstance], parameter[name[x], name[type]]] begin[:]
variable[ret] assign[=] name[x].__name__
return[name[ret]] | keyword[def] identifier[get_typename] ( identifier[x] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[x] , identifier[type] ):
identifier[ret] = identifier[x] . identifier[__name__]
keyword[else] :
identifier[ret] = identifier[x] . identifier[__class__] . identifier[__name__]
keyword[return] identifier[ret] | def get_typename(x):
"""Returns the name of the type of x, if x is an object. Otherwise, returns the name of x.
"""
if isinstance(x, type):
ret = x.__name__ # depends on [control=['if'], data=[]]
else:
ret = x.__class__.__name__
return ret |
def read_value(self):
"""Read the value of this descriptor."""
pass
# Kick off a query to read the value of the descriptor, then wait
# for the result to return asyncronously.
self._value_read.clear()
self._device._peripheral.readValueForDescriptor(self._descriptor)
if not self._value_read.wait(timeout_sec):
raise RuntimeError('Exceeded timeout waiting to read characteristic value!')
return self._value | def function[read_value, parameter[self]]:
constant[Read the value of this descriptor.]
pass
call[name[self]._value_read.clear, parameter[]]
call[name[self]._device._peripheral.readValueForDescriptor, parameter[name[self]._descriptor]]
if <ast.UnaryOp object at 0x7da204962bc0> begin[:]
<ast.Raise object at 0x7da20c6e4e20>
return[name[self]._value] | keyword[def] identifier[read_value] ( identifier[self] ):
literal[string]
keyword[pass]
identifier[self] . identifier[_value_read] . identifier[clear] ()
identifier[self] . identifier[_device] . identifier[_peripheral] . identifier[readValueForDescriptor] ( identifier[self] . identifier[_descriptor] )
keyword[if] keyword[not] identifier[self] . identifier[_value_read] . identifier[wait] ( identifier[timeout_sec] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[return] identifier[self] . identifier[_value] | def read_value(self):
"""Read the value of this descriptor."""
pass
# Kick off a query to read the value of the descriptor, then wait
# for the result to return asyncronously.
self._value_read.clear()
self._device._peripheral.readValueForDescriptor(self._descriptor)
if not self._value_read.wait(timeout_sec):
raise RuntimeError('Exceeded timeout waiting to read characteristic value!') # depends on [control=['if'], data=[]]
return self._value |
def refresh_styles(self):
"""Load all available styles"""
import matplotlib.pyplot as plt
self.colours = {}
for style in plt.style.available:
try:
style_colours = plt.style.library[style]['axes.prop_cycle']
self.colours[style] = [c['color'] for c in list(style_colours)]
except KeyError:
continue
self.colours['km3pipe'] = [
"#ff7869", "#4babe1", "#96ad3e", "#e4823d", "#5d72b2", "#e2a3c2",
"#fd9844", "#e480e7"
] | def function[refresh_styles, parameter[self]]:
constant[Load all available styles]
import module[matplotlib.pyplot] as alias[plt]
name[self].colours assign[=] dictionary[[], []]
for taget[name[style]] in starred[name[plt].style.available] begin[:]
<ast.Try object at 0x7da1b26ad570>
call[name[self].colours][constant[km3pipe]] assign[=] list[[<ast.Constant object at 0x7da1b26adb40>, <ast.Constant object at 0x7da1b26adf30>, <ast.Constant object at 0x7da1b26af100>, <ast.Constant object at 0x7da1b26adf90>, <ast.Constant object at 0x7da1b26ac1c0>, <ast.Constant object at 0x7da1b26aded0>, <ast.Constant object at 0x7da1b26ae1d0>, <ast.Constant object at 0x7da1b26aeec0>]] | keyword[def] identifier[refresh_styles] ( identifier[self] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
identifier[self] . identifier[colours] ={}
keyword[for] identifier[style] keyword[in] identifier[plt] . identifier[style] . identifier[available] :
keyword[try] :
identifier[style_colours] = identifier[plt] . identifier[style] . identifier[library] [ identifier[style] ][ literal[string] ]
identifier[self] . identifier[colours] [ identifier[style] ]=[ identifier[c] [ literal[string] ] keyword[for] identifier[c] keyword[in] identifier[list] ( identifier[style_colours] )]
keyword[except] identifier[KeyError] :
keyword[continue]
identifier[self] . identifier[colours] [ literal[string] ]=[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string]
] | def refresh_styles(self):
"""Load all available styles"""
import matplotlib.pyplot as plt
self.colours = {}
for style in plt.style.available:
try:
style_colours = plt.style.library[style]['axes.prop_cycle']
self.colours[style] = [c['color'] for c in list(style_colours)] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['style']]
self.colours['km3pipe'] = ['#ff7869', '#4babe1', '#96ad3e', '#e4823d', '#5d72b2', '#e2a3c2', '#fd9844', '#e480e7'] |
def findvalue(array, value, compare = lambda x, y: x == y):
"A function that uses the compare function to return a value from the list."
try:
return next(x for x in array if compare(x, value))
except StopIteration:
raise ValueError('%r not in array'%value) | def function[findvalue, parameter[array, value, compare]]:
constant[A function that uses the compare function to return a value from the list.]
<ast.Try object at 0x7da20c7c9300> | keyword[def] identifier[findvalue] ( identifier[array] , identifier[value] , identifier[compare] = keyword[lambda] identifier[x] , identifier[y] : identifier[x] == identifier[y] ):
literal[string]
keyword[try] :
keyword[return] identifier[next] ( identifier[x] keyword[for] identifier[x] keyword[in] identifier[array] keyword[if] identifier[compare] ( identifier[x] , identifier[value] ))
keyword[except] identifier[StopIteration] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[value] ) | def findvalue(array, value, compare=lambda x, y: x == y):
"""A function that uses the compare function to return a value from the list."""
try:
return next((x for x in array if compare(x, value))) # depends on [control=['try'], data=[]]
except StopIteration:
raise ValueError('%r not in array' % value) # depends on [control=['except'], data=[]] |
def map(self, f):
"""
Maps this Pair with *f*'; see :meth:`vanilla.core.Recver.map`
Returns a new Pair of our current Sender and the mapped target's
Recver.
"""
return self._replace(recver=self.recver.map(f)) | def function[map, parameter[self, f]]:
constant[
Maps this Pair with *f*'; see :meth:`vanilla.core.Recver.map`
Returns a new Pair of our current Sender and the mapped target's
Recver.
]
return[call[name[self]._replace, parameter[]]] | keyword[def] identifier[map] ( identifier[self] , identifier[f] ):
literal[string]
keyword[return] identifier[self] . identifier[_replace] ( identifier[recver] = identifier[self] . identifier[recver] . identifier[map] ( identifier[f] )) | def map(self, f):
"""
Maps this Pair with *f*'; see :meth:`vanilla.core.Recver.map`
Returns a new Pair of our current Sender and the mapped target's
Recver.
"""
return self._replace(recver=self.recver.map(f)) |
def get(self, name, template_path):
'''
Returns the renderer object.
:param name: name of the requested renderer
:param template_path: path to the template
'''
if name not in self._renderers:
cls = self._renderer_classes.get(name)
if cls is None:
return None
else:
self._renderers[name] = cls(template_path, self.extra_vars)
return self._renderers[name] | def function[get, parameter[self, name, template_path]]:
constant[
Returns the renderer object.
:param name: name of the requested renderer
:param template_path: path to the template
]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._renderers] begin[:]
variable[cls] assign[=] call[name[self]._renderer_classes.get, parameter[name[name]]]
if compare[name[cls] is constant[None]] begin[:]
return[constant[None]]
return[call[name[self]._renderers][name[name]]] | keyword[def] identifier[get] ( identifier[self] , identifier[name] , identifier[template_path] ):
literal[string]
keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_renderers] :
identifier[cls] = identifier[self] . identifier[_renderer_classes] . identifier[get] ( identifier[name] )
keyword[if] identifier[cls] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[else] :
identifier[self] . identifier[_renderers] [ identifier[name] ]= identifier[cls] ( identifier[template_path] , identifier[self] . identifier[extra_vars] )
keyword[return] identifier[self] . identifier[_renderers] [ identifier[name] ] | def get(self, name, template_path):
"""
Returns the renderer object.
:param name: name of the requested renderer
:param template_path: path to the template
"""
if name not in self._renderers:
cls = self._renderer_classes.get(name)
if cls is None:
return None # depends on [control=['if'], data=[]]
else:
self._renderers[name] = cls(template_path, self.extra_vars) # depends on [control=['if'], data=['name']]
return self._renderers[name] |
def convert_concat(net, node, module, builder):
"""Convert concat layer from mxnet to coreml.
Parameters
----------
network: net
A mxnet network object.
layer: node
Node to convert.
module: module
An module for MXNet
builder: NeuralNetworkBuilder
A neural network builder object.
"""
# Get input and output names
input_names, output_name = _get_input_output_name(net, node, 'all')
name = node['name']
mode = 'CONCAT'
builder.add_elementwise(name = name, input_names = input_names,
output_name = output_name, mode = mode) | def function[convert_concat, parameter[net, node, module, builder]]:
constant[Convert concat layer from mxnet to coreml.
Parameters
----------
network: net
A mxnet network object.
layer: node
Node to convert.
module: module
An module for MXNet
builder: NeuralNetworkBuilder
A neural network builder object.
]
<ast.Tuple object at 0x7da1b20665c0> assign[=] call[name[_get_input_output_name], parameter[name[net], name[node], constant[all]]]
variable[name] assign[=] call[name[node]][constant[name]]
variable[mode] assign[=] constant[CONCAT]
call[name[builder].add_elementwise, parameter[]] | keyword[def] identifier[convert_concat] ( identifier[net] , identifier[node] , identifier[module] , identifier[builder] ):
literal[string]
identifier[input_names] , identifier[output_name] = identifier[_get_input_output_name] ( identifier[net] , identifier[node] , literal[string] )
identifier[name] = identifier[node] [ literal[string] ]
identifier[mode] = literal[string]
identifier[builder] . identifier[add_elementwise] ( identifier[name] = identifier[name] , identifier[input_names] = identifier[input_names] ,
identifier[output_name] = identifier[output_name] , identifier[mode] = identifier[mode] ) | def convert_concat(net, node, module, builder):
"""Convert concat layer from mxnet to coreml.
Parameters
----------
network: net
A mxnet network object.
layer: node
Node to convert.
module: module
An module for MXNet
builder: NeuralNetworkBuilder
A neural network builder object.
"""
# Get input and output names
(input_names, output_name) = _get_input_output_name(net, node, 'all')
name = node['name']
mode = 'CONCAT'
builder.add_elementwise(name=name, input_names=input_names, output_name=output_name, mode=mode) |
def add_movie(self, movie_file, left, top, width, height,
poster_frame_image=None, mime_type=CT.VIDEO):
"""Return newly added movie shape displaying video in *movie_file*.
**EXPERIMENTAL.** This method has important limitations:
* The size must be specified; no auto-scaling such as that provided
by :meth:`add_picture` is performed.
* The MIME type of the video file should be specified, e.g.
'video/mp4'. The provided video file is not interrogated for its
type. The MIME type `video/unknown` is used by default (and works
fine in tests as of this writing).
* A poster frame image must be provided, it cannot be automatically
extracted from the video file. If no poster frame is provided, the
default "media loudspeaker" image will be used.
Return a newly added movie shape to the slide, positioned at (*left*,
*top*), having size (*width*, *height*), and containing *movie_file*.
Before the video is started, *poster_frame_image* is displayed as
a placeholder for the video.
"""
movie_pic = _MoviePicElementCreator.new_movie_pic(
self, self._next_shape_id, movie_file, left, top, width, height,
poster_frame_image, mime_type
)
self._spTree.append(movie_pic)
self._add_video_timing(movie_pic)
return self._shape_factory(movie_pic) | def function[add_movie, parameter[self, movie_file, left, top, width, height, poster_frame_image, mime_type]]:
constant[Return newly added movie shape displaying video in *movie_file*.
**EXPERIMENTAL.** This method has important limitations:
* The size must be specified; no auto-scaling such as that provided
by :meth:`add_picture` is performed.
* The MIME type of the video file should be specified, e.g.
'video/mp4'. The provided video file is not interrogated for its
type. The MIME type `video/unknown` is used by default (and works
fine in tests as of this writing).
* A poster frame image must be provided, it cannot be automatically
extracted from the video file. If no poster frame is provided, the
default "media loudspeaker" image will be used.
Return a newly added movie shape to the slide, positioned at (*left*,
*top*), having size (*width*, *height*), and containing *movie_file*.
Before the video is started, *poster_frame_image* is displayed as
a placeholder for the video.
]
variable[movie_pic] assign[=] call[name[_MoviePicElementCreator].new_movie_pic, parameter[name[self], name[self]._next_shape_id, name[movie_file], name[left], name[top], name[width], name[height], name[poster_frame_image], name[mime_type]]]
call[name[self]._spTree.append, parameter[name[movie_pic]]]
call[name[self]._add_video_timing, parameter[name[movie_pic]]]
return[call[name[self]._shape_factory, parameter[name[movie_pic]]]] | keyword[def] identifier[add_movie] ( identifier[self] , identifier[movie_file] , identifier[left] , identifier[top] , identifier[width] , identifier[height] ,
identifier[poster_frame_image] = keyword[None] , identifier[mime_type] = identifier[CT] . identifier[VIDEO] ):
literal[string]
identifier[movie_pic] = identifier[_MoviePicElementCreator] . identifier[new_movie_pic] (
identifier[self] , identifier[self] . identifier[_next_shape_id] , identifier[movie_file] , identifier[left] , identifier[top] , identifier[width] , identifier[height] ,
identifier[poster_frame_image] , identifier[mime_type]
)
identifier[self] . identifier[_spTree] . identifier[append] ( identifier[movie_pic] )
identifier[self] . identifier[_add_video_timing] ( identifier[movie_pic] )
keyword[return] identifier[self] . identifier[_shape_factory] ( identifier[movie_pic] ) | def add_movie(self, movie_file, left, top, width, height, poster_frame_image=None, mime_type=CT.VIDEO):
"""Return newly added movie shape displaying video in *movie_file*.
**EXPERIMENTAL.** This method has important limitations:
* The size must be specified; no auto-scaling such as that provided
by :meth:`add_picture` is performed.
* The MIME type of the video file should be specified, e.g.
'video/mp4'. The provided video file is not interrogated for its
type. The MIME type `video/unknown` is used by default (and works
fine in tests as of this writing).
* A poster frame image must be provided, it cannot be automatically
extracted from the video file. If no poster frame is provided, the
default "media loudspeaker" image will be used.
Return a newly added movie shape to the slide, positioned at (*left*,
*top*), having size (*width*, *height*), and containing *movie_file*.
Before the video is started, *poster_frame_image* is displayed as
a placeholder for the video.
"""
movie_pic = _MoviePicElementCreator.new_movie_pic(self, self._next_shape_id, movie_file, left, top, width, height, poster_frame_image, mime_type)
self._spTree.append(movie_pic)
self._add_video_timing(movie_pic)
return self._shape_factory(movie_pic) |
def write_version_info(conn, version_table, version_value):
"""
Inserts the version value in to the version table.
Parameters
----------
conn : sa.Connection
The connection to use to execute the insert.
version_table : sa.Table
The version table of the asset database
version_value : int
The version to write in to the database
"""
conn.execute(sa.insert(version_table, values={'version': version_value})) | def function[write_version_info, parameter[conn, version_table, version_value]]:
constant[
Inserts the version value in to the version table.
Parameters
----------
conn : sa.Connection
The connection to use to execute the insert.
version_table : sa.Table
The version table of the asset database
version_value : int
The version to write in to the database
]
call[name[conn].execute, parameter[call[name[sa].insert, parameter[name[version_table]]]]] | keyword[def] identifier[write_version_info] ( identifier[conn] , identifier[version_table] , identifier[version_value] ):
literal[string]
identifier[conn] . identifier[execute] ( identifier[sa] . identifier[insert] ( identifier[version_table] , identifier[values] ={ literal[string] : identifier[version_value] })) | def write_version_info(conn, version_table, version_value):
"""
Inserts the version value in to the version table.
Parameters
----------
conn : sa.Connection
The connection to use to execute the insert.
version_table : sa.Table
The version table of the asset database
version_value : int
The version to write in to the database
"""
conn.execute(sa.insert(version_table, values={'version': version_value})) |
def _TrimNode(node, index, depth, flags):
"""
Internal helper method to trim a node.
Args:
node (MerkleTreeNode):
index (int): flag index.
depth (int): node tree depth to start trim from.
flags (bytearray): of left/right pairs. 1 byte for the left node, 1 byte for the right node.
00 to erase, 11 to keep. Will keep the node if either left or right is not-0
"""
if depth == 1 or node.LeftChild is None:
return
if depth == 2:
if not flags[index * 2] and not flags[index * 2 + 1]:
node.LeftChild = None
node.RightChild = None
else:
MerkleTree._TrimNode(node.LeftChild, index * 2, depth - 1, flags)
MerkleTree._TrimNode(node.RightChild, index * 2, depth - 1, flags)
if node.LeftChild.LeftChild is None and node.RightChild.RightChild is None:
node.LeftChild = None
node.RightChild = None | def function[_TrimNode, parameter[node, index, depth, flags]]:
constant[
Internal helper method to trim a node.
Args:
node (MerkleTreeNode):
index (int): flag index.
depth (int): node tree depth to start trim from.
flags (bytearray): of left/right pairs. 1 byte for the left node, 1 byte for the right node.
00 to erase, 11 to keep. Will keep the node if either left or right is not-0
]
if <ast.BoolOp object at 0x7da2044c2950> begin[:]
return[None]
if compare[name[depth] equal[==] constant[2]] begin[:]
if <ast.BoolOp object at 0x7da2044c3ac0> begin[:]
name[node].LeftChild assign[=] constant[None]
name[node].RightChild assign[=] constant[None] | keyword[def] identifier[_TrimNode] ( identifier[node] , identifier[index] , identifier[depth] , identifier[flags] ):
literal[string]
keyword[if] identifier[depth] == literal[int] keyword[or] identifier[node] . identifier[LeftChild] keyword[is] keyword[None] :
keyword[return]
keyword[if] identifier[depth] == literal[int] :
keyword[if] keyword[not] identifier[flags] [ identifier[index] * literal[int] ] keyword[and] keyword[not] identifier[flags] [ identifier[index] * literal[int] + literal[int] ]:
identifier[node] . identifier[LeftChild] = keyword[None]
identifier[node] . identifier[RightChild] = keyword[None]
keyword[else] :
identifier[MerkleTree] . identifier[_TrimNode] ( identifier[node] . identifier[LeftChild] , identifier[index] * literal[int] , identifier[depth] - literal[int] , identifier[flags] )
identifier[MerkleTree] . identifier[_TrimNode] ( identifier[node] . identifier[RightChild] , identifier[index] * literal[int] , identifier[depth] - literal[int] , identifier[flags] )
keyword[if] identifier[node] . identifier[LeftChild] . identifier[LeftChild] keyword[is] keyword[None] keyword[and] identifier[node] . identifier[RightChild] . identifier[RightChild] keyword[is] keyword[None] :
identifier[node] . identifier[LeftChild] = keyword[None]
identifier[node] . identifier[RightChild] = keyword[None] | def _TrimNode(node, index, depth, flags):
"""
Internal helper method to trim a node.
Args:
node (MerkleTreeNode):
index (int): flag index.
depth (int): node tree depth to start trim from.
flags (bytearray): of left/right pairs. 1 byte for the left node, 1 byte for the right node.
00 to erase, 11 to keep. Will keep the node if either left or right is not-0
"""
if depth == 1 or node.LeftChild is None:
return # depends on [control=['if'], data=[]]
if depth == 2:
if not flags[index * 2] and (not flags[index * 2 + 1]):
node.LeftChild = None
node.RightChild = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
MerkleTree._TrimNode(node.LeftChild, index * 2, depth - 1, flags)
MerkleTree._TrimNode(node.RightChild, index * 2, depth - 1, flags)
if node.LeftChild.LeftChild is None and node.RightChild.RightChild is None:
node.LeftChild = None
node.RightChild = None # depends on [control=['if'], data=[]] |
def players(game_id):
"""Gets player/coach/umpire information for the game with matching id."""
# get data
data = mlbgame.data.get_players(game_id)
# parse data
parsed = etree.parse(data)
root = parsed.getroot()
output = {}
output['game_id'] = game_id
# get player/coach data
for team in root.findall('team'):
type = team.attrib['type'] + "_team"
# the type is either home_team or away_team
output[type] = {}
output[type]['players'] = []
output[type]['coaches'] = []
for p in team.findall('player'):
player = {}
for key in p.keys():
player[key] = p.get(key)
output[type]['players'].append(player)
for c in team.findall('coach'):
coach = {}
for key in c.keys():
coach[key] = c.get(key)
output[type]['coaches'].append(coach)
# get umpire data
output['umpires'] = []
for u in root.find('umpires').findall('umpire'):
umpire = {}
for key in u.keys():
umpire[key] = u.get(key)
output['umpires'].append(umpire)
return output | def function[players, parameter[game_id]]:
constant[Gets player/coach/umpire information for the game with matching id.]
variable[data] assign[=] call[name[mlbgame].data.get_players, parameter[name[game_id]]]
variable[parsed] assign[=] call[name[etree].parse, parameter[name[data]]]
variable[root] assign[=] call[name[parsed].getroot, parameter[]]
variable[output] assign[=] dictionary[[], []]
call[name[output]][constant[game_id]] assign[=] name[game_id]
for taget[name[team]] in starred[call[name[root].findall, parameter[constant[team]]]] begin[:]
variable[type] assign[=] binary_operation[call[name[team].attrib][constant[type]] + constant[_team]]
call[name[output]][name[type]] assign[=] dictionary[[], []]
call[call[name[output]][name[type]]][constant[players]] assign[=] list[[]]
call[call[name[output]][name[type]]][constant[coaches]] assign[=] list[[]]
for taget[name[p]] in starred[call[name[team].findall, parameter[constant[player]]]] begin[:]
variable[player] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[p].keys, parameter[]]] begin[:]
call[name[player]][name[key]] assign[=] call[name[p].get, parameter[name[key]]]
call[call[call[name[output]][name[type]]][constant[players]].append, parameter[name[player]]]
for taget[name[c]] in starred[call[name[team].findall, parameter[constant[coach]]]] begin[:]
variable[coach] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[c].keys, parameter[]]] begin[:]
call[name[coach]][name[key]] assign[=] call[name[c].get, parameter[name[key]]]
call[call[call[name[output]][name[type]]][constant[coaches]].append, parameter[name[coach]]]
call[name[output]][constant[umpires]] assign[=] list[[]]
for taget[name[u]] in starred[call[call[name[root].find, parameter[constant[umpires]]].findall, parameter[constant[umpire]]]] begin[:]
variable[umpire] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[u].keys, parameter[]]] begin[:]
call[name[umpire]][name[key]] assign[=] call[name[u].get, parameter[name[key]]]
call[call[name[output]][constant[umpires]].append, parameter[name[umpire]]]
return[name[output]] | keyword[def] identifier[players] ( identifier[game_id] ):
literal[string]
identifier[data] = identifier[mlbgame] . identifier[data] . identifier[get_players] ( identifier[game_id] )
identifier[parsed] = identifier[etree] . identifier[parse] ( identifier[data] )
identifier[root] = identifier[parsed] . identifier[getroot] ()
identifier[output] ={}
identifier[output] [ literal[string] ]= identifier[game_id]
keyword[for] identifier[team] keyword[in] identifier[root] . identifier[findall] ( literal[string] ):
identifier[type] = identifier[team] . identifier[attrib] [ literal[string] ]+ literal[string]
identifier[output] [ identifier[type] ]={}
identifier[output] [ identifier[type] ][ literal[string] ]=[]
identifier[output] [ identifier[type] ][ literal[string] ]=[]
keyword[for] identifier[p] keyword[in] identifier[team] . identifier[findall] ( literal[string] ):
identifier[player] ={}
keyword[for] identifier[key] keyword[in] identifier[p] . identifier[keys] ():
identifier[player] [ identifier[key] ]= identifier[p] . identifier[get] ( identifier[key] )
identifier[output] [ identifier[type] ][ literal[string] ]. identifier[append] ( identifier[player] )
keyword[for] identifier[c] keyword[in] identifier[team] . identifier[findall] ( literal[string] ):
identifier[coach] ={}
keyword[for] identifier[key] keyword[in] identifier[c] . identifier[keys] ():
identifier[coach] [ identifier[key] ]= identifier[c] . identifier[get] ( identifier[key] )
identifier[output] [ identifier[type] ][ literal[string] ]. identifier[append] ( identifier[coach] )
identifier[output] [ literal[string] ]=[]
keyword[for] identifier[u] keyword[in] identifier[root] . identifier[find] ( literal[string] ). identifier[findall] ( literal[string] ):
identifier[umpire] ={}
keyword[for] identifier[key] keyword[in] identifier[u] . identifier[keys] ():
identifier[umpire] [ identifier[key] ]= identifier[u] . identifier[get] ( identifier[key] )
identifier[output] [ literal[string] ]. identifier[append] ( identifier[umpire] )
keyword[return] identifier[output] | def players(game_id):
"""Gets player/coach/umpire information for the game with matching id."""
# get data
data = mlbgame.data.get_players(game_id)
# parse data
parsed = etree.parse(data)
root = parsed.getroot()
output = {}
output['game_id'] = game_id
# get player/coach data
for team in root.findall('team'):
type = team.attrib['type'] + '_team'
# the type is either home_team or away_team
output[type] = {}
output[type]['players'] = []
output[type]['coaches'] = []
for p in team.findall('player'):
player = {}
for key in p.keys():
player[key] = p.get(key) # depends on [control=['for'], data=['key']]
output[type]['players'].append(player) # depends on [control=['for'], data=['p']]
for c in team.findall('coach'):
coach = {}
for key in c.keys():
coach[key] = c.get(key) # depends on [control=['for'], data=['key']]
output[type]['coaches'].append(coach) # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['team']]
# get umpire data
output['umpires'] = []
for u in root.find('umpires').findall('umpire'):
umpire = {}
for key in u.keys():
umpire[key] = u.get(key) # depends on [control=['for'], data=['key']]
output['umpires'].append(umpire) # depends on [control=['for'], data=['u']]
return output |
def to_dict(self):
"""Useful for providing arguments to templates.
:API: public
"""
ret = {}
for key in ['name', 'cmd', 'id', 'start_time', 'end_time',
'outcome', 'start_time_string', 'start_delta_string']:
val = getattr(self, key)
ret[key] = val() if hasattr(val, '__call__') else val
ret['parent'] = self.parent.to_dict() if self.parent else None
return ret | def function[to_dict, parameter[self]]:
constant[Useful for providing arguments to templates.
:API: public
]
variable[ret] assign[=] dictionary[[], []]
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da1b2279480>, <ast.Constant object at 0x7da1b227a740>, <ast.Constant object at 0x7da1b227b1f0>, <ast.Constant object at 0x7da1b227be20>, <ast.Constant object at 0x7da1b2278580>, <ast.Constant object at 0x7da1b2278c40>, <ast.Constant object at 0x7da1b2278ac0>, <ast.Constant object at 0x7da1b22798d0>]]] begin[:]
variable[val] assign[=] call[name[getattr], parameter[name[self], name[key]]]
call[name[ret]][name[key]] assign[=] <ast.IfExp object at 0x7da1b227bbb0>
call[name[ret]][constant[parent]] assign[=] <ast.IfExp object at 0x7da1b2279a80>
return[name[ret]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[ret] ={}
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ]:
identifier[val] = identifier[getattr] ( identifier[self] , identifier[key] )
identifier[ret] [ identifier[key] ]= identifier[val] () keyword[if] identifier[hasattr] ( identifier[val] , literal[string] ) keyword[else] identifier[val]
identifier[ret] [ literal[string] ]= identifier[self] . identifier[parent] . identifier[to_dict] () keyword[if] identifier[self] . identifier[parent] keyword[else] keyword[None]
keyword[return] identifier[ret] | def to_dict(self):
"""Useful for providing arguments to templates.
:API: public
"""
ret = {}
for key in ['name', 'cmd', 'id', 'start_time', 'end_time', 'outcome', 'start_time_string', 'start_delta_string']:
val = getattr(self, key)
ret[key] = val() if hasattr(val, '__call__') else val # depends on [control=['for'], data=['key']]
ret['parent'] = self.parent.to_dict() if self.parent else None
return ret |
def _mysql_aes_unpad(val):
"""Reverse padding."""
val = _to_string(val)
pad_value = ord(val[-1])
return val[:-pad_value] | def function[_mysql_aes_unpad, parameter[val]]:
constant[Reverse padding.]
variable[val] assign[=] call[name[_to_string], parameter[name[val]]]
variable[pad_value] assign[=] call[name[ord], parameter[call[name[val]][<ast.UnaryOp object at 0x7da2044c25f0>]]]
return[call[name[val]][<ast.Slice object at 0x7da2044c09d0>]] | keyword[def] identifier[_mysql_aes_unpad] ( identifier[val] ):
literal[string]
identifier[val] = identifier[_to_string] ( identifier[val] )
identifier[pad_value] = identifier[ord] ( identifier[val] [- literal[int] ])
keyword[return] identifier[val] [:- identifier[pad_value] ] | def _mysql_aes_unpad(val):
"""Reverse padding."""
val = _to_string(val)
pad_value = ord(val[-1])
return val[:-pad_value] |
def get_labels(self, depth=None):
"""
Returns a list of labels created by this reference.
Parameters
----------
depth : integer or ``None``
If not ``None``, defines from how many reference levels to
retrieve labels from.
Returns
-------
out : list of ``Label``
List containing the labels in this cell and its references.
"""
if not isinstance(self.ref_cell, Cell):
return []
if self.rotation is not None:
ct = numpy.cos(self.rotation * numpy.pi / 180.0)
st = numpy.sin(self.rotation * numpy.pi / 180.0)
st = numpy.array([-st, st])
if self.x_reflection:
xrefl = numpy.array([1, -1], dtype='int')
if self.magnification is not None:
mag = numpy.array([self.magnification, self.magnification])
if self.origin is not None:
orgn = numpy.array(self.origin)
labels = self.ref_cell.get_labels(depth=depth)
for lbl in labels:
if self.x_reflection:
lbl.position = lbl.position * xrefl
if self.magnification is not None:
lbl.position = lbl.position * mag
if self.rotation is not None:
lbl.position = lbl.position * ct + lbl.position[::-1] * st
if self.origin is not None:
lbl.position = lbl.position + orgn
return labels | def function[get_labels, parameter[self, depth]]:
constant[
Returns a list of labels created by this reference.
Parameters
----------
depth : integer or ``None``
If not ``None``, defines from how many reference levels to
retrieve labels from.
Returns
-------
out : list of ``Label``
List containing the labels in this cell and its references.
]
if <ast.UnaryOp object at 0x7da20c76fc70> begin[:]
return[list[[]]]
if compare[name[self].rotation is_not constant[None]] begin[:]
variable[ct] assign[=] call[name[numpy].cos, parameter[binary_operation[binary_operation[name[self].rotation * name[numpy].pi] / constant[180.0]]]]
variable[st] assign[=] call[name[numpy].sin, parameter[binary_operation[binary_operation[name[self].rotation * name[numpy].pi] / constant[180.0]]]]
variable[st] assign[=] call[name[numpy].array, parameter[list[[<ast.UnaryOp object at 0x7da20c76f400>, <ast.Name object at 0x7da20c76eb60>]]]]
if name[self].x_reflection begin[:]
variable[xrefl] assign[=] call[name[numpy].array, parameter[list[[<ast.Constant object at 0x7da20c76dfc0>, <ast.UnaryOp object at 0x7da20c76ef80>]]]]
if compare[name[self].magnification is_not constant[None]] begin[:]
variable[mag] assign[=] call[name[numpy].array, parameter[list[[<ast.Attribute object at 0x7da20c76c5b0>, <ast.Attribute object at 0x7da20c76c760>]]]]
if compare[name[self].origin is_not constant[None]] begin[:]
variable[orgn] assign[=] call[name[numpy].array, parameter[name[self].origin]]
variable[labels] assign[=] call[name[self].ref_cell.get_labels, parameter[]]
for taget[name[lbl]] in starred[name[labels]] begin[:]
if name[self].x_reflection begin[:]
name[lbl].position assign[=] binary_operation[name[lbl].position * name[xrefl]]
if compare[name[self].magnification is_not constant[None]] begin[:]
name[lbl].position assign[=] binary_operation[name[lbl].position * name[mag]]
if compare[name[self].rotation is_not constant[None]] begin[:]
name[lbl].position assign[=] binary_operation[binary_operation[name[lbl].position * name[ct]] + binary_operation[call[name[lbl].position][<ast.Slice object at 0x7da20c76ce80>] * name[st]]]
if compare[name[self].origin is_not constant[None]] begin[:]
name[lbl].position assign[=] binary_operation[name[lbl].position + name[orgn]]
return[name[labels]] | keyword[def] identifier[get_labels] ( identifier[self] , identifier[depth] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[ref_cell] , identifier[Cell] ):
keyword[return] []
keyword[if] identifier[self] . identifier[rotation] keyword[is] keyword[not] keyword[None] :
identifier[ct] = identifier[numpy] . identifier[cos] ( identifier[self] . identifier[rotation] * identifier[numpy] . identifier[pi] / literal[int] )
identifier[st] = identifier[numpy] . identifier[sin] ( identifier[self] . identifier[rotation] * identifier[numpy] . identifier[pi] / literal[int] )
identifier[st] = identifier[numpy] . identifier[array] ([- identifier[st] , identifier[st] ])
keyword[if] identifier[self] . identifier[x_reflection] :
identifier[xrefl] = identifier[numpy] . identifier[array] ([ literal[int] ,- literal[int] ], identifier[dtype] = literal[string] )
keyword[if] identifier[self] . identifier[magnification] keyword[is] keyword[not] keyword[None] :
identifier[mag] = identifier[numpy] . identifier[array] ([ identifier[self] . identifier[magnification] , identifier[self] . identifier[magnification] ])
keyword[if] identifier[self] . identifier[origin] keyword[is] keyword[not] keyword[None] :
identifier[orgn] = identifier[numpy] . identifier[array] ( identifier[self] . identifier[origin] )
identifier[labels] = identifier[self] . identifier[ref_cell] . identifier[get_labels] ( identifier[depth] = identifier[depth] )
keyword[for] identifier[lbl] keyword[in] identifier[labels] :
keyword[if] identifier[self] . identifier[x_reflection] :
identifier[lbl] . identifier[position] = identifier[lbl] . identifier[position] * identifier[xrefl]
keyword[if] identifier[self] . identifier[magnification] keyword[is] keyword[not] keyword[None] :
identifier[lbl] . identifier[position] = identifier[lbl] . identifier[position] * identifier[mag]
keyword[if] identifier[self] . identifier[rotation] keyword[is] keyword[not] keyword[None] :
identifier[lbl] . identifier[position] = identifier[lbl] . identifier[position] * identifier[ct] + identifier[lbl] . identifier[position] [::- literal[int] ]* identifier[st]
keyword[if] identifier[self] . identifier[origin] keyword[is] keyword[not] keyword[None] :
identifier[lbl] . identifier[position] = identifier[lbl] . identifier[position] + identifier[orgn]
keyword[return] identifier[labels] | def get_labels(self, depth=None):
"""
Returns a list of labels created by this reference.
Parameters
----------
depth : integer or ``None``
If not ``None``, defines from how many reference levels to
retrieve labels from.
Returns
-------
out : list of ``Label``
List containing the labels in this cell and its references.
"""
if not isinstance(self.ref_cell, Cell):
return [] # depends on [control=['if'], data=[]]
if self.rotation is not None:
ct = numpy.cos(self.rotation * numpy.pi / 180.0)
st = numpy.sin(self.rotation * numpy.pi / 180.0)
st = numpy.array([-st, st]) # depends on [control=['if'], data=[]]
if self.x_reflection:
xrefl = numpy.array([1, -1], dtype='int') # depends on [control=['if'], data=[]]
if self.magnification is not None:
mag = numpy.array([self.magnification, self.magnification]) # depends on [control=['if'], data=[]]
if self.origin is not None:
orgn = numpy.array(self.origin) # depends on [control=['if'], data=[]]
labels = self.ref_cell.get_labels(depth=depth)
for lbl in labels:
if self.x_reflection:
lbl.position = lbl.position * xrefl # depends on [control=['if'], data=[]]
if self.magnification is not None:
lbl.position = lbl.position * mag # depends on [control=['if'], data=[]]
if self.rotation is not None:
lbl.position = lbl.position * ct + lbl.position[::-1] * st # depends on [control=['if'], data=[]]
if self.origin is not None:
lbl.position = lbl.position + orgn # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['lbl']]
return labels |
def fetch(self, card_id, data={}, **kwargs):
""""
Fetch Card for given Id
Args:
card_id : Id for which card object has to be retrieved
Returns:
Card dict for given card Id
"""
return super(Card, self).fetch(card_id, data, **kwargs) | def function[fetch, parameter[self, card_id, data]]:
constant["
Fetch Card for given Id
Args:
card_id : Id for which card object has to be retrieved
Returns:
Card dict for given card Id
]
return[call[call[name[super], parameter[name[Card], name[self]]].fetch, parameter[name[card_id], name[data]]]] | keyword[def] identifier[fetch] ( identifier[self] , identifier[card_id] , identifier[data] ={},** identifier[kwargs] ):
literal[string]
keyword[return] identifier[super] ( identifier[Card] , identifier[self] ). identifier[fetch] ( identifier[card_id] , identifier[data] ,** identifier[kwargs] ) | def fetch(self, card_id, data={}, **kwargs):
""""
Fetch Card for given Id
Args:
card_id : Id for which card object has to be retrieved
Returns:
Card dict for given card Id
"""
return super(Card, self).fetch(card_id, data, **kwargs) |
def getWorksheetServices(self):
"""get list of analysis services present on this worksheet
"""
services = []
for analysis in self.getAnalyses():
service = analysis.getAnalysisService()
if service and service not in services:
services.append(service)
return services | def function[getWorksheetServices, parameter[self]]:
constant[get list of analysis services present on this worksheet
]
variable[services] assign[=] list[[]]
for taget[name[analysis]] in starred[call[name[self].getAnalyses, parameter[]]] begin[:]
variable[service] assign[=] call[name[analysis].getAnalysisService, parameter[]]
if <ast.BoolOp object at 0x7da1b2346e00> begin[:]
call[name[services].append, parameter[name[service]]]
return[name[services]] | keyword[def] identifier[getWorksheetServices] ( identifier[self] ):
literal[string]
identifier[services] =[]
keyword[for] identifier[analysis] keyword[in] identifier[self] . identifier[getAnalyses] ():
identifier[service] = identifier[analysis] . identifier[getAnalysisService] ()
keyword[if] identifier[service] keyword[and] identifier[service] keyword[not] keyword[in] identifier[services] :
identifier[services] . identifier[append] ( identifier[service] )
keyword[return] identifier[services] | def getWorksheetServices(self):
"""get list of analysis services present on this worksheet
"""
services = []
for analysis in self.getAnalyses():
service = analysis.getAnalysisService()
if service and service not in services:
services.append(service) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['analysis']]
return services |
def getHelpFileAsString(taskname,taskpath):
"""
This functions will return useful help as a string read from a file
in the task's installed directory called "<module>.help".
If no such file can be found, it will simply return an empty string.
Notes
-----
The location of the actual help file will be found under the task's
installed directory using 'irafutils.rglob' to search all sub-dirs to
find the file. This allows the help file to be either in the tasks
installed directory or in any sub-directory, such as a "help/" directory.
Parameters
----------
taskname: string
Value of `__taskname__` for a module/task
taskpath: string
Value of `__file__` for an installed module which defines the task
Returns
-------
helpString: string
multi-line string read from the file '<taskname>.help'
"""
#get the local library directory where the code is stored
pathsplit=os.path.split(taskpath) # taskpath should be task's __file__
if taskname.find('.') > -1: # if taskname is given as package.taskname...
helpname=taskname.split(".")[1] # taskname should be __taskname__ from task's module
else:
helpname = taskname
localdir = pathsplit[0]
if localdir == '':
localdir = '.'
helpfile=rglob(localdir,helpname+".help")[0]
if os.access(helpfile,os.R_OK):
fh=open(helpfile,'r')
ss=fh.readlines()
fh.close()
helpString=""
for line in ss:
helpString+=line
else:
helpString= ''
return helpString | def function[getHelpFileAsString, parameter[taskname, taskpath]]:
constant[
This functions will return useful help as a string read from a file
in the task's installed directory called "<module>.help".
If no such file can be found, it will simply return an empty string.
Notes
-----
The location of the actual help file will be found under the task's
installed directory using 'irafutils.rglob' to search all sub-dirs to
find the file. This allows the help file to be either in the tasks
installed directory or in any sub-directory, such as a "help/" directory.
Parameters
----------
taskname: string
Value of `__taskname__` for a module/task
taskpath: string
Value of `__file__` for an installed module which defines the task
Returns
-------
helpString: string
multi-line string read from the file '<taskname>.help'
]
variable[pathsplit] assign[=] call[name[os].path.split, parameter[name[taskpath]]]
if compare[call[name[taskname].find, parameter[constant[.]]] greater[>] <ast.UnaryOp object at 0x7da18f812b00>] begin[:]
variable[helpname] assign[=] call[call[name[taskname].split, parameter[constant[.]]]][constant[1]]
variable[localdir] assign[=] call[name[pathsplit]][constant[0]]
if compare[name[localdir] equal[==] constant[]] begin[:]
variable[localdir] assign[=] constant[.]
variable[helpfile] assign[=] call[call[name[rglob], parameter[name[localdir], binary_operation[name[helpname] + constant[.help]]]]][constant[0]]
if call[name[os].access, parameter[name[helpfile], name[os].R_OK]] begin[:]
variable[fh] assign[=] call[name[open], parameter[name[helpfile], constant[r]]]
variable[ss] assign[=] call[name[fh].readlines, parameter[]]
call[name[fh].close, parameter[]]
variable[helpString] assign[=] constant[]
for taget[name[line]] in starred[name[ss]] begin[:]
<ast.AugAssign object at 0x7da1b0e81120>
return[name[helpString]] | keyword[def] identifier[getHelpFileAsString] ( identifier[taskname] , identifier[taskpath] ):
literal[string]
identifier[pathsplit] = identifier[os] . identifier[path] . identifier[split] ( identifier[taskpath] )
keyword[if] identifier[taskname] . identifier[find] ( literal[string] )>- literal[int] :
identifier[helpname] = identifier[taskname] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[else] :
identifier[helpname] = identifier[taskname]
identifier[localdir] = identifier[pathsplit] [ literal[int] ]
keyword[if] identifier[localdir] == literal[string] :
identifier[localdir] = literal[string]
identifier[helpfile] = identifier[rglob] ( identifier[localdir] , identifier[helpname] + literal[string] )[ literal[int] ]
keyword[if] identifier[os] . identifier[access] ( identifier[helpfile] , identifier[os] . identifier[R_OK] ):
identifier[fh] = identifier[open] ( identifier[helpfile] , literal[string] )
identifier[ss] = identifier[fh] . identifier[readlines] ()
identifier[fh] . identifier[close] ()
identifier[helpString] = literal[string]
keyword[for] identifier[line] keyword[in] identifier[ss] :
identifier[helpString] += identifier[line]
keyword[else] :
identifier[helpString] = literal[string]
keyword[return] identifier[helpString] | def getHelpFileAsString(taskname, taskpath):
"""
This functions will return useful help as a string read from a file
in the task's installed directory called "<module>.help".
If no such file can be found, it will simply return an empty string.
Notes
-----
The location of the actual help file will be found under the task's
installed directory using 'irafutils.rglob' to search all sub-dirs to
find the file. This allows the help file to be either in the tasks
installed directory or in any sub-directory, such as a "help/" directory.
Parameters
----------
taskname: string
Value of `__taskname__` for a module/task
taskpath: string
Value of `__file__` for an installed module which defines the task
Returns
-------
helpString: string
multi-line string read from the file '<taskname>.help'
"""
#get the local library directory where the code is stored
pathsplit = os.path.split(taskpath) # taskpath should be task's __file__
if taskname.find('.') > -1: # if taskname is given as package.taskname...
helpname = taskname.split('.')[1] # taskname should be __taskname__ from task's module # depends on [control=['if'], data=[]]
else:
helpname = taskname
localdir = pathsplit[0]
if localdir == '':
localdir = '.' # depends on [control=['if'], data=['localdir']]
helpfile = rglob(localdir, helpname + '.help')[0]
if os.access(helpfile, os.R_OK):
fh = open(helpfile, 'r')
ss = fh.readlines()
fh.close()
helpString = ''
for line in ss:
helpString += line # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
else:
helpString = ''
return helpString |
def ELBND(w, e, function="max"):
"""
This function estimates Error and Learning Based Novelty Detection measure
from given data.
**Args:**
* `w` : history of adaptive parameters of an adaptive model (2d array),
every row represents parameters in given time index.
* `e` : error of adaptive model (1d array)
**Kwargs:**
* `functions` : output function (str). The way how to produce single
value for every sample (from all parameters)
* `max` - maximal value
* `sum` - sum of values
**Returns:**
* ELBND values (1d array). This vector has same lenght as `w`.
"""
# check if the function is known
if not function in ["max", "sum"]:
raise ValueError('Unknown output function')
# get length of data and number of parameters
N = w.shape[0]
n = w.shape[1]
# get abs dw from w
dw = np.zeros(w.shape)
dw[:-1] = np.abs(np.diff(w, axis=0))
# absolute values of product of increments and error
a = np.random.random((5,2))
b = a.T*np.array([1,2,3,4,5])
elbnd = np.abs((dw.T*e).T)
# apply output function
if function == "max":
elbnd = np.max(elbnd, axis=1)
elif function == "sum":
elbnd = np.sum(elbnd, axis=1)
# return output
return elbnd | def function[ELBND, parameter[w, e, function]]:
constant[
This function estimates Error and Learning Based Novelty Detection measure
from given data.
**Args:**
* `w` : history of adaptive parameters of an adaptive model (2d array),
every row represents parameters in given time index.
* `e` : error of adaptive model (1d array)
**Kwargs:**
* `functions` : output function (str). The way how to produce single
value for every sample (from all parameters)
* `max` - maximal value
* `sum` - sum of values
**Returns:**
* ELBND values (1d array). This vector has same lenght as `w`.
]
if <ast.UnaryOp object at 0x7da1b0ef1cc0> begin[:]
<ast.Raise object at 0x7da1b0ef2cb0>
variable[N] assign[=] call[name[w].shape][constant[0]]
variable[n] assign[=] call[name[w].shape][constant[1]]
variable[dw] assign[=] call[name[np].zeros, parameter[name[w].shape]]
call[name[dw]][<ast.Slice object at 0x7da1b0ef1ff0>] assign[=] call[name[np].abs, parameter[call[name[np].diff, parameter[name[w]]]]]
variable[a] assign[=] call[name[np].random.random, parameter[tuple[[<ast.Constant object at 0x7da1b0ef1870>, <ast.Constant object at 0x7da1b0ef1ba0>]]]]
variable[b] assign[=] binary_operation[name[a].T * call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b0ef2980>, <ast.Constant object at 0x7da1b0ef20e0>, <ast.Constant object at 0x7da1b0ef1ea0>, <ast.Constant object at 0x7da1b0ef28f0>, <ast.Constant object at 0x7da1b0ef17b0>]]]]]
variable[elbnd] assign[=] call[name[np].abs, parameter[binary_operation[name[dw].T * name[e]].T]]
if compare[name[function] equal[==] constant[max]] begin[:]
variable[elbnd] assign[=] call[name[np].max, parameter[name[elbnd]]]
return[name[elbnd]] | keyword[def] identifier[ELBND] ( identifier[w] , identifier[e] , identifier[function] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[function] keyword[in] [ literal[string] , literal[string] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[N] = identifier[w] . identifier[shape] [ literal[int] ]
identifier[n] = identifier[w] . identifier[shape] [ literal[int] ]
identifier[dw] = identifier[np] . identifier[zeros] ( identifier[w] . identifier[shape] )
identifier[dw] [:- literal[int] ]= identifier[np] . identifier[abs] ( identifier[np] . identifier[diff] ( identifier[w] , identifier[axis] = literal[int] ))
identifier[a] = identifier[np] . identifier[random] . identifier[random] (( literal[int] , literal[int] ))
identifier[b] = identifier[a] . identifier[T] * identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[elbnd] = identifier[np] . identifier[abs] (( identifier[dw] . identifier[T] * identifier[e] ). identifier[T] )
keyword[if] identifier[function] == literal[string] :
identifier[elbnd] = identifier[np] . identifier[max] ( identifier[elbnd] , identifier[axis] = literal[int] )
keyword[elif] identifier[function] == literal[string] :
identifier[elbnd] = identifier[np] . identifier[sum] ( identifier[elbnd] , identifier[axis] = literal[int] )
keyword[return] identifier[elbnd] | def ELBND(w, e, function='max'):
"""
This function estimates Error and Learning Based Novelty Detection measure
from given data.
**Args:**
* `w` : history of adaptive parameters of an adaptive model (2d array),
every row represents parameters in given time index.
* `e` : error of adaptive model (1d array)
**Kwargs:**
* `functions` : output function (str). The way how to produce single
value for every sample (from all parameters)
* `max` - maximal value
* `sum` - sum of values
**Returns:**
* ELBND values (1d array). This vector has same lenght as `w`.
"""
# check if the function is known
if not function in ['max', 'sum']:
raise ValueError('Unknown output function') # depends on [control=['if'], data=[]]
# get length of data and number of parameters
N = w.shape[0]
n = w.shape[1]
# get abs dw from w
dw = np.zeros(w.shape)
dw[:-1] = np.abs(np.diff(w, axis=0))
# absolute values of product of increments and error
a = np.random.random((5, 2))
b = a.T * np.array([1, 2, 3, 4, 5])
elbnd = np.abs((dw.T * e).T)
# apply output function
if function == 'max':
elbnd = np.max(elbnd, axis=1) # depends on [control=['if'], data=[]]
elif function == 'sum':
elbnd = np.sum(elbnd, axis=1) # depends on [control=['if'], data=[]]
# return output
return elbnd |
def _accept_as_blank(self, url_info: URLInfo):
'''Mark the URL as OK in the pool.'''
_logger.debug(__('Got empty robots.txt for {0}.', url_info.url))
self._robots_txt_pool.load_robots_txt(url_info, '') | def function[_accept_as_blank, parameter[self, url_info]]:
constant[Mark the URL as OK in the pool.]
call[name[_logger].debug, parameter[call[name[__], parameter[constant[Got empty robots.txt for {0}.], name[url_info].url]]]]
call[name[self]._robots_txt_pool.load_robots_txt, parameter[name[url_info], constant[]]] | keyword[def] identifier[_accept_as_blank] ( identifier[self] , identifier[url_info] : identifier[URLInfo] ):
literal[string]
identifier[_logger] . identifier[debug] ( identifier[__] ( literal[string] , identifier[url_info] . identifier[url] ))
identifier[self] . identifier[_robots_txt_pool] . identifier[load_robots_txt] ( identifier[url_info] , literal[string] ) | def _accept_as_blank(self, url_info: URLInfo):
"""Mark the URL as OK in the pool."""
_logger.debug(__('Got empty robots.txt for {0}.', url_info.url))
self._robots_txt_pool.load_robots_txt(url_info, '') |
def append_formula(self, formula, no_return=True):
"""
Appends list of clauses to solver's internal formula.
"""
if self.maplesat:
res = None
for clause in formula:
res = self.add_clause(clause, no_return)
if not no_return:
return res | def function[append_formula, parameter[self, formula, no_return]]:
constant[
Appends list of clauses to solver's internal formula.
]
if name[self].maplesat begin[:]
variable[res] assign[=] constant[None]
for taget[name[clause]] in starred[name[formula]] begin[:]
variable[res] assign[=] call[name[self].add_clause, parameter[name[clause], name[no_return]]]
if <ast.UnaryOp object at 0x7da1b128a320> begin[:]
return[name[res]] | keyword[def] identifier[append_formula] ( identifier[self] , identifier[formula] , identifier[no_return] = keyword[True] ):
literal[string]
keyword[if] identifier[self] . identifier[maplesat] :
identifier[res] = keyword[None]
keyword[for] identifier[clause] keyword[in] identifier[formula] :
identifier[res] = identifier[self] . identifier[add_clause] ( identifier[clause] , identifier[no_return] )
keyword[if] keyword[not] identifier[no_return] :
keyword[return] identifier[res] | def append_formula(self, formula, no_return=True):
"""
Appends list of clauses to solver's internal formula.
"""
if self.maplesat:
res = None
for clause in formula:
res = self.add_clause(clause, no_return) # depends on [control=['for'], data=['clause']]
if not no_return:
return res # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def _make_completed_functions(self):
"""
Fill in self._completed_functions list and clean up job manager.
:return: None
"""
finished = self._get_finished_functions()
for func_addr in finished:
self._completed_functions.add(func_addr)
self._cleanup_analysis_jobs(finished_func_addrs=finished) | def function[_make_completed_functions, parameter[self]]:
constant[
Fill in self._completed_functions list and clean up job manager.
:return: None
]
variable[finished] assign[=] call[name[self]._get_finished_functions, parameter[]]
for taget[name[func_addr]] in starred[name[finished]] begin[:]
call[name[self]._completed_functions.add, parameter[name[func_addr]]]
call[name[self]._cleanup_analysis_jobs, parameter[]] | keyword[def] identifier[_make_completed_functions] ( identifier[self] ):
literal[string]
identifier[finished] = identifier[self] . identifier[_get_finished_functions] ()
keyword[for] identifier[func_addr] keyword[in] identifier[finished] :
identifier[self] . identifier[_completed_functions] . identifier[add] ( identifier[func_addr] )
identifier[self] . identifier[_cleanup_analysis_jobs] ( identifier[finished_func_addrs] = identifier[finished] ) | def _make_completed_functions(self):
"""
Fill in self._completed_functions list and clean up job manager.
:return: None
"""
finished = self._get_finished_functions()
for func_addr in finished:
self._completed_functions.add(func_addr) # depends on [control=['for'], data=['func_addr']]
self._cleanup_analysis_jobs(finished_func_addrs=finished) |
def hash_args(*args, **kwargs):
"""Define a unique string for any set of representable args."""
arg_string = '_'.join([str(arg) for arg in args])
kwarg_string = '_'.join([str(key) + '=' + str(value)
for key, value in iteritems(kwargs)])
combined = ':'.join([arg_string, kwarg_string])
hasher = md5()
hasher.update(b(combined))
return hasher.hexdigest() | def function[hash_args, parameter[]]:
constant[Define a unique string for any set of representable args.]
variable[arg_string] assign[=] call[constant[_].join, parameter[<ast.ListComp object at 0x7da1b1eba050>]]
variable[kwarg_string] assign[=] call[constant[_].join, parameter[<ast.ListComp object at 0x7da1b1eb9f30>]]
variable[combined] assign[=] call[constant[:].join, parameter[list[[<ast.Name object at 0x7da1b1eb9ab0>, <ast.Name object at 0x7da1b1ea1300>]]]]
variable[hasher] assign[=] call[name[md5], parameter[]]
call[name[hasher].update, parameter[call[name[b], parameter[name[combined]]]]]
return[call[name[hasher].hexdigest, parameter[]]] | keyword[def] identifier[hash_args] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[arg_string] = literal[string] . identifier[join] ([ identifier[str] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[args] ])
identifier[kwarg_string] = literal[string] . identifier[join] ([ identifier[str] ( identifier[key] )+ literal[string] + identifier[str] ( identifier[value] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[kwargs] )])
identifier[combined] = literal[string] . identifier[join] ([ identifier[arg_string] , identifier[kwarg_string] ])
identifier[hasher] = identifier[md5] ()
identifier[hasher] . identifier[update] ( identifier[b] ( identifier[combined] ))
keyword[return] identifier[hasher] . identifier[hexdigest] () | def hash_args(*args, **kwargs):
"""Define a unique string for any set of representable args."""
arg_string = '_'.join([str(arg) for arg in args])
kwarg_string = '_'.join([str(key) + '=' + str(value) for (key, value) in iteritems(kwargs)])
combined = ':'.join([arg_string, kwarg_string])
hasher = md5()
hasher.update(b(combined))
return hasher.hexdigest() |
def get_plugins(namespace, interface=None, check_extras=True, load_now=False):
"""
helper to get a direct interface to _Plugins
"""
return _DB.add_namespace(namespace, interface, check_extras, load_now) | def function[get_plugins, parameter[namespace, interface, check_extras, load_now]]:
constant[
helper to get a direct interface to _Plugins
]
return[call[name[_DB].add_namespace, parameter[name[namespace], name[interface], name[check_extras], name[load_now]]]] | keyword[def] identifier[get_plugins] ( identifier[namespace] , identifier[interface] = keyword[None] , identifier[check_extras] = keyword[True] , identifier[load_now] = keyword[False] ):
literal[string]
keyword[return] identifier[_DB] . identifier[add_namespace] ( identifier[namespace] , identifier[interface] , identifier[check_extras] , identifier[load_now] ) | def get_plugins(namespace, interface=None, check_extras=True, load_now=False):
"""
helper to get a direct interface to _Plugins
"""
return _DB.add_namespace(namespace, interface, check_extras, load_now) |
def post(self, request, format=None):
""" validate password change operation and return result """
serializer_class = self.get_serializer_class()
serializer = serializer_class(data=request.data, instance=request.user)
if serializer.is_valid():
serializer.save()
return Response({'detail': _(u'Password successfully changed')})
return Response(serializer.errors, status=400) | def function[post, parameter[self, request, format]]:
constant[ validate password change operation and return result ]
variable[serializer_class] assign[=] call[name[self].get_serializer_class, parameter[]]
variable[serializer] assign[=] call[name[serializer_class], parameter[]]
if call[name[serializer].is_valid, parameter[]] begin[:]
call[name[serializer].save, parameter[]]
return[call[name[Response], parameter[dictionary[[<ast.Constant object at 0x7da20c990820>], [<ast.Call object at 0x7da20c991c60>]]]]]
return[call[name[Response], parameter[name[serializer].errors]]] | keyword[def] identifier[post] ( identifier[self] , identifier[request] , identifier[format] = keyword[None] ):
literal[string]
identifier[serializer_class] = identifier[self] . identifier[get_serializer_class] ()
identifier[serializer] = identifier[serializer_class] ( identifier[data] = identifier[request] . identifier[data] , identifier[instance] = identifier[request] . identifier[user] )
keyword[if] identifier[serializer] . identifier[is_valid] ():
identifier[serializer] . identifier[save] ()
keyword[return] identifier[Response] ({ literal[string] : identifier[_] ( literal[string] )})
keyword[return] identifier[Response] ( identifier[serializer] . identifier[errors] , identifier[status] = literal[int] ) | def post(self, request, format=None):
""" validate password change operation and return result """
serializer_class = self.get_serializer_class()
serializer = serializer_class(data=request.data, instance=request.user)
if serializer.is_valid():
serializer.save()
return Response({'detail': _(u'Password successfully changed')}) # depends on [control=['if'], data=[]]
return Response(serializer.errors, status=400) |
def _BuildPluginRequest(self, app_id, challenge_data, origin):
    """Builds a JSON request in the form that the plugin expects."""
    client_data_map = {}
    sign_data = []
    hashed_app_id = self._Base64Encode(self._SHA256(app_id))
    for item in challenge_data:
        registered_key = item['key']
        encoded_key_handle = self._Base64Encode(registered_key.key_handle)
        client_data = model.ClientData(
            model.ClientData.TYP_AUTHENTICATION,
            item['challenge'],
            origin).GetJson()
        encoded_challenge_hash = self._Base64Encode(
            self._SHA256(client_data))
        # One sign-helper entry per registered key / challenge pair.
        sign_data.append({
            'appIdHash': hashed_app_id,
            'challengeHash': encoded_challenge_hash,
            'keyHandle': encoded_key_handle,
            'version': registered_key.version,
        })
        # Keep the ClientData JSON so the caller can recover it later,
        # keyed by the (key handle, challenge hash) pair.
        client_data_map[(encoded_key_handle, encoded_challenge_hash)] = (
            client_data)
    signing_request = {
        'type': 'sign_helper_request',
        'signData': sign_data,
        'timeoutSeconds': U2F_SIGNATURE_TIMEOUT_SECONDS,
        'localAlways': True
    }
    return client_data_map, json.dumps(signing_request)
constant[Builds a JSON request in the form that the plugin expects.]
variable[client_data_map] assign[=] dictionary[[], []]
variable[encoded_challenges] assign[=] list[[]]
variable[app_id_hash_encoded] assign[=] call[name[self]._Base64Encode, parameter[call[name[self]._SHA256, parameter[name[app_id]]]]]
for taget[name[challenge_item]] in starred[name[challenge_data]] begin[:]
variable[key] assign[=] call[name[challenge_item]][constant[key]]
variable[key_handle_encoded] assign[=] call[name[self]._Base64Encode, parameter[name[key].key_handle]]
variable[raw_challenge] assign[=] call[name[challenge_item]][constant[challenge]]
variable[client_data_json] assign[=] call[call[name[model].ClientData, parameter[name[model].ClientData.TYP_AUTHENTICATION, name[raw_challenge], name[origin]]].GetJson, parameter[]]
variable[challenge_hash_encoded] assign[=] call[name[self]._Base64Encode, parameter[call[name[self]._SHA256, parameter[name[client_data_json]]]]]
call[name[encoded_challenges].append, parameter[dictionary[[<ast.Constant object at 0x7da18fe91c60>, <ast.Constant object at 0x7da18fe905e0>, <ast.Constant object at 0x7da18fe92b90>, <ast.Constant object at 0x7da18fe91090>], [<ast.Name object at 0x7da18fe93430>, <ast.Name object at 0x7da18fe904c0>, <ast.Name object at 0x7da18fe92770>, <ast.Attribute object at 0x7da18fe91e40>]]]]
variable[key_challenge_pair] assign[=] tuple[[<ast.Name object at 0x7da18fe91fc0>, <ast.Name object at 0x7da18fe92e00>]]
call[name[client_data_map]][name[key_challenge_pair]] assign[=] name[client_data_json]
variable[signing_request] assign[=] dictionary[[<ast.Constant object at 0x7da18fe91de0>, <ast.Constant object at 0x7da18fe92170>, <ast.Constant object at 0x7da18fe90e20>, <ast.Constant object at 0x7da18fe90250>], [<ast.Constant object at 0x7da18fe92290>, <ast.Name object at 0x7da18fe93ee0>, <ast.Name object at 0x7da18fe91690>, <ast.Constant object at 0x7da18fe91390>]]
return[tuple[[<ast.Name object at 0x7da1b196e7a0>, <ast.Call object at 0x7da1b196fe20>]]] | keyword[def] identifier[_BuildPluginRequest] ( identifier[self] , identifier[app_id] , identifier[challenge_data] , identifier[origin] ):
literal[string]
identifier[client_data_map] ={}
identifier[encoded_challenges] =[]
identifier[app_id_hash_encoded] = identifier[self] . identifier[_Base64Encode] ( identifier[self] . identifier[_SHA256] ( identifier[app_id] ))
keyword[for] identifier[challenge_item] keyword[in] identifier[challenge_data] :
identifier[key] = identifier[challenge_item] [ literal[string] ]
identifier[key_handle_encoded] = identifier[self] . identifier[_Base64Encode] ( identifier[key] . identifier[key_handle] )
identifier[raw_challenge] = identifier[challenge_item] [ literal[string] ]
identifier[client_data_json] = identifier[model] . identifier[ClientData] (
identifier[model] . identifier[ClientData] . identifier[TYP_AUTHENTICATION] ,
identifier[raw_challenge] ,
identifier[origin] ). identifier[GetJson] ()
identifier[challenge_hash_encoded] = identifier[self] . identifier[_Base64Encode] (
identifier[self] . identifier[_SHA256] ( identifier[client_data_json] ))
identifier[encoded_challenges] . identifier[append] ({
literal[string] : identifier[app_id_hash_encoded] ,
literal[string] : identifier[challenge_hash_encoded] ,
literal[string] : identifier[key_handle_encoded] ,
literal[string] : identifier[key] . identifier[version] ,
})
identifier[key_challenge_pair] =( identifier[key_handle_encoded] , identifier[challenge_hash_encoded] )
identifier[client_data_map] [ identifier[key_challenge_pair] ]= identifier[client_data_json]
identifier[signing_request] ={
literal[string] : literal[string] ,
literal[string] : identifier[encoded_challenges] ,
literal[string] : identifier[U2F_SIGNATURE_TIMEOUT_SECONDS] ,
literal[string] : keyword[True]
}
keyword[return] identifier[client_data_map] , identifier[json] . identifier[dumps] ( identifier[signing_request] ) | def _BuildPluginRequest(self, app_id, challenge_data, origin):
"""Builds a JSON request in the form that the plugin expects."""
client_data_map = {}
encoded_challenges = []
app_id_hash_encoded = self._Base64Encode(self._SHA256(app_id))
for challenge_item in challenge_data:
key = challenge_item['key']
key_handle_encoded = self._Base64Encode(key.key_handle)
raw_challenge = challenge_item['challenge']
client_data_json = model.ClientData(model.ClientData.TYP_AUTHENTICATION, raw_challenge, origin).GetJson()
challenge_hash_encoded = self._Base64Encode(self._SHA256(client_data_json))
# Populate challenges list
encoded_challenges.append({'appIdHash': app_id_hash_encoded, 'challengeHash': challenge_hash_encoded, 'keyHandle': key_handle_encoded, 'version': key.version})
# Populate ClientData map
key_challenge_pair = (key_handle_encoded, challenge_hash_encoded)
client_data_map[key_challenge_pair] = client_data_json # depends on [control=['for'], data=['challenge_item']]
signing_request = {'type': 'sign_helper_request', 'signData': encoded_challenges, 'timeoutSeconds': U2F_SIGNATURE_TIMEOUT_SECONDS, 'localAlways': True}
return (client_data_map, json.dumps(signing_request)) |
def fromxlsx(filename, sheet=None, range_string=None, row_offset=0,
             column_offset=0, **kwargs):
    """
    Extract a table from a worksheet in an Excel .xlsx file.

    :param filename: path of the .xlsx workbook to read
    :param sheet: worksheet name (case sensitive); when omitted the first
        sheet in the workbook is used
    :param range_string: optional Excel range string restricting the cells
        that are extracted
    :param row_offset: number of rows to offset by
    :param column_offset: number of columns to offset by

    Any remaining keyword arguments are forwarded to
    :func:`openpyxl.load_workbook()`.
    """
    return XLSXView(
        filename,
        sheet=sheet,
        range_string=range_string,
        row_offset=row_offset,
        column_offset=column_offset,
        **kwargs
    )
constant[
Extract a table from a sheet in an Excel .xlsx file.
N.B., the sheet name is case sensitive.
The `sheet` argument can be omitted, in which case the first sheet in
the workbook is used by default.
The `range_string` argument can be used to provide a range string
specifying a range of cells to extract.
The `row_offset` and `column_offset` arguments can be used to
specify offsets.
Any other keyword arguments are passed through to
:func:`openpyxl.load_workbook()`.
]
return[call[name[XLSXView], parameter[name[filename]]]] | keyword[def] identifier[fromxlsx] ( identifier[filename] , identifier[sheet] = keyword[None] , identifier[range_string] = keyword[None] , identifier[row_offset] = literal[int] ,
identifier[column_offset] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[XLSXView] ( identifier[filename] , identifier[sheet] = identifier[sheet] , identifier[range_string] = identifier[range_string] ,
identifier[row_offset] = identifier[row_offset] , identifier[column_offset] = identifier[column_offset] ,
** identifier[kwargs] ) | def fromxlsx(filename, sheet=None, range_string=None, row_offset=0, column_offset=0, **kwargs):
"""
Extract a table from a sheet in an Excel .xlsx file.
N.B., the sheet name is case sensitive.
The `sheet` argument can be omitted, in which case the first sheet in
the workbook is used by default.
The `range_string` argument can be used to provide a range string
specifying a range of cells to extract.
The `row_offset` and `column_offset` arguments can be used to
specify offsets.
Any other keyword arguments are passed through to
:func:`openpyxl.load_workbook()`.
"""
return XLSXView(filename, sheet=sheet, range_string=range_string, row_offset=row_offset, column_offset=column_offset, **kwargs) |
def _setFlag(self, name, val, defVal):
"""set the objects property propName if the dictKey key exists in dict and it is not the same as default value defVal"""
if not hasattr(self, "flags"):
self.flags = {}
if val != defVal:
self.flags[name] = val | def function[_setFlag, parameter[self, name, val, defVal]]:
constant[set the objects property propName if the dictKey key exists in dict and it is not the same as default value defVal]
if <ast.UnaryOp object at 0x7da1b26ac8b0> begin[:]
name[self].flags assign[=] dictionary[[], []]
if compare[name[val] not_equal[!=] name[defVal]] begin[:]
call[name[self].flags][name[name]] assign[=] name[val] | keyword[def] identifier[_setFlag] ( identifier[self] , identifier[name] , identifier[val] , identifier[defVal] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[flags] ={}
keyword[if] identifier[val] != identifier[defVal] :
identifier[self] . identifier[flags] [ identifier[name] ]= identifier[val] | def _setFlag(self, name, val, defVal):
"""set the objects property propName if the dictKey key exists in dict and it is not the same as default value defVal"""
if not hasattr(self, 'flags'):
self.flags = {} # depends on [control=['if'], data=[]]
if val != defVal:
self.flags[name] = val # depends on [control=['if'], data=['val']] |
def load_transform_parameters(self):
    "Cache the parameters necessary to transform x & y coordinates"
    # WARNING: this method snapshots locals() at the end, so every local
    # name bound here (x_min, x_max, x_div, y_min, y_max, y_div, x_step,
    # y_step) becomes a key of the cached dict.  Renaming any of these
    # variables silently changes the cache's keys for downstream readers.
    x_min, x_max, x_div = self.x_range()
    y_min, y_max, y_div = self.y_range()
    # Pixels-per-data-unit scale factors.  The "font_size * 2" term
    # presumably reserves a margin for axis labels -- TODO confirm.
    x_step = (float(self.graph_width) - self.font_size * 2) / \
        (x_max - x_min)
    y_step = (float(self.graph_height) - self.font_size * 2) / \
        (y_max - y_min)
    # dict(locals()) copies the whole local namespace; drop 'self' so only
    # the numeric parameters remain.  The attribute is name-mangled
    # (class-private), so subclasses get their own copy.
    self.__transform_parameters = dict(locals())
    del self.__transform_parameters['self']
constant[Cache the parameters necessary to transform x & y coordinates]
<ast.Tuple object at 0x7da1b0217040> assign[=] call[name[self].x_range, parameter[]]
<ast.Tuple object at 0x7da1b0215cf0> assign[=] call[name[self].y_range, parameter[]]
variable[x_step] assign[=] binary_operation[binary_operation[call[name[float], parameter[name[self].graph_width]] - binary_operation[name[self].font_size * constant[2]]] / binary_operation[name[x_max] - name[x_min]]]
variable[y_step] assign[=] binary_operation[binary_operation[call[name[float], parameter[name[self].graph_height]] - binary_operation[name[self].font_size * constant[2]]] / binary_operation[name[y_max] - name[y_min]]]
name[self].__transform_parameters assign[=] call[name[dict], parameter[call[name[locals], parameter[]]]]
<ast.Delete object at 0x7da1b0216620> | keyword[def] identifier[load_transform_parameters] ( identifier[self] ):
literal[string]
identifier[x_min] , identifier[x_max] , identifier[x_div] = identifier[self] . identifier[x_range] ()
identifier[y_min] , identifier[y_max] , identifier[y_div] = identifier[self] . identifier[y_range] ()
identifier[x_step] =( identifier[float] ( identifier[self] . identifier[graph_width] )- identifier[self] . identifier[font_size] * literal[int] )/( identifier[x_max] - identifier[x_min] )
identifier[y_step] =( identifier[float] ( identifier[self] . identifier[graph_height] )- identifier[self] . identifier[font_size] * literal[int] )/( identifier[y_max] - identifier[y_min] )
identifier[self] . identifier[__transform_parameters] = identifier[dict] ( identifier[locals] ())
keyword[del] identifier[self] . identifier[__transform_parameters] [ literal[string] ] | def load_transform_parameters(self):
"""Cache the parameters necessary to transform x & y coordinates"""
(x_min, x_max, x_div) = self.x_range()
(y_min, y_max, y_div) = self.y_range()
x_step = (float(self.graph_width) - self.font_size * 2) / (x_max - x_min)
y_step = (float(self.graph_height) - self.font_size * 2) / (y_max - y_min)
self.__transform_parameters = dict(locals())
del self.__transform_parameters['self'] |
def get_file_info(self, relativePath, name=None):
    """
    get file information dict from the repository given its relative path and name.

    :Parameters:
        #. relativePath (string): The relative to the repository path of the directory where the file is.
        #. name (string): The file name.
           If None is given, name will be split from relativePath.

    :Returns:
        #. info (None, dictionary): The file information dictionary.
           If None, it means an error has occurred.
        #. errorMessage (string): The error message if any error occurred.
    """
    # normalize relative path and name
    relativePath = os.path.normpath(relativePath)
    if relativePath == '.':
        # '.' denotes the repository root, represented internally as ''.
        relativePath = ''
        assert name != '.pyrepinfo', "'.pyrepinfo' can't be a file name."
    if name is None:
        assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
        # Split the trailing path component off as the file name.
        relativePath,name = os.path.split(relativePath)
    # initialize message
    errorMessage = ""
    # get directory info
    dirInfoDict, errorMessage = self.get_directory_info(relativePath)
    if dirInfoDict is None:
        return None, errorMessage
    # get file info
    # dict.__getitem__ is invoked explicitly -- presumably to bypass an
    # overridden __getitem__ on the directory-info mapping; TODO confirm.
    fileInfo = dict.__getitem__(dirInfoDict, "files").get(name, None)
    if fileInfo is None:
        errorMessage = "file %s does not exist in relative path '%s'"%(name, relativePath)
    return fileInfo, errorMessage
return fileInfo, errorMessage | def function[get_file_info, parameter[self, relativePath, name]]:
constant[
get file information dict from the repository given its relative path and name.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory where the file is.
#. name (string): The file name.
If None is given, name will be split from relativePath.
:Returns:
#. info (None, dictionary): The file information dictionary.
If None, it means an error has occurred.
#. errorMessage (string): The error message if any error occurred.
]
variable[relativePath] assign[=] call[name[os].path.normpath, parameter[name[relativePath]]]
if compare[name[relativePath] equal[==] constant[.]] begin[:]
variable[relativePath] assign[=] constant[]
assert[compare[name[name] not_equal[!=] constant[.pyrepinfo]]]
if compare[name[name] is constant[None]] begin[:]
assert[call[name[len], parameter[name[relativePath]]]]
<ast.Tuple object at 0x7da20e9b0e80> assign[=] call[name[os].path.split, parameter[name[relativePath]]]
variable[errorMessage] assign[=] constant[]
<ast.Tuple object at 0x7da20e9b26e0> assign[=] call[name[self].get_directory_info, parameter[name[relativePath]]]
if compare[name[dirInfoDict] is constant[None]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20e9b33a0>, <ast.Name object at 0x7da20e9b3c10>]]]
variable[fileInfo] assign[=] call[call[name[dict].__getitem__, parameter[name[dirInfoDict], constant[files]]].get, parameter[name[name], constant[None]]]
if compare[name[fileInfo] is constant[None]] begin[:]
variable[errorMessage] assign[=] binary_operation[constant[file %s does not exist in relative path '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204564550>, <ast.Name object at 0x7da2045668c0>]]]
return[tuple[[<ast.Name object at 0x7da204565960>, <ast.Name object at 0x7da204565f60>]]] | keyword[def] identifier[get_file_info] ( identifier[self] , identifier[relativePath] , identifier[name] = keyword[None] ):
literal[string]
identifier[relativePath] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[relativePath] )
keyword[if] identifier[relativePath] == literal[string] :
identifier[relativePath] = literal[string]
keyword[assert] identifier[name] != literal[string] , literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
keyword[assert] identifier[len] ( identifier[relativePath] ), literal[string]
identifier[relativePath] , identifier[name] = identifier[os] . identifier[path] . identifier[split] ( identifier[relativePath] )
identifier[errorMessage] = literal[string]
identifier[dirInfoDict] , identifier[errorMessage] = identifier[self] . identifier[get_directory_info] ( identifier[relativePath] )
keyword[if] identifier[dirInfoDict] keyword[is] keyword[None] :
keyword[return] keyword[None] , identifier[errorMessage]
identifier[fileInfo] = identifier[dict] . identifier[__getitem__] ( identifier[dirInfoDict] , literal[string] ). identifier[get] ( identifier[name] , keyword[None] )
keyword[if] identifier[fileInfo] keyword[is] keyword[None] :
identifier[errorMessage] = literal[string] %( identifier[name] , identifier[relativePath] )
keyword[return] identifier[fileInfo] , identifier[errorMessage] | def get_file_info(self, relativePath, name=None):
"""
get file information dict from the repository given its relative path and name.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory where the file is.
#. name (string): The file name.
If None is given, name will be split from relativePath.
:Returns:
#. info (None, dictionary): The file information dictionary.
If None, it means an error has occurred.
#. errorMessage (string): The error message if any error occurred.
"""
# normalize relative path and name
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
assert name != '.pyrepinfo', "'.pyrepinfo' can't be a file name." # depends on [control=['if'], data=['relativePath']]
if name is None:
assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
(relativePath, name) = os.path.split(relativePath) # depends on [control=['if'], data=['name']]
# initialize message
errorMessage = ''
# get directory info
(dirInfoDict, errorMessage) = self.get_directory_info(relativePath)
if dirInfoDict is None:
return (None, errorMessage) # depends on [control=['if'], data=[]]
# get file info
fileInfo = dict.__getitem__(dirInfoDict, 'files').get(name, None)
if fileInfo is None:
errorMessage = "file %s does not exist in relative path '%s'" % (name, relativePath) # depends on [control=['if'], data=[]]
return (fileInfo, errorMessage) |
def create_table(self):
    """
    Hook point for overriding how the CounterPool creates a new table
    in DynamoDB.
    """
    table_kwargs = {
        'name': self.get_table_name(),
        'schema': self.get_schema(),
        'read_units': self.get_read_units(),
        'write_units': self.get_write_units(),
    }
    table = self.conn.create_table(**table_kwargs)
    # Block until DynamoDB reports the new table as usable.
    if table.status != 'ACTIVE':
        table.refresh(wait_for_active=True, retry_seconds=1)
    return table
constant[
Hook point for overriding how the CounterPool creates a new table
in DynamooDB
]
variable[table] assign[=] call[name[self].conn.create_table, parameter[]]
if compare[name[table].status not_equal[!=] constant[ACTIVE]] begin[:]
call[name[table].refresh, parameter[]]
return[name[table]] | keyword[def] identifier[create_table] ( identifier[self] ):
literal[string]
identifier[table] = identifier[self] . identifier[conn] . identifier[create_table] (
identifier[name] = identifier[self] . identifier[get_table_name] (),
identifier[schema] = identifier[self] . identifier[get_schema] (),
identifier[read_units] = identifier[self] . identifier[get_read_units] (),
identifier[write_units] = identifier[self] . identifier[get_write_units] (),
)
keyword[if] identifier[table] . identifier[status] != literal[string] :
identifier[table] . identifier[refresh] ( identifier[wait_for_active] = keyword[True] , identifier[retry_seconds] = literal[int] )
keyword[return] identifier[table] | def create_table(self):
"""
Hook point for overriding how the CounterPool creates a new table
in DynamooDB
"""
table = self.conn.create_table(name=self.get_table_name(), schema=self.get_schema(), read_units=self.get_read_units(), write_units=self.get_write_units())
if table.status != 'ACTIVE':
table.refresh(wait_for_active=True, retry_seconds=1) # depends on [control=['if'], data=[]]
return table |
def get(key, **kwargs):
    '''
    Get details for a single, interpreted occurrence

    :param key: [int] A GBIF occurrence key

    :return: A dictionary, of results

    Usage::

        from pygbif import occurrences
        occurrences.get(key = 1258202889)
        occurrences.get(key = 1227768771)
        occurrences.get(key = 1227769518)
    '''
    occurrence_url = '%soccurrence/%s' % (gbif_baseurl, key)
    return gbif_GET(occurrence_url, {}, **kwargs)
constant[
Gets details for a single, interpreted occurrence
:param key: [int] A GBIF occurrence key
:return: A dictionary, of results
Usage::
from pygbif import occurrences
occurrences.get(key = 1258202889)
occurrences.get(key = 1227768771)
occurrences.get(key = 1227769518)
]
variable[url] assign[=] binary_operation[binary_operation[name[gbif_baseurl] + constant[occurrence/]] + call[name[str], parameter[name[key]]]]
variable[out] assign[=] call[name[gbif_GET], parameter[name[url], dictionary[[], []]]]
return[name[out]] | keyword[def] identifier[get] ( identifier[key] ,** identifier[kwargs] ):
literal[string]
identifier[url] = identifier[gbif_baseurl] + literal[string] + identifier[str] ( identifier[key] )
identifier[out] = identifier[gbif_GET] ( identifier[url] ,{},** identifier[kwargs] )
keyword[return] identifier[out] | def get(key, **kwargs):
"""
Gets details for a single, interpreted occurrence
:param key: [int] A GBIF occurrence key
:return: A dictionary, of results
Usage::
from pygbif import occurrences
occurrences.get(key = 1258202889)
occurrences.get(key = 1227768771)
occurrences.get(key = 1227769518)
"""
url = gbif_baseurl + 'occurrence/' + str(key)
out = gbif_GET(url, {}, **kwargs)
return out |
def weighted_random_choice(items):
    """
    Return a weighted random choice from a list of items.

    :param items: An iterable of tuples ``(obj, weight)``.
    :return: A randomly selected ``obj`` whose likelihood is proportional
        to its weight, or ``None`` if *items* is empty or every weight is
        non-positive.
    """
    choices = list(items)
    # Draw a point uniformly in [0, total_weight) and walk the cumulative
    # weights until we pass it.
    threshold = random.random() * sum(weight for _, weight in choices)
    for obj, weight in choices:
        if weight > threshold:
            return obj
        threshold -= weight
    return None
return None | def function[weighted_random_choice, parameter[items]]:
constant[
Returns a weighted random choice from a list of items.
:param items: A list of tuples (object, weight)
:return: A random object, whose likelihood is proportional to its weight.
]
variable[l] assign[=] call[name[list], parameter[name[items]]]
variable[r] assign[=] binary_operation[call[name[random].random, parameter[]] * call[name[sum], parameter[<ast.ListComp object at 0x7da1b13462f0>]]]
for taget[tuple[[<ast.Name object at 0x7da1b1344880>, <ast.Name object at 0x7da1b13455d0>]]] in starred[name[l]] begin[:]
if compare[name[p] greater[>] name[r]] begin[:]
return[name[x]]
<ast.AugAssign object at 0x7da1b13927d0>
return[constant[None]] | keyword[def] identifier[weighted_random_choice] ( identifier[items] ):
literal[string]
identifier[l] = identifier[list] ( identifier[items] )
identifier[r] = identifier[random] . identifier[random] ()* identifier[sum] ([ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[l] ])
keyword[for] identifier[x] , identifier[p] keyword[in] identifier[l] :
keyword[if] identifier[p] > identifier[r] :
keyword[return] identifier[x]
identifier[r] -= identifier[p]
keyword[return] keyword[None] | def weighted_random_choice(items):
"""
Returns a weighted random choice from a list of items.
:param items: A list of tuples (object, weight)
:return: A random object, whose likelihood is proportional to its weight.
"""
l = list(items)
r = random.random() * sum([i[1] for i in l])
for (x, p) in l:
if p > r:
return x # depends on [control=['if'], data=[]]
r -= p # depends on [control=['for'], data=[]]
return None |
def is_equal(self, other):
    """Equality checker with message

    :param other: Other Impact Function to be compared.
    :type other: ImpactFunction

    :returns: True if both are the same IF, other wise False and the
        message.
    :rtype: bool, str
    """
    # Properties compared one by one; the first mismatch short-circuits
    # with (False, reason).  Commented-out entries are deliberately
    # excluded from the comparison.
    properties = [
        'debug_mode',
        'use_rounding',
        'requested_extent',
        'crs',
        'analysis_extent',
        'datastore',
        'name',
        'title',
        'start_datetime',
        'end_datetime',
        'duration',
        'earthquake_function',
        # 'performance_log', # I don't think need we need this one
        'hazard',
        'exposure',
        'aggregation',
        # Output layers on new IF object will have a different provenance
        # data with the one from original IF.
        # 'impact',
        # 'exposure_summary',
        # 'aggregate_hazard_impacted',
        # 'aggregation_summary',
        # 'analysis_impacted',
        # 'exposure_summary_table',
        'profiling',
    ]
    for if_property in properties:
        # Skip if it's debug mode for profiling
        if self.debug_mode:
            if if_property == 'profiling':
                continue
        try:
            property_a = getattr(self, if_property)
            property_b = getattr(other, if_property)
            # Both sides must at least be of the same type before any
            # type-specific comparison is attempted.
            if not isinstance(property_a, type(property_b)):
                message = (
                    'Different type of property %s.\nA: %s\nB: %s' % (
                        if_property, type(property_a), type(property_b)))
                return False, message
            # Map layers: compare keywords; vector layers additionally
            # compare field names and feature counts.
            if isinstance(property_a, QgsMapLayer):
                if byteify(property_a.keywords) != byteify(
                        property_b.keywords):
                    message = (
                        'Keyword Layer is not equal is %s' % if_property)
                    return False, message
                if isinstance(property_a, QgsVectorLayer):
                    fields_a = [f.name() for f in property_a.fields()]
                    fields_b = [f.name() for f in property_b.fields()]
                    if fields_a != fields_b:
                        message = (
                            'Layer fields is not equal for %s' %
                            if_property)
                        return False, message
                    if (property_a.featureCount()
                            != property_b.featureCount()):
                        message = (
                            'Feature count is not equal for %s' %
                            if_property)
                        return False, message
            # Geometries are compared spatially, not by identity.
            elif isinstance(property_a, QgsGeometry):
                if not property_a.equals(property_b):
                    string_a = property_a.asWkt()
                    string_b = property_b.asWkt()
                    message = (
                        '[Non Layer] The not equal property is %s.\n'
                        'A: %s\nB: %s' % (if_property, string_a, string_b))
                    return False, message
            # Datastores are considered equal when they point at the same
            # URI path.
            elif isinstance(property_a, DataStore):
                if property_a.uri_path != property_b.uri_path:
                    string_a = property_a.uri_path
                    string_b = property_b.uri_path
                    message = (
                        '[Non Layer] The not equal property is %s.\n'
                        'A: %s\nB: %s' % (if_property, string_a, string_b))
                    return False, message
            # Everything else falls back to plain != comparison.
            else:
                if property_a != property_b:
                    string_a = property_a
                    string_b = property_b
                    message = (
                        '[Non Layer] The not equal property is %s.\n'
                        'A: %s\nB: %s' % (if_property, string_a, string_b))
                    return False, message
        except AttributeError as e:
            # One side is missing the property entirely.
            message = (
                'Property %s is not found. The exception is %s' % (
                    if_property, e))
            return False, message
        except IndexError as e:
            # NOTE(review): 'impact' is commented out of `properties`
            # above, so this tolerated-IndexError branch looks
            # unreachable -- confirm before removing.
            if if_property == 'impact':
                continue
            else:
                message = (
                    'Property %s is out of index. The exception is %s' % (
                        if_property, e))
                return False, message
        except Exception as e:
            # Broad catch: any other failure while comparing a property is
            # reported as inequality rather than propagated.
            message = (
                'Error on %s with error message %s' % (if_property, e))
            return False, message
    return True, ''
constant[Equality checker with message
:param other: Other Impact Function to be compared.
:type other: ImpactFunction
:returns: True if both are the same IF, other wise False and the
message.
:rtype: bool, str
]
variable[properties] assign[=] list[[<ast.Constant object at 0x7da1b0cbe830>, <ast.Constant object at 0x7da1b0cbe800>, <ast.Constant object at 0x7da1b0cbd060>, <ast.Constant object at 0x7da1b0cbd0c0>, <ast.Constant object at 0x7da1b0cbdae0>, <ast.Constant object at 0x7da1b0cbd9f0>, <ast.Constant object at 0x7da1b0cbd9c0>, <ast.Constant object at 0x7da1b0cbd990>, <ast.Constant object at 0x7da1b0cbd900>, <ast.Constant object at 0x7da1b0cbd8d0>, <ast.Constant object at 0x7da1b0cbd8a0>, <ast.Constant object at 0x7da1b0cbd870>, <ast.Constant object at 0x7da1b0cbd840>, <ast.Constant object at 0x7da1b0cbcbb0>, <ast.Constant object at 0x7da1b0cbcb50>, <ast.Constant object at 0x7da1b0cbd360>]]
for taget[name[if_property]] in starred[name[properties]] begin[:]
if name[self].debug_mode begin[:]
if compare[name[if_property] equal[==] constant[profiling]] begin[:]
continue
<ast.Try object at 0x7da1b0cbca00>
return[tuple[[<ast.Constant object at 0x7da1b0ca3be0>, <ast.Constant object at 0x7da1b0ca29b0>]]] | keyword[def] identifier[is_equal] ( identifier[self] , identifier[other] ):
literal[string]
identifier[properties] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
keyword[for] identifier[if_property] keyword[in] identifier[properties] :
keyword[if] identifier[self] . identifier[debug_mode] :
keyword[if] identifier[if_property] == literal[string] :
keyword[continue]
keyword[try] :
identifier[property_a] = identifier[getattr] ( identifier[self] , identifier[if_property] )
identifier[property_b] = identifier[getattr] ( identifier[other] , identifier[if_property] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[property_a] , identifier[type] ( identifier[property_b] )):
identifier[message] =(
literal[string] %(
identifier[if_property] , identifier[type] ( identifier[property_a] ), identifier[type] ( identifier[property_b] )))
keyword[return] keyword[False] , identifier[message]
keyword[if] identifier[isinstance] ( identifier[property_a] , identifier[QgsMapLayer] ):
keyword[if] identifier[byteify] ( identifier[property_a] . identifier[keywords] )!= identifier[byteify] (
identifier[property_b] . identifier[keywords] ):
identifier[message] =(
literal[string] % identifier[if_property] )
keyword[return] keyword[False] , identifier[message]
keyword[if] identifier[isinstance] ( identifier[property_a] , identifier[QgsVectorLayer] ):
identifier[fields_a] =[ identifier[f] . identifier[name] () keyword[for] identifier[f] keyword[in] identifier[property_a] . identifier[fields] ()]
identifier[fields_b] =[ identifier[f] . identifier[name] () keyword[for] identifier[f] keyword[in] identifier[property_b] . identifier[fields] ()]
keyword[if] identifier[fields_a] != identifier[fields_b] :
identifier[message] =(
literal[string] %
identifier[if_property] )
keyword[return] keyword[False] , identifier[message]
keyword[if] ( identifier[property_a] . identifier[featureCount] ()
!= identifier[property_b] . identifier[featureCount] ()):
identifier[message] =(
literal[string] %
identifier[if_property] )
keyword[return] keyword[False] , identifier[message]
keyword[elif] identifier[isinstance] ( identifier[property_a] , identifier[QgsGeometry] ):
keyword[if] keyword[not] identifier[property_a] . identifier[equals] ( identifier[property_b] ):
identifier[string_a] = identifier[property_a] . identifier[asWkt] ()
identifier[string_b] = identifier[property_b] . identifier[asWkt] ()
identifier[message] =(
literal[string]
literal[string] %( identifier[if_property] , identifier[string_a] , identifier[string_b] ))
keyword[return] keyword[False] , identifier[message]
keyword[elif] identifier[isinstance] ( identifier[property_a] , identifier[DataStore] ):
keyword[if] identifier[property_a] . identifier[uri_path] != identifier[property_b] . identifier[uri_path] :
identifier[string_a] = identifier[property_a] . identifier[uri_path]
identifier[string_b] = identifier[property_b] . identifier[uri_path]
identifier[message] =(
literal[string]
literal[string] %( identifier[if_property] , identifier[string_a] , identifier[string_b] ))
keyword[return] keyword[False] , identifier[message]
keyword[else] :
keyword[if] identifier[property_a] != identifier[property_b] :
identifier[string_a] = identifier[property_a]
identifier[string_b] = identifier[property_b]
identifier[message] =(
literal[string]
literal[string] %( identifier[if_property] , identifier[string_a] , identifier[string_b] ))
keyword[return] keyword[False] , identifier[message]
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
identifier[message] =(
literal[string] %(
identifier[if_property] , identifier[e] ))
keyword[return] keyword[False] , identifier[message]
keyword[except] identifier[IndexError] keyword[as] identifier[e] :
keyword[if] identifier[if_property] == literal[string] :
keyword[continue]
keyword[else] :
identifier[message] =(
literal[string] %(
identifier[if_property] , identifier[e] ))
keyword[return] keyword[False] , identifier[message]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[message] =(
literal[string] %( identifier[if_property] , identifier[e] ))
keyword[return] keyword[False] , identifier[message]
keyword[return] keyword[True] , literal[string] | def is_equal(self, other):
"""Equality checker with message
:param other: Other Impact Function to be compared.
:type other: ImpactFunction
:returns: True if both are the same IF, other wise False and the
message.
:rtype: bool, str
"""
# 'performance_log', # I don't think need we need this one
# Output layers on new IF object will have a different provenance
# data with the one from original IF.
# 'impact',
# 'exposure_summary',
# 'aggregate_hazard_impacted',
# 'aggregation_summary',
# 'analysis_impacted',
# 'exposure_summary_table',
properties = ['debug_mode', 'use_rounding', 'requested_extent', 'crs', 'analysis_extent', 'datastore', 'name', 'title', 'start_datetime', 'end_datetime', 'duration', 'earthquake_function', 'hazard', 'exposure', 'aggregation', 'profiling']
for if_property in properties:
# Skip if it's debug mode for profiling
if self.debug_mode:
if if_property == 'profiling':
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
try:
property_a = getattr(self, if_property)
property_b = getattr(other, if_property)
if not isinstance(property_a, type(property_b)):
message = 'Different type of property %s.\nA: %s\nB: %s' % (if_property, type(property_a), type(property_b))
return (False, message) # depends on [control=['if'], data=[]]
if isinstance(property_a, QgsMapLayer):
if byteify(property_a.keywords) != byteify(property_b.keywords):
message = 'Keyword Layer is not equal is %s' % if_property
return (False, message) # depends on [control=['if'], data=[]]
if isinstance(property_a, QgsVectorLayer):
fields_a = [f.name() for f in property_a.fields()]
fields_b = [f.name() for f in property_b.fields()]
if fields_a != fields_b:
message = 'Layer fields is not equal for %s' % if_property
return (False, message) # depends on [control=['if'], data=[]]
if property_a.featureCount() != property_b.featureCount():
message = 'Feature count is not equal for %s' % if_property
return (False, message) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(property_a, QgsGeometry):
if not property_a.equals(property_b):
string_a = property_a.asWkt()
string_b = property_b.asWkt()
message = '[Non Layer] The not equal property is %s.\nA: %s\nB: %s' % (if_property, string_a, string_b)
return (False, message) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(property_a, DataStore):
if property_a.uri_path != property_b.uri_path:
string_a = property_a.uri_path
string_b = property_b.uri_path
message = '[Non Layer] The not equal property is %s.\nA: %s\nB: %s' % (if_property, string_a, string_b)
return (False, message) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif property_a != property_b:
string_a = property_a
string_b = property_b
message = '[Non Layer] The not equal property is %s.\nA: %s\nB: %s' % (if_property, string_a, string_b)
return (False, message) # depends on [control=['if'], data=['property_a', 'property_b']] # depends on [control=['try'], data=[]]
except AttributeError as e:
message = 'Property %s is not found. The exception is %s' % (if_property, e)
return (False, message) # depends on [control=['except'], data=['e']]
except IndexError as e:
if if_property == 'impact':
continue # depends on [control=['if'], data=[]]
else:
message = 'Property %s is out of index. The exception is %s' % (if_property, e)
return (False, message) # depends on [control=['except'], data=['e']]
except Exception as e:
message = 'Error on %s with error message %s' % (if_property, e)
return (False, message) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['if_property']]
return (True, '') |
def trigger_event(self, source, event, args):
"""
Trigger an event on the Entity
* \a source: The source of the event
* \a event: The event being triggered
* \a args: A list of arguments to pass to the callback
"""
actions = []
for action in event.actions:
if callable(action):
ac = action(self, *args)
if not ac:
# Handle falsy returns
continue
if not hasattr(ac, "__iter__"):
actions.append(ac)
else:
actions += action(self, *args)
else:
actions.append(action)
ret = source.game.trigger(self, actions, args)
if event.once:
self._events.remove(event)
return ret | def function[trigger_event, parameter[self, source, event, args]]:
constant[
Trigger an event on the Entity
* source: The source of the event
* event: The event being triggered
* args: A list of arguments to pass to the callback
]
variable[actions] assign[=] list[[]]
for taget[name[action]] in starred[name[event].actions] begin[:]
if call[name[callable], parameter[name[action]]] begin[:]
variable[ac] assign[=] call[name[action], parameter[name[self], <ast.Starred object at 0x7da1b088e5f0>]]
if <ast.UnaryOp object at 0x7da1b088f6a0> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b088e7a0> begin[:]
call[name[actions].append, parameter[name[ac]]]
variable[ret] assign[=] call[name[source].game.trigger, parameter[name[self], name[actions], name[args]]]
if name[event].once begin[:]
call[name[self]._events.remove, parameter[name[event]]]
return[name[ret]] | keyword[def] identifier[trigger_event] ( identifier[self] , identifier[source] , identifier[event] , identifier[args] ):
literal[string]
identifier[actions] =[]
keyword[for] identifier[action] keyword[in] identifier[event] . identifier[actions] :
keyword[if] identifier[callable] ( identifier[action] ):
identifier[ac] = identifier[action] ( identifier[self] ,* identifier[args] )
keyword[if] keyword[not] identifier[ac] :
keyword[continue]
keyword[if] keyword[not] identifier[hasattr] ( identifier[ac] , literal[string] ):
identifier[actions] . identifier[append] ( identifier[ac] )
keyword[else] :
identifier[actions] += identifier[action] ( identifier[self] ,* identifier[args] )
keyword[else] :
identifier[actions] . identifier[append] ( identifier[action] )
identifier[ret] = identifier[source] . identifier[game] . identifier[trigger] ( identifier[self] , identifier[actions] , identifier[args] )
keyword[if] identifier[event] . identifier[once] :
identifier[self] . identifier[_events] . identifier[remove] ( identifier[event] )
keyword[return] identifier[ret] | def trigger_event(self, source, event, args):
"""
Trigger an event on the Entity
* \x07 source: The source of the event
* \x07 event: The event being triggered
* \x07 args: A list of arguments to pass to the callback
"""
actions = []
for action in event.actions:
if callable(action):
ac = action(self, *args)
if not ac: # Handle falsy returns
continue # depends on [control=['if'], data=[]]
if not hasattr(ac, '__iter__'):
actions.append(ac) # depends on [control=['if'], data=[]]
else:
actions += action(self, *args) # depends on [control=['if'], data=[]]
else:
actions.append(action) # depends on [control=['for'], data=['action']]
ret = source.game.trigger(self, actions, args)
if event.once:
self._events.remove(event) # depends on [control=['if'], data=[]]
return ret |
def _out(ins):
""" Translates OUT to asm.
"""
output = _8bit_oper(ins.quad[2])
output.extend(_16bit_oper(ins.quad[1]))
output.append('ld b, h')
output.append('ld c, l')
output.append('out (c), a')
return output | def function[_out, parameter[ins]]:
constant[ Translates OUT to asm.
]
variable[output] assign[=] call[name[_8bit_oper], parameter[call[name[ins].quad][constant[2]]]]
call[name[output].extend, parameter[call[name[_16bit_oper], parameter[call[name[ins].quad][constant[1]]]]]]
call[name[output].append, parameter[constant[ld b, h]]]
call[name[output].append, parameter[constant[ld c, l]]]
call[name[output].append, parameter[constant[out (c), a]]]
return[name[output]] | keyword[def] identifier[_out] ( identifier[ins] ):
literal[string]
identifier[output] = identifier[_8bit_oper] ( identifier[ins] . identifier[quad] [ literal[int] ])
identifier[output] . identifier[extend] ( identifier[_16bit_oper] ( identifier[ins] . identifier[quad] [ literal[int] ]))
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[return] identifier[output] | def _out(ins):
""" Translates OUT to asm.
"""
output = _8bit_oper(ins.quad[2])
output.extend(_16bit_oper(ins.quad[1]))
output.append('ld b, h')
output.append('ld c, l')
output.append('out (c), a')
return output |
def element_wise_op(array, other, op, ty):
"""
Operation of series and other, element-wise (binary operator add)
Args:
array (WeldObject / Numpy.ndarray): Input array
other (WeldObject / Numpy.ndarray): Second Input array
op (str): Op string used to compute element-wise operation (+ / *)
ty (WeldType): Type of each element in the input array
Returns:
A WeldObject representing this computation
"""
weld_obj = WeldObject(encoder_, decoder_)
array_var = weld_obj.update(array)
if isinstance(array, WeldObject):
array_var = array.obj_id
weld_obj.dependencies[array_var] = array
other_var = weld_obj.update(other)
if isinstance(other, WeldObject):
other_var = other.obj_id
weld_obj.dependencies[other_var] = other
weld_template = """
map(
zip(%(array)s, %(other)s),
|a| a.$0 %(op)s a.$1
)
"""
weld_obj.weld_code = weld_template % {"array": array_var,
"other": other_var,
"ty": ty, "op": op}
return weld_obj | def function[element_wise_op, parameter[array, other, op, ty]]:
constant[
Operation of series and other, element-wise (binary operator add)
Args:
array (WeldObject / Numpy.ndarray): Input array
other (WeldObject / Numpy.ndarray): Second Input array
op (str): Op string used to compute element-wise operation (+ / *)
ty (WeldType): Type of each element in the input array
Returns:
A WeldObject representing this computation
]
variable[weld_obj] assign[=] call[name[WeldObject], parameter[name[encoder_], name[decoder_]]]
variable[array_var] assign[=] call[name[weld_obj].update, parameter[name[array]]]
if call[name[isinstance], parameter[name[array], name[WeldObject]]] begin[:]
variable[array_var] assign[=] name[array].obj_id
call[name[weld_obj].dependencies][name[array_var]] assign[=] name[array]
variable[other_var] assign[=] call[name[weld_obj].update, parameter[name[other]]]
if call[name[isinstance], parameter[name[other], name[WeldObject]]] begin[:]
variable[other_var] assign[=] name[other].obj_id
call[name[weld_obj].dependencies][name[other_var]] assign[=] name[other]
variable[weld_template] assign[=] constant[
map(
zip(%(array)s, %(other)s),
|a| a.$0 %(op)s a.$1
)
]
name[weld_obj].weld_code assign[=] binary_operation[name[weld_template] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da20c6abb20>, <ast.Constant object at 0x7da20c6a9b10>, <ast.Constant object at 0x7da20c6aba30>, <ast.Constant object at 0x7da20c6a9ba0>], [<ast.Name object at 0x7da20c6aad10>, <ast.Name object at 0x7da20c6aac50>, <ast.Name object at 0x7da20c6ab9a0>, <ast.Name object at 0x7da20c6aa8f0>]]]
return[name[weld_obj]] | keyword[def] identifier[element_wise_op] ( identifier[array] , identifier[other] , identifier[op] , identifier[ty] ):
literal[string]
identifier[weld_obj] = identifier[WeldObject] ( identifier[encoder_] , identifier[decoder_] )
identifier[array_var] = identifier[weld_obj] . identifier[update] ( identifier[array] )
keyword[if] identifier[isinstance] ( identifier[array] , identifier[WeldObject] ):
identifier[array_var] = identifier[array] . identifier[obj_id]
identifier[weld_obj] . identifier[dependencies] [ identifier[array_var] ]= identifier[array]
identifier[other_var] = identifier[weld_obj] . identifier[update] ( identifier[other] )
keyword[if] identifier[isinstance] ( identifier[other] , identifier[WeldObject] ):
identifier[other_var] = identifier[other] . identifier[obj_id]
identifier[weld_obj] . identifier[dependencies] [ identifier[other_var] ]= identifier[other]
identifier[weld_template] = literal[string]
identifier[weld_obj] . identifier[weld_code] = identifier[weld_template] %{ literal[string] : identifier[array_var] ,
literal[string] : identifier[other_var] ,
literal[string] : identifier[ty] , literal[string] : identifier[op] }
keyword[return] identifier[weld_obj] | def element_wise_op(array, other, op, ty):
"""
Operation of series and other, element-wise (binary operator add)
Args:
array (WeldObject / Numpy.ndarray): Input array
other (WeldObject / Numpy.ndarray): Second Input array
op (str): Op string used to compute element-wise operation (+ / *)
ty (WeldType): Type of each element in the input array
Returns:
A WeldObject representing this computation
"""
weld_obj = WeldObject(encoder_, decoder_)
array_var = weld_obj.update(array)
if isinstance(array, WeldObject):
array_var = array.obj_id
weld_obj.dependencies[array_var] = array # depends on [control=['if'], data=[]]
other_var = weld_obj.update(other)
if isinstance(other, WeldObject):
other_var = other.obj_id
weld_obj.dependencies[other_var] = other # depends on [control=['if'], data=[]]
weld_template = '\n map(\n zip(%(array)s, %(other)s),\n |a| a.$0 %(op)s a.$1\n )\n '
weld_obj.weld_code = weld_template % {'array': array_var, 'other': other_var, 'ty': ty, 'op': op}
return weld_obj |
def _normalize_string(raw_str):
"""Normalizes the string using tokenizer.encode.
Args:
raw_str: the input string
Returns:
A string which is ready to be tokenized using split()
"""
return " ".join(
token.strip()
for token in tokenizer.encode(text_encoder.native_to_unicode(raw_str))) | def function[_normalize_string, parameter[raw_str]]:
constant[Normalizes the string using tokenizer.encode.
Args:
raw_str: the input string
Returns:
A string which is ready to be tokenized using split()
]
return[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20cabd8a0>]]] | keyword[def] identifier[_normalize_string] ( identifier[raw_str] ):
literal[string]
keyword[return] literal[string] . identifier[join] (
identifier[token] . identifier[strip] ()
keyword[for] identifier[token] keyword[in] identifier[tokenizer] . identifier[encode] ( identifier[text_encoder] . identifier[native_to_unicode] ( identifier[raw_str] ))) | def _normalize_string(raw_str):
"""Normalizes the string using tokenizer.encode.
Args:
raw_str: the input string
Returns:
A string which is ready to be tokenized using split()
"""
return ' '.join((token.strip() for token in tokenizer.encode(text_encoder.native_to_unicode(raw_str)))) |
def delete(self, source):
'''Thread worker for download operation.'''
s3url = S3URL(source)
message('Delete %s', source)
if not self.opt.dry_run:
self.s3.delete_object(Bucket=s3url.bucket, Key=s3url.path) | def function[delete, parameter[self, source]]:
constant[Thread worker for download operation.]
variable[s3url] assign[=] call[name[S3URL], parameter[name[source]]]
call[name[message], parameter[constant[Delete %s], name[source]]]
if <ast.UnaryOp object at 0x7da1b0247b50> begin[:]
call[name[self].s3.delete_object, parameter[]] | keyword[def] identifier[delete] ( identifier[self] , identifier[source] ):
literal[string]
identifier[s3url] = identifier[S3URL] ( identifier[source] )
identifier[message] ( literal[string] , identifier[source] )
keyword[if] keyword[not] identifier[self] . identifier[opt] . identifier[dry_run] :
identifier[self] . identifier[s3] . identifier[delete_object] ( identifier[Bucket] = identifier[s3url] . identifier[bucket] , identifier[Key] = identifier[s3url] . identifier[path] ) | def delete(self, source):
"""Thread worker for download operation."""
s3url = S3URL(source)
message('Delete %s', source)
if not self.opt.dry_run:
self.s3.delete_object(Bucket=s3url.bucket, Key=s3url.path) # depends on [control=['if'], data=[]] |
def map(self, path):
"""Map `path` through the aliases.
`path` is checked against all of the patterns. The first pattern to
match is used to replace the root of the path with the result root.
Only one pattern is ever used. If no patterns match, `path` is
returned unchanged.
The separator style in the result is made to match that of the result
in the alias.
"""
for regex, result, pattern_sep, result_sep in self.aliases:
m = regex.match(path)
if m:
new = path.replace(m.group(0), result)
if pattern_sep != result_sep:
new = new.replace(pattern_sep, result_sep)
if self.locator:
new = self.locator.canonical_filename(new)
return new
return path | def function[map, parameter[self, path]]:
constant[Map `path` through the aliases.
`path` is checked against all of the patterns. The first pattern to
match is used to replace the root of the path with the result root.
Only one pattern is ever used. If no patterns match, `path` is
returned unchanged.
The separator style in the result is made to match that of the result
in the alias.
]
for taget[tuple[[<ast.Name object at 0x7da18fe91d80>, <ast.Name object at 0x7da18fe90cd0>, <ast.Name object at 0x7da18fe93fd0>, <ast.Name object at 0x7da18fe93970>]]] in starred[name[self].aliases] begin[:]
variable[m] assign[=] call[name[regex].match, parameter[name[path]]]
if name[m] begin[:]
variable[new] assign[=] call[name[path].replace, parameter[call[name[m].group, parameter[constant[0]]], name[result]]]
if compare[name[pattern_sep] not_equal[!=] name[result_sep]] begin[:]
variable[new] assign[=] call[name[new].replace, parameter[name[pattern_sep], name[result_sep]]]
if name[self].locator begin[:]
variable[new] assign[=] call[name[self].locator.canonical_filename, parameter[name[new]]]
return[name[new]]
return[name[path]] | keyword[def] identifier[map] ( identifier[self] , identifier[path] ):
literal[string]
keyword[for] identifier[regex] , identifier[result] , identifier[pattern_sep] , identifier[result_sep] keyword[in] identifier[self] . identifier[aliases] :
identifier[m] = identifier[regex] . identifier[match] ( identifier[path] )
keyword[if] identifier[m] :
identifier[new] = identifier[path] . identifier[replace] ( identifier[m] . identifier[group] ( literal[int] ), identifier[result] )
keyword[if] identifier[pattern_sep] != identifier[result_sep] :
identifier[new] = identifier[new] . identifier[replace] ( identifier[pattern_sep] , identifier[result_sep] )
keyword[if] identifier[self] . identifier[locator] :
identifier[new] = identifier[self] . identifier[locator] . identifier[canonical_filename] ( identifier[new] )
keyword[return] identifier[new]
keyword[return] identifier[path] | def map(self, path):
"""Map `path` through the aliases.
`path` is checked against all of the patterns. The first pattern to
match is used to replace the root of the path with the result root.
Only one pattern is ever used. If no patterns match, `path` is
returned unchanged.
The separator style in the result is made to match that of the result
in the alias.
"""
for (regex, result, pattern_sep, result_sep) in self.aliases:
m = regex.match(path)
if m:
new = path.replace(m.group(0), result)
if pattern_sep != result_sep:
new = new.replace(pattern_sep, result_sep) # depends on [control=['if'], data=['pattern_sep', 'result_sep']]
if self.locator:
new = self.locator.canonical_filename(new) # depends on [control=['if'], data=[]]
return new # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return path |
def infer(self, x, **fit_params):
"""Perform a single inference step on a batch of data.
Parameters
----------
x : input data
A batch of the input data.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the ``self.train_split`` call.
"""
x = to_tensor(x, device=self.device)
if isinstance(x, dict):
x_dict = self._merge_x_and_fit_params(x, fit_params)
return self.module_(**x_dict)
return self.module_(x, **fit_params) | def function[infer, parameter[self, x]]:
constant[Perform a single inference step on a batch of data.
Parameters
----------
x : input data
A batch of the input data.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the ``self.train_split`` call.
]
variable[x] assign[=] call[name[to_tensor], parameter[name[x]]]
if call[name[isinstance], parameter[name[x], name[dict]]] begin[:]
variable[x_dict] assign[=] call[name[self]._merge_x_and_fit_params, parameter[name[x], name[fit_params]]]
return[call[name[self].module_, parameter[]]]
return[call[name[self].module_, parameter[name[x]]]] | keyword[def] identifier[infer] ( identifier[self] , identifier[x] ,** identifier[fit_params] ):
literal[string]
identifier[x] = identifier[to_tensor] ( identifier[x] , identifier[device] = identifier[self] . identifier[device] )
keyword[if] identifier[isinstance] ( identifier[x] , identifier[dict] ):
identifier[x_dict] = identifier[self] . identifier[_merge_x_and_fit_params] ( identifier[x] , identifier[fit_params] )
keyword[return] identifier[self] . identifier[module_] (** identifier[x_dict] )
keyword[return] identifier[self] . identifier[module_] ( identifier[x] ,** identifier[fit_params] ) | def infer(self, x, **fit_params):
"""Perform a single inference step on a batch of data.
Parameters
----------
x : input data
A batch of the input data.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the ``self.train_split`` call.
"""
x = to_tensor(x, device=self.device)
if isinstance(x, dict):
x_dict = self._merge_x_and_fit_params(x, fit_params)
return self.module_(**x_dict) # depends on [control=['if'], data=[]]
return self.module_(x, **fit_params) |
def PTEST(cpu, dest, src):
""" PTEST
PTEST set the ZF flag if all bits in the result are 0 of the bitwise AND
of the first source operand (first operand) and the second source operand
(second operand). Also this sets the CF flag if all bits in the result
are 0 of the bitwise AND of the second source operand (second operand)
and the logical NOT of the destination operand.
"""
cpu.OF = False
cpu.AF = False
cpu.PF = False
cpu.SF = False
cpu.ZF = (Operators.EXTRACT(dest.read(), 0, 128) & Operators.EXTRACT(src.read(), 0, 128)) == 0
cpu.CF = (Operators.EXTRACT(src.read(), 0, 128) & ~(Operators.EXTRACT(dest.read(), 0, 128))) == 0 | def function[PTEST, parameter[cpu, dest, src]]:
constant[ PTEST
PTEST set the ZF flag if all bits in the result are 0 of the bitwise AND
of the first source operand (first operand) and the second source operand
(second operand). Also this sets the CF flag if all bits in the result
are 0 of the bitwise AND of the second source operand (second operand)
and the logical NOT of the destination operand.
]
name[cpu].OF assign[=] constant[False]
name[cpu].AF assign[=] constant[False]
name[cpu].PF assign[=] constant[False]
name[cpu].SF assign[=] constant[False]
name[cpu].ZF assign[=] compare[binary_operation[call[name[Operators].EXTRACT, parameter[call[name[dest].read, parameter[]], constant[0], constant[128]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[Operators].EXTRACT, parameter[call[name[src].read, parameter[]], constant[0], constant[128]]]] equal[==] constant[0]]
name[cpu].CF assign[=] compare[binary_operation[call[name[Operators].EXTRACT, parameter[call[name[src].read, parameter[]], constant[0], constant[128]]] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da18fe90970>] equal[==] constant[0]] | keyword[def] identifier[PTEST] ( identifier[cpu] , identifier[dest] , identifier[src] ):
literal[string]
identifier[cpu] . identifier[OF] = keyword[False]
identifier[cpu] . identifier[AF] = keyword[False]
identifier[cpu] . identifier[PF] = keyword[False]
identifier[cpu] . identifier[SF] = keyword[False]
identifier[cpu] . identifier[ZF] =( identifier[Operators] . identifier[EXTRACT] ( identifier[dest] . identifier[read] (), literal[int] , literal[int] )& identifier[Operators] . identifier[EXTRACT] ( identifier[src] . identifier[read] (), literal[int] , literal[int] ))== literal[int]
identifier[cpu] . identifier[CF] =( identifier[Operators] . identifier[EXTRACT] ( identifier[src] . identifier[read] (), literal[int] , literal[int] )&~( identifier[Operators] . identifier[EXTRACT] ( identifier[dest] . identifier[read] (), literal[int] , literal[int] )))== literal[int] | def PTEST(cpu, dest, src):
""" PTEST
PTEST set the ZF flag if all bits in the result are 0 of the bitwise AND
of the first source operand (first operand) and the second source operand
(second operand). Also this sets the CF flag if all bits in the result
are 0 of the bitwise AND of the second source operand (second operand)
and the logical NOT of the destination operand.
"""
cpu.OF = False
cpu.AF = False
cpu.PF = False
cpu.SF = False
cpu.ZF = Operators.EXTRACT(dest.read(), 0, 128) & Operators.EXTRACT(src.read(), 0, 128) == 0
cpu.CF = Operators.EXTRACT(src.read(), 0, 128) & ~Operators.EXTRACT(dest.read(), 0, 128) == 0 |
def expire(self, time=None):
"""Remove expired items from the cache."""
if time is None:
time = self.__timer()
root = self.__root
curr = root.next
links = self.__links
cache_delitem = Cache.__delitem__
while curr is not root and curr.expire < time:
cache_delitem(self, curr.key)
del links[curr.key]
next = curr.next
curr.unlink()
curr = next | def function[expire, parameter[self, time]]:
constant[Remove expired items from the cache.]
if compare[name[time] is constant[None]] begin[:]
variable[time] assign[=] call[name[self].__timer, parameter[]]
variable[root] assign[=] name[self].__root
variable[curr] assign[=] name[root].next
variable[links] assign[=] name[self].__links
variable[cache_delitem] assign[=] name[Cache].__delitem__
while <ast.BoolOp object at 0x7da20e9548e0> begin[:]
call[name[cache_delitem], parameter[name[self], name[curr].key]]
<ast.Delete object at 0x7da20e9556c0>
variable[next] assign[=] name[curr].next
call[name[curr].unlink, parameter[]]
variable[curr] assign[=] name[next] | keyword[def] identifier[expire] ( identifier[self] , identifier[time] = keyword[None] ):
literal[string]
keyword[if] identifier[time] keyword[is] keyword[None] :
identifier[time] = identifier[self] . identifier[__timer] ()
identifier[root] = identifier[self] . identifier[__root]
identifier[curr] = identifier[root] . identifier[next]
identifier[links] = identifier[self] . identifier[__links]
identifier[cache_delitem] = identifier[Cache] . identifier[__delitem__]
keyword[while] identifier[curr] keyword[is] keyword[not] identifier[root] keyword[and] identifier[curr] . identifier[expire] < identifier[time] :
identifier[cache_delitem] ( identifier[self] , identifier[curr] . identifier[key] )
keyword[del] identifier[links] [ identifier[curr] . identifier[key] ]
identifier[next] = identifier[curr] . identifier[next]
identifier[curr] . identifier[unlink] ()
identifier[curr] = identifier[next] | def expire(self, time=None):
"""Remove expired items from the cache."""
if time is None:
time = self.__timer() # depends on [control=['if'], data=['time']]
root = self.__root
curr = root.next
links = self.__links
cache_delitem = Cache.__delitem__
while curr is not root and curr.expire < time:
cache_delitem(self, curr.key)
del links[curr.key]
next = curr.next
curr.unlink()
curr = next # depends on [control=['while'], data=[]] |
def _unique_by_email(users_and_watches):
"""Given a sequence of (User/EmailUser, [Watch, ...]) pairs
clustered by email address (which is never ''), yield from each
cluster a single pair like this::
(User/EmailUser, [Watch, Watch, ...]).
The User/Email is that of...
(1) the first incoming pair where the User has an email and is not
anonymous, or, if there isn't such a user...
(2) the first pair.
The list of Watches consists of all those found in the cluster.
Compares email addresses case-insensitively.
"""
def ensure_user_has_email(user, cluster_email):
"""Make sure the user in the user-watch pair has an email address.
The caller guarantees us an email from either the user or the watch. If
the passed-in user has no email, we return an EmailUser instead having
the email address from the watch.
"""
# Some of these cases shouldn't happen, but we're tolerant.
if not getattr(user, 'email', ''):
user = EmailUser(cluster_email)
return user
# TODO: Do this instead with clever SQL that somehow returns just the
# best row for each email.
cluster_email = '' # email of current cluster
favorite_user = None # best user in cluster so far
watches = [] # all watches in cluster
for u, w in users_and_watches:
# w always has at least 1 Watch. All the emails are the same.
row_email = u.email or w[0].email
if cluster_email.lower() != row_email.lower():
# Starting a new cluster.
if cluster_email != '':
# Ship the favorites from the previous cluster:
yield (ensure_user_has_email(favorite_user, cluster_email),
watches)
favorite_user, watches = u, []
cluster_email = row_email
elif ((not favorite_user.email or not u.is_authenticated) and
u.email and u.is_authenticated):
favorite_user = u
watches.extend(w)
if favorite_user is not None:
yield ensure_user_has_email(favorite_user, cluster_email), watches | def function[_unique_by_email, parameter[users_and_watches]]:
constant[Given a sequence of (User/EmailUser, [Watch, ...]) pairs
clustered by email address (which is never ''), yield from each
cluster a single pair like this::
(User/EmailUser, [Watch, Watch, ...]).
The User/Email is that of...
(1) the first incoming pair where the User has an email and is not
anonymous, or, if there isn't such a user...
(2) the first pair.
The list of Watches consists of all those found in the cluster.
Compares email addresses case-insensitively.
]
def function[ensure_user_has_email, parameter[user, cluster_email]]:
constant[Make sure the user in the user-watch pair has an email address.
The caller guarantees us an email from either the user or the watch. If
the passed-in user has no email, we return an EmailUser instead having
the email address from the watch.
]
if <ast.UnaryOp object at 0x7da1b0534c10> begin[:]
variable[user] assign[=] call[name[EmailUser], parameter[name[cluster_email]]]
return[name[user]]
variable[cluster_email] assign[=] constant[]
variable[favorite_user] assign[=] constant[None]
variable[watches] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0535cf0>, <ast.Name object at 0x7da1b0534760>]]] in starred[name[users_and_watches]] begin[:]
variable[row_email] assign[=] <ast.BoolOp object at 0x7da1b0535d50>
if compare[call[name[cluster_email].lower, parameter[]] not_equal[!=] call[name[row_email].lower, parameter[]]] begin[:]
if compare[name[cluster_email] not_equal[!=] constant[]] begin[:]
<ast.Yield object at 0x7da1b0534b20>
<ast.Tuple object at 0x7da1b0534a00> assign[=] tuple[[<ast.Name object at 0x7da1b05359f0>, <ast.List object at 0x7da1b0535d80>]]
variable[cluster_email] assign[=] name[row_email]
call[name[watches].extend, parameter[name[w]]]
if compare[name[favorite_user] is_not constant[None]] begin[:]
<ast.Yield object at 0x7da1b04e1960> | keyword[def] identifier[_unique_by_email] ( identifier[users_and_watches] ):
literal[string]
keyword[def] identifier[ensure_user_has_email] ( identifier[user] , identifier[cluster_email] ):
literal[string]
keyword[if] keyword[not] identifier[getattr] ( identifier[user] , literal[string] , literal[string] ):
identifier[user] = identifier[EmailUser] ( identifier[cluster_email] )
keyword[return] identifier[user]
identifier[cluster_email] = literal[string]
identifier[favorite_user] = keyword[None]
identifier[watches] =[]
keyword[for] identifier[u] , identifier[w] keyword[in] identifier[users_and_watches] :
identifier[row_email] = identifier[u] . identifier[email] keyword[or] identifier[w] [ literal[int] ]. identifier[email]
keyword[if] identifier[cluster_email] . identifier[lower] ()!= identifier[row_email] . identifier[lower] ():
keyword[if] identifier[cluster_email] != literal[string] :
keyword[yield] ( identifier[ensure_user_has_email] ( identifier[favorite_user] , identifier[cluster_email] ),
identifier[watches] )
identifier[favorite_user] , identifier[watches] = identifier[u] ,[]
identifier[cluster_email] = identifier[row_email]
keyword[elif] (( keyword[not] identifier[favorite_user] . identifier[email] keyword[or] keyword[not] identifier[u] . identifier[is_authenticated] ) keyword[and]
identifier[u] . identifier[email] keyword[and] identifier[u] . identifier[is_authenticated] ):
identifier[favorite_user] = identifier[u]
identifier[watches] . identifier[extend] ( identifier[w] )
keyword[if] identifier[favorite_user] keyword[is] keyword[not] keyword[None] :
keyword[yield] identifier[ensure_user_has_email] ( identifier[favorite_user] , identifier[cluster_email] ), identifier[watches] | def _unique_by_email(users_and_watches):
"""Given a sequence of (User/EmailUser, [Watch, ...]) pairs
clustered by email address (which is never ''), yield from each
cluster a single pair like this::
(User/EmailUser, [Watch, Watch, ...]).
The User/Email is that of...
(1) the first incoming pair where the User has an email and is not
anonymous, or, if there isn't such a user...
(2) the first pair.
The list of Watches consists of all those found in the cluster.
Compares email addresses case-insensitively.
"""
def ensure_user_has_email(user, cluster_email):
"""Make sure the user in the user-watch pair has an email address.
The caller guarantees us an email from either the user or the watch. If
the passed-in user has no email, we return an EmailUser instead having
the email address from the watch.
"""
# Some of these cases shouldn't happen, but we're tolerant.
if not getattr(user, 'email', ''):
user = EmailUser(cluster_email) # depends on [control=['if'], data=[]]
return user
# TODO: Do this instead with clever SQL that somehow returns just the
# best row for each email.
cluster_email = '' # email of current cluster
favorite_user = None # best user in cluster so far
watches = [] # all watches in cluster
for (u, w) in users_and_watches:
# w always has at least 1 Watch. All the emails are the same.
row_email = u.email or w[0].email
if cluster_email.lower() != row_email.lower():
# Starting a new cluster.
if cluster_email != '':
# Ship the favorites from the previous cluster:
yield (ensure_user_has_email(favorite_user, cluster_email), watches) # depends on [control=['if'], data=['cluster_email']]
(favorite_user, watches) = (u, [])
cluster_email = row_email # depends on [control=['if'], data=[]]
elif (not favorite_user.email or not u.is_authenticated) and u.email and u.is_authenticated:
favorite_user = u # depends on [control=['if'], data=[]]
watches.extend(w) # depends on [control=['for'], data=[]]
if favorite_user is not None:
yield (ensure_user_has_email(favorite_user, cluster_email), watches) # depends on [control=['if'], data=['favorite_user']] |
def connect(components, connections, force_SLH=False, expand_simplify=True):
"""Connect a list of components according to a list of connections.
Args:
components (list): List of Circuit instances
connections (list): List of pairs ``((c1, port1), (c2, port2))`` where
``c1`` and ``c2`` are elements of `components` (or the index of the
element in `components`), and ``port1`` and ``port2`` are the
indices (or port names) of the ports of the two components that
should be connected
force_SLH (bool): If True, convert the result to an SLH object
expand_simplify (bool): If the result is an SLH object, expand and
simplify the circuit after each feedback connection is added
Example:
>>> A = CircuitSymbol('A', cdim=2)
>>> B = CircuitSymbol('B', cdim=2)
>>> BS = Beamsplitter()
>>> circuit = connect(
... components=[A, B, BS],
... connections=[
... ((A, 0), (BS, 'in')),
... ((BS, 'tr'), (B, 0)),
... ((A, 1), (B, 1))])
>>> print(unicode(circuit).replace('cid(1)', '1'))
(B ⊞ 1) ◁ Perm(0, 2, 1) ◁ (BS(π/4) ⊞ 1) ◁ Perm(0, 2, 1) ◁ (A ⊞ 1)
The above example corresponds to the circuit diagram::
┌─┐ ┌───────┐ ┌─┐
>┤ ├───>┤ ├───>┤ ├
│A│ │BS(π/4)│ │B│
>┤ ├┐┌─>┤ ├┐┌─>┤ ├
└─┘└│ └───────┘└│ └─┘
│┐ │┐
─────┘└───────────┘└────
Raises:
ValueError: if `connections` includes any invalid entries
Note:
The list of `components` may contain duplicate entries, but in this
case you must use a positional index in `connections` to refer to any
duplicate component. Alternatively, use unique components by defining
different labels.
"""
combined = Concatenation.create(*components)
cdims = [c.cdim for c in components]
offsets = _cumsum([0] + cdims[:-1])
imap = []
omap = []
counts = defaultdict(int)
for component in components:
counts[component] += 1
for (ic, ((c1, op), (c2, ip))) in enumerate(connections):
# check c1; convert to index int
if not isinstance(c1, int):
if counts[c1] > 1:
raise ValueError(
"Component %s appears more than once in list of "
"components %r. You must reference it by index in the "
"connection %r" % (c1, components, connections[ic]))
try:
c1 = components.index(c1)
except ValueError:
raise ValueError(
"The component %s in connection %r is not in the list of "
"components %r" % (c1, connections[ic], components))
else:
if c1 < 0 or c1 >= len(components):
raise ValueError(
"Invalid index %d in connection %r"
% (c1, connections[ic]))
# check c2; convert to index int
if not isinstance(c2, int):
if counts[c2] > 1:
raise ValueError(
"Component %s appears more than once in list of "
"components %r. You must reference it by index in the "
"connection %r" % (c2, components, connections[ic]))
try:
c2 = components.index(c2)
except ValueError:
raise ValueError(
"The component %s in connection %r is not in the list of "
"components %r" % (c2, connections[ic], components))
else:
if c2 < 0 or c2 >= len(components):
raise ValueError(
"Invalid index %d in connection %r"
% (c2, connections[ic]))
# check op; convert to index int
if not (isinstance(op, int)):
try:
op = components[c1].PORTSOUT.index(op)
except AttributeError:
raise ValueError(
"The component %s does not define PORTSOUT labels. "
"You cannot use the string %r to refer to a port"
% (components[c1], op))
except ValueError:
raise ValueError(
"The connection %r refers to an invalid output "
"channel %s for component %r"
% (connections[ic], op, components[c1]))
else:
if op < 0 or op >= components[c1].cdim:
raise ValueError(
"Invalid output channel %d <0 or >=%d (cdim of %r) in %r"
% (op, components[c1].cdim, components[c1],
connections[ic]))
# check ip; convert to index int
if not (isinstance(ip, int)):
try:
ip = components[c2].PORTSIN.index(ip)
except AttributeError:
raise ValueError(
"The component %s does not define PORTSIN labels. "
"You cannot use the string %r to refer to a port"
% (components[c2], ip))
except ValueError:
raise ValueError(
"The connection %r refers to an invalid input channel "
"%s for component %r"
% (connections[ic], ip, components[c2]))
else:
if ip < 0 or ip >= components[c2].cdim:
raise ValueError(
"Invalid input channel %d <0 or >=%d (cdim of %r) in %r"
% (ip, components[c2].cdim, components[c2],
connections[ic]))
op_idx = offsets[c1] + op
ip_idx = offsets[c2] + ip
imap.append(ip_idx)
omap.append(op_idx)
n = combined.cdim
nfb = len(connections)
imapping = map_channels(
{k: im for (k, im) in zip(range(n-nfb, n), imap)},
n)
omapping = map_channels(
{om: k for (k, om) in zip(range(n-nfb, n), omap)},
n)
combined = omapping << combined << imapping
if force_SLH:
combined = combined.toSLH()
for k in range(nfb):
combined = combined.feedback()
if isinstance(combined, SLH) and expand_simplify:
combined = combined.expand().simplify_scalar()
return combined | def function[connect, parameter[components, connections, force_SLH, expand_simplify]]:
constant[Connect a list of components according to a list of connections.
Args:
components (list): List of Circuit instances
connections (list): List of pairs ``((c1, port1), (c2, port2))`` where
``c1`` and ``c2`` are elements of `components` (or the index of the
element in `components`), and ``port1`` and ``port2`` are the
indices (or port names) of the ports of the two components that
should be connected
force_SLH (bool): If True, convert the result to an SLH object
expand_simplify (bool): If the result is an SLH object, expand and
simplify the circuit after each feedback connection is added
Example:
>>> A = CircuitSymbol('A', cdim=2)
>>> B = CircuitSymbol('B', cdim=2)
>>> BS = Beamsplitter()
>>> circuit = connect(
... components=[A, B, BS],
... connections=[
... ((A, 0), (BS, 'in')),
... ((BS, 'tr'), (B, 0)),
... ((A, 1), (B, 1))])
>>> print(unicode(circuit).replace('cid(1)', '1'))
(B ⊞ 1) ◁ Perm(0, 2, 1) ◁ (BS(π/4) ⊞ 1) ◁ Perm(0, 2, 1) ◁ (A ⊞ 1)
The above example corresponds to the circuit diagram::
┌─┐ ┌───────┐ ┌─┐
>┤ ├───>┤ ├───>┤ ├
│A│ │BS(π/4)│ │B│
>┤ ├┐┌─>┤ ├┐┌─>┤ ├
└─┘└│ └───────┘└│ └─┘
│┐ │┐
─────┘└───────────┘└────
Raises:
ValueError: if `connections` includes any invalid entries
Note:
The list of `components` may contain duplicate entries, but in this
case you must use a positional index in `connections` to refer to any
duplicate component. Alternatively, use unique components by defining
different labels.
]
variable[combined] assign[=] call[name[Concatenation].create, parameter[<ast.Starred object at 0x7da2054a7eb0>]]
variable[cdims] assign[=] <ast.ListComp object at 0x7da2054a4c10>
variable[offsets] assign[=] call[name[_cumsum], parameter[binary_operation[list[[<ast.Constant object at 0x7da2054a54b0>]] + call[name[cdims]][<ast.Slice object at 0x7da2054a4c70>]]]]
variable[imap] assign[=] list[[]]
variable[omap] assign[=] list[[]]
variable[counts] assign[=] call[name[defaultdict], parameter[name[int]]]
for taget[name[component]] in starred[name[components]] begin[:]
<ast.AugAssign object at 0x7da2054a66e0>
for taget[tuple[[<ast.Name object at 0x7da2054a7040>, <ast.Tuple object at 0x7da2054a74c0>]]] in starred[call[name[enumerate], parameter[name[connections]]]] begin[:]
if <ast.UnaryOp object at 0x7da2054a5360> begin[:]
if compare[call[name[counts]][name[c1]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da2054a5c60>
<ast.Try object at 0x7da2054a6920>
if <ast.UnaryOp object at 0x7da2054a67d0> begin[:]
if compare[call[name[counts]][name[c2]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da2054a7070>
<ast.Try object at 0x7da2054a4640>
if <ast.UnaryOp object at 0x7da2054a6cb0> begin[:]
<ast.Try object at 0x7da2054a44f0>
if <ast.UnaryOp object at 0x7da2054a70d0> begin[:]
<ast.Try object at 0x7da2047e9150>
variable[op_idx] assign[=] binary_operation[call[name[offsets]][name[c1]] + name[op]]
variable[ip_idx] assign[=] binary_operation[call[name[offsets]][name[c2]] + name[ip]]
call[name[imap].append, parameter[name[ip_idx]]]
call[name[omap].append, parameter[name[op_idx]]]
variable[n] assign[=] name[combined].cdim
variable[nfb] assign[=] call[name[len], parameter[name[connections]]]
variable[imapping] assign[=] call[name[map_channels], parameter[<ast.DictComp object at 0x7da2047e9960>, name[n]]]
variable[omapping] assign[=] call[name[map_channels], parameter[<ast.DictComp object at 0x7da2047e8100>, name[n]]]
variable[combined] assign[=] binary_operation[binary_operation[name[omapping] <ast.LShift object at 0x7da2590d69e0> name[combined]] <ast.LShift object at 0x7da2590d69e0> name[imapping]]
if name[force_SLH] begin[:]
variable[combined] assign[=] call[name[combined].toSLH, parameter[]]
for taget[name[k]] in starred[call[name[range], parameter[name[nfb]]]] begin[:]
variable[combined] assign[=] call[name[combined].feedback, parameter[]]
if <ast.BoolOp object at 0x7da1b27b8520> begin[:]
variable[combined] assign[=] call[call[name[combined].expand, parameter[]].simplify_scalar, parameter[]]
return[name[combined]] | keyword[def] identifier[connect] ( identifier[components] , identifier[connections] , identifier[force_SLH] = keyword[False] , identifier[expand_simplify] = keyword[True] ):
literal[string]
identifier[combined] = identifier[Concatenation] . identifier[create] (* identifier[components] )
identifier[cdims] =[ identifier[c] . identifier[cdim] keyword[for] identifier[c] keyword[in] identifier[components] ]
identifier[offsets] = identifier[_cumsum] ([ literal[int] ]+ identifier[cdims] [:- literal[int] ])
identifier[imap] =[]
identifier[omap] =[]
identifier[counts] = identifier[defaultdict] ( identifier[int] )
keyword[for] identifier[component] keyword[in] identifier[components] :
identifier[counts] [ identifier[component] ]+= literal[int]
keyword[for] ( identifier[ic] ,(( identifier[c1] , identifier[op] ),( identifier[c2] , identifier[ip] ))) keyword[in] identifier[enumerate] ( identifier[connections] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[c1] , identifier[int] ):
keyword[if] identifier[counts] [ identifier[c1] ]> literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string] %( identifier[c1] , identifier[components] , identifier[connections] [ identifier[ic] ]))
keyword[try] :
identifier[c1] = identifier[components] . identifier[index] ( identifier[c1] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[c1] , identifier[connections] [ identifier[ic] ], identifier[components] ))
keyword[else] :
keyword[if] identifier[c1] < literal[int] keyword[or] identifier[c1] >= identifier[len] ( identifier[components] ):
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[c1] , identifier[connections] [ identifier[ic] ]))
keyword[if] keyword[not] identifier[isinstance] ( identifier[c2] , identifier[int] ):
keyword[if] identifier[counts] [ identifier[c2] ]> literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string] %( identifier[c2] , identifier[components] , identifier[connections] [ identifier[ic] ]))
keyword[try] :
identifier[c2] = identifier[components] . identifier[index] ( identifier[c2] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[c2] , identifier[connections] [ identifier[ic] ], identifier[components] ))
keyword[else] :
keyword[if] identifier[c2] < literal[int] keyword[or] identifier[c2] >= identifier[len] ( identifier[components] ):
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[c2] , identifier[connections] [ identifier[ic] ]))
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[op] , identifier[int] )):
keyword[try] :
identifier[op] = identifier[components] [ identifier[c1] ]. identifier[PORTSOUT] . identifier[index] ( identifier[op] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
%( identifier[components] [ identifier[c1] ], identifier[op] ))
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
%( identifier[connections] [ identifier[ic] ], identifier[op] , identifier[components] [ identifier[c1] ]))
keyword[else] :
keyword[if] identifier[op] < literal[int] keyword[or] identifier[op] >= identifier[components] [ identifier[c1] ]. identifier[cdim] :
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[op] , identifier[components] [ identifier[c1] ]. identifier[cdim] , identifier[components] [ identifier[c1] ],
identifier[connections] [ identifier[ic] ]))
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[ip] , identifier[int] )):
keyword[try] :
identifier[ip] = identifier[components] [ identifier[c2] ]. identifier[PORTSIN] . identifier[index] ( identifier[ip] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
%( identifier[components] [ identifier[c2] ], identifier[ip] ))
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
%( identifier[connections] [ identifier[ic] ], identifier[ip] , identifier[components] [ identifier[c2] ]))
keyword[else] :
keyword[if] identifier[ip] < literal[int] keyword[or] identifier[ip] >= identifier[components] [ identifier[c2] ]. identifier[cdim] :
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[ip] , identifier[components] [ identifier[c2] ]. identifier[cdim] , identifier[components] [ identifier[c2] ],
identifier[connections] [ identifier[ic] ]))
identifier[op_idx] = identifier[offsets] [ identifier[c1] ]+ identifier[op]
identifier[ip_idx] = identifier[offsets] [ identifier[c2] ]+ identifier[ip]
identifier[imap] . identifier[append] ( identifier[ip_idx] )
identifier[omap] . identifier[append] ( identifier[op_idx] )
identifier[n] = identifier[combined] . identifier[cdim]
identifier[nfb] = identifier[len] ( identifier[connections] )
identifier[imapping] = identifier[map_channels] (
{ identifier[k] : identifier[im] keyword[for] ( identifier[k] , identifier[im] ) keyword[in] identifier[zip] ( identifier[range] ( identifier[n] - identifier[nfb] , identifier[n] ), identifier[imap] )},
identifier[n] )
identifier[omapping] = identifier[map_channels] (
{ identifier[om] : identifier[k] keyword[for] ( identifier[k] , identifier[om] ) keyword[in] identifier[zip] ( identifier[range] ( identifier[n] - identifier[nfb] , identifier[n] ), identifier[omap] )},
identifier[n] )
identifier[combined] = identifier[omapping] << identifier[combined] << identifier[imapping]
keyword[if] identifier[force_SLH] :
identifier[combined] = identifier[combined] . identifier[toSLH] ()
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[nfb] ):
identifier[combined] = identifier[combined] . identifier[feedback] ()
keyword[if] identifier[isinstance] ( identifier[combined] , identifier[SLH] ) keyword[and] identifier[expand_simplify] :
identifier[combined] = identifier[combined] . identifier[expand] (). identifier[simplify_scalar] ()
keyword[return] identifier[combined] | def connect(components, connections, force_SLH=False, expand_simplify=True):
"""Connect a list of components according to a list of connections.
Args:
components (list): List of Circuit instances
connections (list): List of pairs ``((c1, port1), (c2, port2))`` where
``c1`` and ``c2`` are elements of `components` (or the index of the
element in `components`), and ``port1`` and ``port2`` are the
indices (or port names) of the ports of the two components that
should be connected
force_SLH (bool): If True, convert the result to an SLH object
expand_simplify (bool): If the result is an SLH object, expand and
simplify the circuit after each feedback connection is added
Example:
>>> A = CircuitSymbol('A', cdim=2)
>>> B = CircuitSymbol('B', cdim=2)
>>> BS = Beamsplitter()
>>> circuit = connect(
... components=[A, B, BS],
... connections=[
... ((A, 0), (BS, 'in')),
... ((BS, 'tr'), (B, 0)),
... ((A, 1), (B, 1))])
>>> print(unicode(circuit).replace('cid(1)', '1'))
(B ⊞ 1) ◁ Perm(0, 2, 1) ◁ (BS(π/4) ⊞ 1) ◁ Perm(0, 2, 1) ◁ (A ⊞ 1)
The above example corresponds to the circuit diagram::
┌─┐ ┌───────┐ ┌─┐
>┤ ├───>┤ ├───>┤ ├
│A│ │BS(π/4)│ │B│
>┤ ├┐┌─>┤ ├┐┌─>┤ ├
└─┘└│ └───────┘└│ └─┘
│┐ │┐
─────┘└───────────┘└────
Raises:
ValueError: if `connections` includes any invalid entries
Note:
The list of `components` may contain duplicate entries, but in this
case you must use a positional index in `connections` to refer to any
duplicate component. Alternatively, use unique components by defining
different labels.
"""
combined = Concatenation.create(*components)
cdims = [c.cdim for c in components]
offsets = _cumsum([0] + cdims[:-1])
imap = []
omap = []
counts = defaultdict(int)
for component in components:
counts[component] += 1 # depends on [control=['for'], data=['component']]
for (ic, ((c1, op), (c2, ip))) in enumerate(connections):
# check c1; convert to index int
if not isinstance(c1, int):
if counts[c1] > 1:
raise ValueError('Component %s appears more than once in list of components %r. You must reference it by index in the connection %r' % (c1, components, connections[ic])) # depends on [control=['if'], data=[]]
try:
c1 = components.index(c1) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('The component %s in connection %r is not in the list of components %r' % (c1, connections[ic], components)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif c1 < 0 or c1 >= len(components):
raise ValueError('Invalid index %d in connection %r' % (c1, connections[ic])) # depends on [control=['if'], data=[]]
# check c2; convert to index int
if not isinstance(c2, int):
if counts[c2] > 1:
raise ValueError('Component %s appears more than once in list of components %r. You must reference it by index in the connection %r' % (c2, components, connections[ic])) # depends on [control=['if'], data=[]]
try:
c2 = components.index(c2) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('The component %s in connection %r is not in the list of components %r' % (c2, connections[ic], components)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif c2 < 0 or c2 >= len(components):
raise ValueError('Invalid index %d in connection %r' % (c2, connections[ic])) # depends on [control=['if'], data=[]]
# check op; convert to index int
if not isinstance(op, int):
try:
op = components[c1].PORTSOUT.index(op) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('The component %s does not define PORTSOUT labels. You cannot use the string %r to refer to a port' % (components[c1], op)) # depends on [control=['except'], data=[]]
except ValueError:
raise ValueError('The connection %r refers to an invalid output channel %s for component %r' % (connections[ic], op, components[c1])) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif op < 0 or op >= components[c1].cdim:
raise ValueError('Invalid output channel %d <0 or >=%d (cdim of %r) in %r' % (op, components[c1].cdim, components[c1], connections[ic])) # depends on [control=['if'], data=[]]
# check ip; convert to index int
if not isinstance(ip, int):
try:
ip = components[c2].PORTSIN.index(ip) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('The component %s does not define PORTSIN labels. You cannot use the string %r to refer to a port' % (components[c2], ip)) # depends on [control=['except'], data=[]]
except ValueError:
raise ValueError('The connection %r refers to an invalid input channel %s for component %r' % (connections[ic], ip, components[c2])) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif ip < 0 or ip >= components[c2].cdim:
raise ValueError('Invalid input channel %d <0 or >=%d (cdim of %r) in %r' % (ip, components[c2].cdim, components[c2], connections[ic])) # depends on [control=['if'], data=[]]
op_idx = offsets[c1] + op
ip_idx = offsets[c2] + ip
imap.append(ip_idx)
omap.append(op_idx) # depends on [control=['for'], data=[]]
n = combined.cdim
nfb = len(connections)
imapping = map_channels({k: im for (k, im) in zip(range(n - nfb, n), imap)}, n)
omapping = map_channels({om: k for (k, om) in zip(range(n - nfb, n), omap)}, n)
combined = omapping << combined << imapping
if force_SLH:
combined = combined.toSLH() # depends on [control=['if'], data=[]]
for k in range(nfb):
combined = combined.feedback()
if isinstance(combined, SLH) and expand_simplify:
combined = combined.expand().simplify_scalar() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return combined |
def _expand(template, seq):
"""
seq IS TUPLE OF OBJECTS IN PATH ORDER INTO THE DATA TREE
"""
if is_text(template):
return _simple_expand(template, seq)
elif is_data(template):
# EXPAND LISTS OF ITEMS USING THIS FORM
# {"from":from, "template":template, "separator":separator}
template = wrap(template)
assert template["from"], "Expecting template to have 'from' attribute"
assert template.template, "Expecting template to have 'template' attribute"
data = seq[-1][template["from"]]
output = []
for d in data:
s = seq + (d,)
output.append(_expand(template.template, s))
return coalesce(template.separator, "").join(output)
elif is_list(template):
return "".join(_expand(t, seq) for t in template)
else:
if not _Log:
_late_import()
_Log.error("can not handle") | def function[_expand, parameter[template, seq]]:
constant[
seq IS TUPLE OF OBJECTS IN PATH ORDER INTO THE DATA TREE
]
if call[name[is_text], parameter[name[template]]] begin[:]
return[call[name[_simple_expand], parameter[name[template], name[seq]]]] | keyword[def] identifier[_expand] ( identifier[template] , identifier[seq] ):
literal[string]
keyword[if] identifier[is_text] ( identifier[template] ):
keyword[return] identifier[_simple_expand] ( identifier[template] , identifier[seq] )
keyword[elif] identifier[is_data] ( identifier[template] ):
identifier[template] = identifier[wrap] ( identifier[template] )
keyword[assert] identifier[template] [ literal[string] ], literal[string]
keyword[assert] identifier[template] . identifier[template] , literal[string]
identifier[data] = identifier[seq] [- literal[int] ][ identifier[template] [ literal[string] ]]
identifier[output] =[]
keyword[for] identifier[d] keyword[in] identifier[data] :
identifier[s] = identifier[seq] +( identifier[d] ,)
identifier[output] . identifier[append] ( identifier[_expand] ( identifier[template] . identifier[template] , identifier[s] ))
keyword[return] identifier[coalesce] ( identifier[template] . identifier[separator] , literal[string] ). identifier[join] ( identifier[output] )
keyword[elif] identifier[is_list] ( identifier[template] ):
keyword[return] literal[string] . identifier[join] ( identifier[_expand] ( identifier[t] , identifier[seq] ) keyword[for] identifier[t] keyword[in] identifier[template] )
keyword[else] :
keyword[if] keyword[not] identifier[_Log] :
identifier[_late_import] ()
identifier[_Log] . identifier[error] ( literal[string] ) | def _expand(template, seq):
"""
seq IS TUPLE OF OBJECTS IN PATH ORDER INTO THE DATA TREE
"""
if is_text(template):
return _simple_expand(template, seq) # depends on [control=['if'], data=[]]
elif is_data(template):
# EXPAND LISTS OF ITEMS USING THIS FORM
# {"from":from, "template":template, "separator":separator}
template = wrap(template)
assert template['from'], "Expecting template to have 'from' attribute"
assert template.template, "Expecting template to have 'template' attribute"
data = seq[-1][template['from']]
output = []
for d in data:
s = seq + (d,)
output.append(_expand(template.template, s)) # depends on [control=['for'], data=['d']]
return coalesce(template.separator, '').join(output) # depends on [control=['if'], data=[]]
elif is_list(template):
return ''.join((_expand(t, seq) for t in template)) # depends on [control=['if'], data=[]]
else:
if not _Log:
_late_import() # depends on [control=['if'], data=[]]
_Log.error('can not handle') |
def get_instance(self, payload):
"""
Build an instance of DailyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.usage.record.daily.DailyInstance
:rtype: twilio.rest.api.v2010.account.usage.record.daily.DailyInstance
"""
return DailyInstance(self._version, payload, account_sid=self._solution['account_sid'], ) | def function[get_instance, parameter[self, payload]]:
constant[
Build an instance of DailyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.usage.record.daily.DailyInstance
:rtype: twilio.rest.api.v2010.account.usage.record.daily.DailyInstance
]
return[call[name[DailyInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ):
literal[string]
keyword[return] identifier[DailyInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],) | def get_instance(self, payload):
"""
Build an instance of DailyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.usage.record.daily.DailyInstance
:rtype: twilio.rest.api.v2010.account.usage.record.daily.DailyInstance
"""
return DailyInstance(self._version, payload, account_sid=self._solution['account_sid']) |
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor,
mask: torch.LongTensor) -> torch.Tensor:
"""
Parameters
----------
inputs : ``torch.Tensor``, required.
A Tensor of shape ``(batch_size, sequence_length, hidden_size)``.
mask : ``torch.LongTensor``, required.
A binary mask of shape ``(batch_size, sequence_length)`` representing the
non-padded elements in each sequence in the batch.
Returns
-------
A ``torch.Tensor`` of shape (num_layers, batch_size, sequence_length, hidden_size),
where the num_layers dimension represents the LSTM output from that layer.
"""
batch_size, total_sequence_length = mask.size()
stacked_sequence_output, final_states, restoration_indices = \
self.sort_and_run_forward(self._lstm_forward, inputs, mask)
num_layers, num_valid, returned_timesteps, encoder_dim = stacked_sequence_output.size()
# Add back invalid rows which were removed in the call to sort_and_run_forward.
if num_valid < batch_size:
zeros = stacked_sequence_output.new_zeros(num_layers,
batch_size - num_valid,
returned_timesteps,
encoder_dim)
stacked_sequence_output = torch.cat([stacked_sequence_output, zeros], 1)
# The states also need to have invalid rows added back.
new_states = []
for state in final_states:
state_dim = state.size(-1)
zeros = state.new_zeros(num_layers, batch_size - num_valid, state_dim)
new_states.append(torch.cat([state, zeros], 1))
final_states = new_states
# It's possible to need to pass sequences which are padded to longer than the
# max length of the sequence to a Seq2StackEncoder. However, packing and unpacking
# the sequences mean that the returned tensor won't include these dimensions, because
# the RNN did not need to process them. We add them back on in the form of zeros here.
sequence_length_difference = total_sequence_length - returned_timesteps
if sequence_length_difference > 0:
zeros = stacked_sequence_output.new_zeros(num_layers,
batch_size,
sequence_length_difference,
stacked_sequence_output[0].size(-1))
stacked_sequence_output = torch.cat([stacked_sequence_output, zeros], 2)
self._update_states(final_states, restoration_indices)
# Restore the original indices and return the sequence.
# Has shape (num_layers, batch_size, sequence_length, hidden_size)
return stacked_sequence_output.index_select(1, restoration_indices) | def function[forward, parameter[self, inputs, mask]]:
constant[
Parameters
----------
inputs : ``torch.Tensor``, required.
A Tensor of shape ``(batch_size, sequence_length, hidden_size)``.
mask : ``torch.LongTensor``, required.
A binary mask of shape ``(batch_size, sequence_length)`` representing the
non-padded elements in each sequence in the batch.
Returns
-------
A ``torch.Tensor`` of shape (num_layers, batch_size, sequence_length, hidden_size),
where the num_layers dimension represents the LSTM output from that layer.
]
<ast.Tuple object at 0x7da2054a4c10> assign[=] call[name[mask].size, parameter[]]
<ast.Tuple object at 0x7da2054a4580> assign[=] call[name[self].sort_and_run_forward, parameter[name[self]._lstm_forward, name[inputs], name[mask]]]
<ast.Tuple object at 0x7da2054a6e00> assign[=] call[name[stacked_sequence_output].size, parameter[]]
if compare[name[num_valid] less[<] name[batch_size]] begin[:]
variable[zeros] assign[=] call[name[stacked_sequence_output].new_zeros, parameter[name[num_layers], binary_operation[name[batch_size] - name[num_valid]], name[returned_timesteps], name[encoder_dim]]]
variable[stacked_sequence_output] assign[=] call[name[torch].cat, parameter[list[[<ast.Name object at 0x7da20c796ad0>, <ast.Name object at 0x7da20c795ba0>]], constant[1]]]
variable[new_states] assign[=] list[[]]
for taget[name[state]] in starred[name[final_states]] begin[:]
variable[state_dim] assign[=] call[name[state].size, parameter[<ast.UnaryOp object at 0x7da2054a45e0>]]
variable[zeros] assign[=] call[name[state].new_zeros, parameter[name[num_layers], binary_operation[name[batch_size] - name[num_valid]], name[state_dim]]]
call[name[new_states].append, parameter[call[name[torch].cat, parameter[list[[<ast.Name object at 0x7da2054a51e0>, <ast.Name object at 0x7da2054a6b00>]], constant[1]]]]]
variable[final_states] assign[=] name[new_states]
variable[sequence_length_difference] assign[=] binary_operation[name[total_sequence_length] - name[returned_timesteps]]
if compare[name[sequence_length_difference] greater[>] constant[0]] begin[:]
variable[zeros] assign[=] call[name[stacked_sequence_output].new_zeros, parameter[name[num_layers], name[batch_size], name[sequence_length_difference], call[call[name[stacked_sequence_output]][constant[0]].size, parameter[<ast.UnaryOp object at 0x7da2054a4160>]]]]
variable[stacked_sequence_output] assign[=] call[name[torch].cat, parameter[list[[<ast.Name object at 0x7da2054a70d0>, <ast.Name object at 0x7da2054a79d0>]], constant[2]]]
call[name[self]._update_states, parameter[name[final_states], name[restoration_indices]]]
return[call[name[stacked_sequence_output].index_select, parameter[constant[1], name[restoration_indices]]]] | keyword[def] identifier[forward] ( identifier[self] ,
identifier[inputs] : identifier[torch] . identifier[Tensor] ,
identifier[mask] : identifier[torch] . identifier[LongTensor] )-> identifier[torch] . identifier[Tensor] :
literal[string]
identifier[batch_size] , identifier[total_sequence_length] = identifier[mask] . identifier[size] ()
identifier[stacked_sequence_output] , identifier[final_states] , identifier[restoration_indices] = identifier[self] . identifier[sort_and_run_forward] ( identifier[self] . identifier[_lstm_forward] , identifier[inputs] , identifier[mask] )
identifier[num_layers] , identifier[num_valid] , identifier[returned_timesteps] , identifier[encoder_dim] = identifier[stacked_sequence_output] . identifier[size] ()
keyword[if] identifier[num_valid] < identifier[batch_size] :
identifier[zeros] = identifier[stacked_sequence_output] . identifier[new_zeros] ( identifier[num_layers] ,
identifier[batch_size] - identifier[num_valid] ,
identifier[returned_timesteps] ,
identifier[encoder_dim] )
identifier[stacked_sequence_output] = identifier[torch] . identifier[cat] ([ identifier[stacked_sequence_output] , identifier[zeros] ], literal[int] )
identifier[new_states] =[]
keyword[for] identifier[state] keyword[in] identifier[final_states] :
identifier[state_dim] = identifier[state] . identifier[size] (- literal[int] )
identifier[zeros] = identifier[state] . identifier[new_zeros] ( identifier[num_layers] , identifier[batch_size] - identifier[num_valid] , identifier[state_dim] )
identifier[new_states] . identifier[append] ( identifier[torch] . identifier[cat] ([ identifier[state] , identifier[zeros] ], literal[int] ))
identifier[final_states] = identifier[new_states]
identifier[sequence_length_difference] = identifier[total_sequence_length] - identifier[returned_timesteps]
keyword[if] identifier[sequence_length_difference] > literal[int] :
identifier[zeros] = identifier[stacked_sequence_output] . identifier[new_zeros] ( identifier[num_layers] ,
identifier[batch_size] ,
identifier[sequence_length_difference] ,
identifier[stacked_sequence_output] [ literal[int] ]. identifier[size] (- literal[int] ))
identifier[stacked_sequence_output] = identifier[torch] . identifier[cat] ([ identifier[stacked_sequence_output] , identifier[zeros] ], literal[int] )
identifier[self] . identifier[_update_states] ( identifier[final_states] , identifier[restoration_indices] )
keyword[return] identifier[stacked_sequence_output] . identifier[index_select] ( literal[int] , identifier[restoration_indices] ) | def forward(self, inputs: torch.Tensor, mask: torch.LongTensor) -> torch.Tensor: # pylint: disable=arguments-differ
'\n Parameters\n ----------\n inputs : ``torch.Tensor``, required.\n A Tensor of shape ``(batch_size, sequence_length, hidden_size)``.\n mask : ``torch.LongTensor``, required.\n A binary mask of shape ``(batch_size, sequence_length)`` representing the\n non-padded elements in each sequence in the batch.\n\n Returns\n -------\n A ``torch.Tensor`` of shape (num_layers, batch_size, sequence_length, hidden_size),\n where the num_layers dimension represents the LSTM output from that layer.\n '
(batch_size, total_sequence_length) = mask.size()
(stacked_sequence_output, final_states, restoration_indices) = self.sort_and_run_forward(self._lstm_forward, inputs, mask)
(num_layers, num_valid, returned_timesteps, encoder_dim) = stacked_sequence_output.size()
# Add back invalid rows which were removed in the call to sort_and_run_forward.
if num_valid < batch_size:
zeros = stacked_sequence_output.new_zeros(num_layers, batch_size - num_valid, returned_timesteps, encoder_dim)
stacked_sequence_output = torch.cat([stacked_sequence_output, zeros], 1)
# The states also need to have invalid rows added back.
new_states = []
for state in final_states:
state_dim = state.size(-1)
zeros = state.new_zeros(num_layers, batch_size - num_valid, state_dim)
new_states.append(torch.cat([state, zeros], 1)) # depends on [control=['for'], data=['state']]
final_states = new_states # depends on [control=['if'], data=['num_valid', 'batch_size']]
# It's possible to need to pass sequences which are padded to longer than the
# max length of the sequence to a Seq2StackEncoder. However, packing and unpacking
# the sequences mean that the returned tensor won't include these dimensions, because
# the RNN did not need to process them. We add them back on in the form of zeros here.
sequence_length_difference = total_sequence_length - returned_timesteps
if sequence_length_difference > 0:
zeros = stacked_sequence_output.new_zeros(num_layers, batch_size, sequence_length_difference, stacked_sequence_output[0].size(-1))
stacked_sequence_output = torch.cat([stacked_sequence_output, zeros], 2) # depends on [control=['if'], data=['sequence_length_difference']]
self._update_states(final_states, restoration_indices)
# Restore the original indices and return the sequence.
# Has shape (num_layers, batch_size, sequence_length, hidden_size)
return stacked_sequence_output.index_select(1, restoration_indices) |
def range_difference(data, ground_truth, mask=None, normalized=False,
                     force_lower_is_better=True):
    r"""Return dynamic range difference between ``data`` and ``ground_truth``.

    Compares the peak-to-peak range of the input (``data``) against that of
    the reference (``ground_truth``), optionally restricted to a region of
    interest (``mask``) and optionally normalized to ``[0, 1]``.

    Parameters
    ----------
    data : `array-like`
        Input data to compare to the ground truth.
    ground_truth : `array-like`
        Reference to which ``data`` should be compared.
    mask : `array-like`, optional
        Binary mask or index array defining the ROI in which the FOM is
        evaluated.
    normalized : bool, optional
        If ``True``, normalize the FOM to lie in [0, 1].
    force_lower_is_better : bool, optional
        Present only for interface compatibility with other figures of
        merit; lower values already mean better matches for this FOM.

    Returns
    -------
    rd : float
        FOM value, where a lower value means a better match.

    Notes
    -----
    The FOM evaluates

    .. math::
        \mathrm{RD}(f, g) = \Big|
            \big(\max(f) - \min(f) \big) -
            \big(\max(g) - \min(g) \big)
            \Big|

    or, in normalized form

    .. math::
        \mathrm{RD_N}(f, g) = \frac{
            \Big|
            \big(\max(f) - \min(f) \big) -
            \big(\max(g) - \min(g) \big)
            \Big|}{
            \big(\max(f) - \min(f) \big) +
            \big(\max(g) - \min(g) \big)}

    The normalized variant takes values in :math:`[0, 1]`.
    """
    data = np.asarray(data)
    ground_truth = np.asarray(ground_truth)

    # Restrict both arrays to the region of interest, if one was given.
    if mask is not None:
        roi = np.asarray(mask, dtype=bool)
        data = data[roi]
        ground_truth = ground_truth[roi]

    # Peak-to-peak (max - min) range of each array.
    range_data = np.ptp(data)
    range_truth = np.ptp(ground_truth)

    fom = np.abs(range_data - range_truth)
    if normalized:
        denom = np.abs(range_data + range_truth)
        # Both ranges zero means a perfect (trivial) match.
        fom = 0.0 if denom == 0 else fom / denom
    return fom
return fom | def function[range_difference, parameter[data, ground_truth, mask, normalized, force_lower_is_better]]:
constant[Return dynamic range difference between ``data`` and ``ground_truth``.
Evaluates difference in range between input (``data``) and reference
data (``ground_truth``). Allows for normalization (``normalized``) and a
masking of the two spaces (``mask``).
Parameters
----------
data : `array-like`
Input data to compare to the ground truth.
ground_truth : `array-like`
Reference to which ``data`` should be compared.
mask : `array-like`, optional
Binary mask or index array to define ROI in which FOM evaluation
is performed.
normalized : bool, optional
If ``True``, normalize the FOM to lie in [0, 1].
force_lower_is_better : bool, optional
If ``True``, it is ensured that lower values correspond to better
matches. For the range difference, this is already the case, and
the flag is only present for compatibility to other figures of merit.
Returns
-------
rd : float
FOM value, where a lower value means a better match.
Notes
-----
The FOM evaluates
.. math::
\mathrm{RD}(f, g) = \Big|
\big(\max(f) - \min(f) \big) -
\big(\max(g) - \min(g) \big)
\Big|
or, in normalized form
.. math::
\mathrm{RD_N}(f, g) = \frac{
\Big|
\big(\max(f) - \min(f) \big) -
\big(\max(g) - \min(g) \big)
\Big|}{
\big(\max(f) - \min(f) \big) +
\big(\max(g) - \min(g) \big)}
The normalized variant takes values in :math:`[0, 1]`.
]
variable[data] assign[=] call[name[np].asarray, parameter[name[data]]]
variable[ground_truth] assign[=] call[name[np].asarray, parameter[name[ground_truth]]]
if compare[name[mask] is_not constant[None]] begin[:]
variable[mask] assign[=] call[name[np].asarray, parameter[name[mask]]]
variable[data] assign[=] call[name[data]][name[mask]]
variable[ground_truth] assign[=] call[name[ground_truth]][name[mask]]
variable[data_range] assign[=] call[name[np].ptp, parameter[name[data]]]
variable[ground_truth_range] assign[=] call[name[np].ptp, parameter[name[ground_truth]]]
variable[fom] assign[=] call[name[np].abs, parameter[binary_operation[name[data_range] - name[ground_truth_range]]]]
if name[normalized] begin[:]
variable[denom] assign[=] call[name[np].abs, parameter[binary_operation[name[data_range] + name[ground_truth_range]]]]
if compare[name[denom] equal[==] constant[0]] begin[:]
variable[fom] assign[=] constant[0.0]
return[name[fom]] | keyword[def] identifier[range_difference] ( identifier[data] , identifier[ground_truth] , identifier[mask] = keyword[None] , identifier[normalized] = keyword[False] ,
identifier[force_lower_is_better] = keyword[True] ):
literal[string]
identifier[data] = identifier[np] . identifier[asarray] ( identifier[data] )
identifier[ground_truth] = identifier[np] . identifier[asarray] ( identifier[ground_truth] )
keyword[if] identifier[mask] keyword[is] keyword[not] keyword[None] :
identifier[mask] = identifier[np] . identifier[asarray] ( identifier[mask] , identifier[dtype] = identifier[bool] )
identifier[data] = identifier[data] [ identifier[mask] ]
identifier[ground_truth] = identifier[ground_truth] [ identifier[mask] ]
identifier[data_range] = identifier[np] . identifier[ptp] ( identifier[data] )
identifier[ground_truth_range] = identifier[np] . identifier[ptp] ( identifier[ground_truth] )
identifier[fom] = identifier[np] . identifier[abs] ( identifier[data_range] - identifier[ground_truth_range] )
keyword[if] identifier[normalized] :
identifier[denom] = identifier[np] . identifier[abs] ( identifier[data_range] + identifier[ground_truth_range] )
keyword[if] identifier[denom] == literal[int] :
identifier[fom] = literal[int]
keyword[else] :
identifier[fom] /= identifier[denom]
keyword[return] identifier[fom] | def range_difference(data, ground_truth, mask=None, normalized=False, force_lower_is_better=True):
"""Return dynamic range difference between ``data`` and ``ground_truth``.
Evaluates difference in range between input (``data``) and reference
data (``ground_truth``). Allows for normalization (``normalized``) and a
masking of the two spaces (``mask``).
Parameters
----------
data : `array-like`
Input data to compare to the ground truth.
ground_truth : `array-like`
Reference to which ``data`` should be compared.
mask : `array-like`, optional
Binary mask or index array to define ROI in which FOM evaluation
is performed.
normalized : bool, optional
If ``True``, normalize the FOM to lie in [0, 1].
force_lower_is_better : bool, optional
If ``True``, it is ensured that lower values correspond to better
matches. For the range difference, this is already the case, and
the flag is only present for compatibility to other figures of merit.
Returns
-------
rd : float
FOM value, where a lower value means a better match.
Notes
-----
The FOM evaluates
.. math::
\\mathrm{RD}(f, g) = \\Big|
\\big(\\max(f) - \\min(f) \\big) -
\\big(\\max(g) - \\min(g) \\big)
\\Big|
or, in normalized form
.. math::
\\mathrm{RD_N}(f, g) = \\frac{
\\Big|
\\big(\\max(f) - \\min(f) \\big) -
\\big(\\max(g) - \\min(g) \\big)
\\Big|}{
\\big(\\max(f) - \\min(f) \\big) +
\\big(\\max(g) - \\min(g) \\big)}
The normalized variant takes values in :math:`[0, 1]`.
"""
data = np.asarray(data)
ground_truth = np.asarray(ground_truth)
if mask is not None:
mask = np.asarray(mask, dtype=bool)
data = data[mask]
ground_truth = ground_truth[mask] # depends on [control=['if'], data=['mask']]
data_range = np.ptp(data)
ground_truth_range = np.ptp(ground_truth)
fom = np.abs(data_range - ground_truth_range)
if normalized:
denom = np.abs(data_range + ground_truth_range)
if denom == 0:
fom = 0.0 # depends on [control=['if'], data=[]]
else:
fom /= denom # depends on [control=['if'], data=[]]
return fom |
def main():
    """
    Toil pipeline that transfers TCGA data into an S3 bucket.

    Data is pulled down with Genetorrent and transferred to S3 via S3AM.
    """
    # Build the argument parser and attach Toil's own options to it.
    parser = build_parser()
    Job.Runner.addToilOptions(parser)
    args = parser.parse_args()
    # Collect the pipeline inputs from the parsed arguments.
    inputs = dict(genetorrent=args.genetorrent,
                  genetorrent_key=args.genetorrent_key,
                  ssec=args.ssec,
                  s3_dir=args.s3_dir)
    # Sanity checks: every provided path must point at an existing file
    # (checked in the same order as the options above).
    for path in (args.ssec, args.genetorrent, args.genetorrent_key):
        if path:
            assert os.path.isfile(path)
    samples = parse_genetorrent(args.genetorrent)
    # Start pipeline.
    # map_job accepts a function, an iterable, and *args. The function is
    # launched as a child process with one element from the iterable and
    # *args, which in turn spawns a tree of child jobs.
    Job.Runner.startToil(Job.wrapJobFn(map_job, download_and_transfer_sample,
                                       samples, inputs), args)
constant[
This is a Toil pipeline to transfer TCGA data into an S3 Bucket
Data is pulled down with Genetorrent and transferred to S3 via S3AM.
]
variable[parser] assign[=] call[name[build_parser], parameter[]]
call[name[Job].Runner.addToilOptions, parameter[name[parser]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
variable[inputs] assign[=] dictionary[[<ast.Constant object at 0x7da20e957790>, <ast.Constant object at 0x7da20e957730>, <ast.Constant object at 0x7da20e957910>, <ast.Constant object at 0x7da20e954dc0>], [<ast.Attribute object at 0x7da20e9579a0>, <ast.Attribute object at 0x7da20e955030>, <ast.Attribute object at 0x7da18f811120>, <ast.Attribute object at 0x7da18f8132b0>]]
if name[args].ssec begin[:]
assert[call[name[os].path.isfile, parameter[name[args].ssec]]]
if name[args].genetorrent begin[:]
assert[call[name[os].path.isfile, parameter[name[args].genetorrent]]]
if name[args].genetorrent_key begin[:]
assert[call[name[os].path.isfile, parameter[name[args].genetorrent_key]]]
variable[samples] assign[=] call[name[parse_genetorrent], parameter[name[args].genetorrent]]
call[name[Job].Runner.startToil, parameter[call[name[Job].wrapJobFn, parameter[name[map_job], name[download_and_transfer_sample], name[samples], name[inputs]]], name[args]]] | keyword[def] identifier[main] ():
literal[string]
identifier[parser] = identifier[build_parser] ()
identifier[Job] . identifier[Runner] . identifier[addToilOptions] ( identifier[parser] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[inputs] ={ literal[string] : identifier[args] . identifier[genetorrent] ,
literal[string] : identifier[args] . identifier[genetorrent_key] ,
literal[string] : identifier[args] . identifier[ssec] ,
literal[string] : identifier[args] . identifier[s3_dir] }
keyword[if] identifier[args] . identifier[ssec] :
keyword[assert] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[ssec] )
keyword[if] identifier[args] . identifier[genetorrent] :
keyword[assert] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[genetorrent] )
keyword[if] identifier[args] . identifier[genetorrent_key] :
keyword[assert] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[genetorrent_key] )
identifier[samples] = identifier[parse_genetorrent] ( identifier[args] . identifier[genetorrent] )
identifier[Job] . identifier[Runner] . identifier[startToil] ( identifier[Job] . identifier[wrapJobFn] ( identifier[map_job] , identifier[download_and_transfer_sample] , identifier[samples] , identifier[inputs] ), identifier[args] ) | def main():
"""
This is a Toil pipeline to transfer TCGA data into an S3 Bucket
Data is pulled down with Genetorrent and transferred to S3 via S3AM.
"""
# Define Parser object and add to toil
parser = build_parser()
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# Store inputs from argparse
inputs = {'genetorrent': args.genetorrent, 'genetorrent_key': args.genetorrent_key, 'ssec': args.ssec, 's3_dir': args.s3_dir}
# Sanity checks
if args.ssec:
assert os.path.isfile(args.ssec) # depends on [control=['if'], data=[]]
if args.genetorrent:
assert os.path.isfile(args.genetorrent) # depends on [control=['if'], data=[]]
if args.genetorrent_key:
assert os.path.isfile(args.genetorrent_key) # depends on [control=['if'], data=[]]
samples = parse_genetorrent(args.genetorrent)
# Start pipeline
# map_job accepts a function, an iterable, and *args. The function is launched as a child
# process with one element from the iterable and *args, which in turn spawns a tree of child jobs.
Job.Runner.startToil(Job.wrapJobFn(map_job, download_and_transfer_sample, samples, inputs), args) |
def random_split(self, fraction, seed=None, exact=False):
    """
    Randomly partition the rows of this SFrame into two new SFrames.

    The first SFrame holds approximately ``fraction`` of the rows, sampled
    uniformly without replacement; the second holds the remaining rows.
    Set ``exact=True`` to obtain an exact fraction split, at a performance
    penalty.

    Parameters
    ----------
    fraction : float
        Fraction of the rows to place in the first SFrame. Must be
        between 0 and 1. If ``exact`` is False (the default), the number
        of rows returned is approximate.
    seed : int, optional
        Seed for the random number generator used to split.
    exact : bool, optional
        Defaults to False. If ``exact=True``, an exact fraction is
        returned, but at a performance penalty.

    Returns
    -------
    out : tuple [SFrame]
        Two new SFrames.

    Examples
    --------
    Suppose we have an SFrame with 1,024 rows and we want to randomly split
    it into training and testing datasets with about a 90%/10% split.

    >>> sf = turicreate.SFrame({'id': range(1024)})
    >>> sf_train, sf_test = sf.random_split(.9, seed=5)
    >>> print(len(sf_train), len(sf_test))
    922 102
    """
    if fraction > 1 or fraction < 0:
        raise ValueError('Invalid sampling rate: ' + str(fraction))
    # An empty frame just splits into two empty frames.
    if self.num_rows() == 0 or self.num_columns() == 0:
        return (SFrame(), SFrame())
    if seed is None:
        # Derive a seed from the wall clock, nanosecond component included.
        seed = abs(hash("%0.20f" % time.time())) % (2 ** 31)
    # The server side requires this to be an int, so cast if we can.
    try:
        seed = int(seed)
    except ValueError:
        raise ValueError('The \'seed\' parameter must be of type int.')
    with cython_context():
        pair = self.__proxy__.random_split(fraction, seed, exact)
        return (SFrame(data=[], _proxy=pair[0]),
                SFrame(data=[], _proxy=pair[1]))
constant[
Randomly split the rows of an SFrame into two SFrames. The first SFrame
contains *M* rows, sampled uniformly (without replacement) from the
original SFrame. *M* is approximately the fraction times the original
number of rows. The second SFrame contains the remaining rows of the
original SFrame.
An exact fraction partition can be optionally obtained by setting
exact=True.
Parameters
----------
fraction : float
Fraction of the rows to fetch. Must be between 0 and 1.
if exact is False (default), the number of rows returned is
approximately the fraction times the number of rows.
seed : int, optional
Seed for the random number generator used to split.
exact: bool, optional
Defaults to False. If exact=True, an exact fraction is returned,
but at a performance penalty.
Returns
-------
out : tuple [SFrame]
Two new SFrames.
Examples
--------
Suppose we have an SFrame with 1,024 rows and we want to randomly split
it into training and testing datasets with about a 90%/10% split.
>>> sf = turicreate.SFrame({'id': range(1024)})
>>> sf_train, sf_test = sf.random_split(.9, seed=5)
>>> print(len(sf_train), len(sf_test))
922 102
]
if <ast.BoolOp object at 0x7da204961a80> begin[:]
<ast.Raise object at 0x7da204960c70>
if <ast.BoolOp object at 0x7da204960a30> begin[:]
return[tuple[[<ast.Call object at 0x7da204961150>, <ast.Call object at 0x7da204960760>]]]
if compare[name[seed] is constant[None]] begin[:]
variable[seed] assign[=] binary_operation[call[name[abs], parameter[call[name[hash], parameter[binary_operation[constant[%0.20f] <ast.Mod object at 0x7da2590d6920> call[name[time].time, parameter[]]]]]]] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] ** constant[31]]]
<ast.Try object at 0x7da204961030>
with call[name[cython_context], parameter[]] begin[:]
variable[proxy_pair] assign[=] call[name[self].__proxy__.random_split, parameter[name[fraction], name[seed], name[exact]]]
return[tuple[[<ast.Call object at 0x7da204962230>, <ast.Call object at 0x7da2049631f0>]]] | keyword[def] identifier[random_split] ( identifier[self] , identifier[fraction] , identifier[seed] = keyword[None] , identifier[exact] = keyword[False] ):
literal[string]
keyword[if] ( identifier[fraction] > literal[int] keyword[or] identifier[fraction] < literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[fraction] ))
keyword[if] ( identifier[self] . identifier[num_rows] ()== literal[int] keyword[or] identifier[self] . identifier[num_columns] ()== literal[int] ):
keyword[return] ( identifier[SFrame] (), identifier[SFrame] ())
keyword[if] identifier[seed] keyword[is] keyword[None] :
identifier[seed] = identifier[abs] ( identifier[hash] ( literal[string] % identifier[time] . identifier[time] ()))%( literal[int] ** literal[int] )
keyword[try] :
identifier[seed] = identifier[int] ( identifier[seed] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[with] identifier[cython_context] ():
identifier[proxy_pair] = identifier[self] . identifier[__proxy__] . identifier[random_split] ( identifier[fraction] , identifier[seed] , identifier[exact] )
keyword[return] ( identifier[SFrame] ( identifier[data] =[], identifier[_proxy] = identifier[proxy_pair] [ literal[int] ]), identifier[SFrame] ( identifier[data] =[], identifier[_proxy] = identifier[proxy_pair] [ literal[int] ])) | def random_split(self, fraction, seed=None, exact=False):
"""
Randomly split the rows of an SFrame into two SFrames. The first SFrame
contains *M* rows, sampled uniformly (without replacement) from the
original SFrame. *M* is approximately the fraction times the original
number of rows. The second SFrame contains the remaining rows of the
original SFrame.
An exact fraction partition can be optionally obtained by setting
exact=True.
Parameters
----------
fraction : float
Fraction of the rows to fetch. Must be between 0 and 1.
if exact is False (default), the number of rows returned is
approximately the fraction times the number of rows.
seed : int, optional
Seed for the random number generator used to split.
exact: bool, optional
Defaults to False. If exact=True, an exact fraction is returned,
but at a performance penalty.
Returns
-------
out : tuple [SFrame]
Two new SFrames.
Examples
--------
Suppose we have an SFrame with 1,024 rows and we want to randomly split
it into training and testing datasets with about a 90%/10% split.
>>> sf = turicreate.SFrame({'id': range(1024)})
>>> sf_train, sf_test = sf.random_split(.9, seed=5)
>>> print(len(sf_train), len(sf_test))
922 102
"""
if fraction > 1 or fraction < 0:
raise ValueError('Invalid sampling rate: ' + str(fraction)) # depends on [control=['if'], data=[]]
if self.num_rows() == 0 or self.num_columns() == 0:
return (SFrame(), SFrame()) # depends on [control=['if'], data=[]]
if seed is None:
# Include the nanosecond component as well.
seed = abs(hash('%0.20f' % time.time())) % 2 ** 31 # depends on [control=['if'], data=['seed']]
# The server side requires this to be an int, so cast if we can
try:
seed = int(seed) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError("The 'seed' parameter must be of type int.") # depends on [control=['except'], data=[]]
with cython_context():
proxy_pair = self.__proxy__.random_split(fraction, seed, exact)
return (SFrame(data=[], _proxy=proxy_pair[0]), SFrame(data=[], _proxy=proxy_pair[1])) # depends on [control=['with'], data=[]] |
def decompile(input_, file_, output, format_, jar, limit, decompiler):
    """
    Decompile an APK and create Control Flow Graphs.
    Example:
    \b
    $ androguard resources.arsc
    """
    from androguard import session

    # Exactly one of the two input channels may be used.
    if file_ and input_:
        print("Can not give --input and positional argument! "
              "Please use only one of them!", file=sys.stderr)
        sys.exit(1)
    if not input_ and not file_:
        print("Give one file to decode!", file=sys.stderr)
        sys.exit(1)

    fname = input_ if input_ else file_

    # Load the file into a fresh session, then export the decompiled apps.
    s = session.Session()
    with open(fname, "rb") as fd:
        s.add(fname, fd.read())
    export_apps_to_format(fname, s, output, limit, jar, decompiler, format_)
constant[
Decompile an APK and create Control Flow Graphs.
Example:
$ androguard resources.arsc
]
from relative_module[androguard] import module[session]
if <ast.BoolOp object at 0x7da18bc73bb0> begin[:]
call[name[print], parameter[constant[Can not give --input and positional argument! Please use only one of them!]]]
call[name[sys].exit, parameter[constant[1]]]
if <ast.BoolOp object at 0x7da2041d9b70> begin[:]
call[name[print], parameter[constant[Give one file to decode!]]]
call[name[sys].exit, parameter[constant[1]]]
if name[input_] begin[:]
variable[fname] assign[=] name[input_]
variable[s] assign[=] call[name[session].Session, parameter[]]
with call[name[open], parameter[name[fname], constant[rb]]] begin[:]
call[name[s].add, parameter[name[fname], call[name[fd].read, parameter[]]]]
call[name[export_apps_to_format], parameter[name[fname], name[s], name[output], name[limit], name[jar], name[decompiler], name[format_]]] | keyword[def] identifier[decompile] ( identifier[input_] , identifier[file_] , identifier[output] , identifier[format_] , identifier[jar] , identifier[limit] , identifier[decompiler] ):
literal[string]
keyword[from] identifier[androguard] keyword[import] identifier[session]
keyword[if] identifier[file_] keyword[and] identifier[input_] :
identifier[print] ( literal[string]
literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] keyword[not] identifier[input_] keyword[and] keyword[not] identifier[file_] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[input_] :
identifier[fname] = identifier[input_]
keyword[else] :
identifier[fname] = identifier[file_]
identifier[s] = identifier[session] . identifier[Session] ()
keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[fd] :
identifier[s] . identifier[add] ( identifier[fname] , identifier[fd] . identifier[read] ())
identifier[export_apps_to_format] ( identifier[fname] , identifier[s] , identifier[output] , identifier[limit] ,
identifier[jar] , identifier[decompiler] , identifier[format_] ) | def decompile(input_, file_, output, format_, jar, limit, decompiler):
"""
Decompile an APK and create Control Flow Graphs.
Example:
\x08
$ androguard resources.arsc
"""
from androguard import session
if file_ and input_:
print('Can not give --input and positional argument! Please use only one of them!', file=sys.stderr)
sys.exit(1) # depends on [control=['if'], data=[]]
if not input_ and (not file_):
print('Give one file to decode!', file=sys.stderr)
sys.exit(1) # depends on [control=['if'], data=[]]
if input_:
fname = input_ # depends on [control=['if'], data=[]]
else:
fname = file_
s = session.Session()
with open(fname, 'rb') as fd:
s.add(fname, fd.read()) # depends on [control=['with'], data=['fd']]
export_apps_to_format(fname, s, output, limit, jar, decompiler, format_) |
def cget(self, item):
    """Return the value of an option"""
    # Options this widget defines itself are stored as "_<name>" attributes;
    # everything else is delegated to the underlying ttk.Frame.
    if item in self.options:
        return getattr(self, "_" + item)
    return ttk.Frame.cget(self, item)
constant[Return the value of an option]
return[<ast.IfExp object at 0x7da1b2345330>] | keyword[def] identifier[cget] ( identifier[self] , identifier[item] ):
literal[string]
keyword[return] identifier[getattr] ( identifier[self] , literal[string] + identifier[item] ) keyword[if] identifier[item] keyword[in] identifier[self] . identifier[options] keyword[else] identifier[ttk] . identifier[Frame] . identifier[cget] ( identifier[self] , identifier[item] ) | def cget(self, item):
"""Return the value of an option"""
return getattr(self, '_' + item) if item in self.options else ttk.Frame.cget(self, item) |
def to_internal_value(self, data):
    """
    Calls super() from DRF, but with an addition.

    Seeds ``initial_data`` and ``_validated_data`` on nested
    EmbeddedDocumentSerializers so that recursive_save can make use of
    them. Arbitrary data not expected by any field is silently dropped
    from validated_data (DRF's default behaviour).
    """
    # Give each nested EmbeddedDocumentSerializer its slice of the raw
    # input so that _get_dynamic_data can use it.
    for field in self._writable_fields:
        if not isinstance(field, EmbeddedDocumentSerializer):
            continue
        if field.field_name in data:
            field.initial_data = data[field.field_name]

    ret = super(DocumentSerializer, self).to_internal_value(data)

    # Likewise hand each nested serializer its validated slice so that
    # create()/update() can use it.
    for field in self._writable_fields:
        if not isinstance(field, EmbeddedDocumentSerializer):
            continue
        if field.field_name in ret:
            field._validated_data = ret[field.field_name]
    return ret
constant[
Calls super() from DRF, but with an addition.
Creates initial_data and _validated_data for nested
EmbeddedDocumentSerializers, so that recursive_save could make
use of them.
If meets any arbitrary data, not expected by fields,
just silently drops them from validated_data.
]
for taget[name[field]] in starred[name[self]._writable_fields] begin[:]
if <ast.BoolOp object at 0x7da20e9b3070> begin[:]
name[field].initial_data assign[=] call[name[data]][name[field].field_name]
variable[ret] assign[=] call[call[name[super], parameter[name[DocumentSerializer], name[self]]].to_internal_value, parameter[name[data]]]
for taget[name[field]] in starred[name[self]._writable_fields] begin[:]
if <ast.BoolOp object at 0x7da20e9b33a0> begin[:]
name[field]._validated_data assign[=] call[name[ret]][name[field].field_name]
return[name[ret]] | keyword[def] identifier[to_internal_value] ( identifier[self] , identifier[data] ):
literal[string]
keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_writable_fields] :
keyword[if] identifier[isinstance] ( identifier[field] , identifier[EmbeddedDocumentSerializer] ) keyword[and] identifier[field] . identifier[field_name] keyword[in] identifier[data] :
identifier[field] . identifier[initial_data] = identifier[data] [ identifier[field] . identifier[field_name] ]
identifier[ret] = identifier[super] ( identifier[DocumentSerializer] , identifier[self] ). identifier[to_internal_value] ( identifier[data] )
keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_writable_fields] :
keyword[if] identifier[isinstance] ( identifier[field] , identifier[EmbeddedDocumentSerializer] ) keyword[and] identifier[field] . identifier[field_name] keyword[in] identifier[ret] :
identifier[field] . identifier[_validated_data] = identifier[ret] [ identifier[field] . identifier[field_name] ]
keyword[return] identifier[ret] | def to_internal_value(self, data):
"""
Calls super() from DRF, but with an addition.
Creates initial_data and _validated_data for nested
EmbeddedDocumentSerializers, so that recursive_save could make
use of them.
If meets any arbitrary data, not expected by fields,
just silently drops them from validated_data.
"""
# for EmbeddedDocumentSerializers create initial data
# so that _get_dynamic_data could use them
for field in self._writable_fields:
if isinstance(field, EmbeddedDocumentSerializer) and field.field_name in data:
field.initial_data = data[field.field_name] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
ret = super(DocumentSerializer, self).to_internal_value(data)
# for EmbeddedDocumentSerializers create _validated_data
# so that create()/update() could use them
for field in self._writable_fields:
if isinstance(field, EmbeddedDocumentSerializer) and field.field_name in ret:
field._validated_data = ret[field.field_name] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
return ret |
def gabc(elem, doc):
"""Handle gabc file inclusion and gabc code block."""
if type(elem) == Code and "gabc" in elem.classes:
if doc.format == "latex":
if elem.identifier == "":
label = ""
else:
label = '\\label{' + elem.identifier + '}'
return latex(
"\n\\smallskip\n{%\n" +
latexsnippet('\\gregorioscore{' + elem.text + '}', elem.attributes) +
"%\n}" +
label
)
else:
infile = elem.text + (
'.gabc' if '.gabc' not in elem.text else ''
)
with open(infile, 'r') as doc:
code = doc.read().split('%%\n')[1]
return Image(png(
elem.text,
latexsnippet('\\gregorioscore', elem.attributes)
))
elif type(elem) == CodeBlock and "gabc" in elem.classes:
if doc.format == "latex":
if elem.identifier == "":
label = ""
else:
label = '\\label{' + elem.identifier + '}'
return latexblock(
"\n\\smallskip\n{%\n" +
latexsnippet('\\gabcsnippet{' + elem.text + '}', elem.attributes) +
"%\n}" +
label
)
else:
return Para(Image(url=png(elem.text, latexsnippet('\\gabcsnippet', elem.attributes)))) | def function[gabc, parameter[elem, doc]]:
constant[Handle gabc file inclusion and gabc code block.]
if <ast.BoolOp object at 0x7da18bc73c40> begin[:]
if compare[name[doc].format equal[==] constant[latex]] begin[:]
if compare[name[elem].identifier equal[==] constant[]] begin[:]
variable[label] assign[=] constant[]
return[call[name[latex], parameter[binary_operation[binary_operation[binary_operation[constant[
\smallskip
{%
] + call[name[latexsnippet], parameter[binary_operation[binary_operation[constant[\gregorioscore{] + name[elem].text] + constant[}]], name[elem].attributes]]] + constant[%
}]] + name[label]]]]] | keyword[def] identifier[gabc] ( identifier[elem] , identifier[doc] ):
literal[string]
keyword[if] identifier[type] ( identifier[elem] )== identifier[Code] keyword[and] literal[string] keyword[in] identifier[elem] . identifier[classes] :
keyword[if] identifier[doc] . identifier[format] == literal[string] :
keyword[if] identifier[elem] . identifier[identifier] == literal[string] :
identifier[label] = literal[string]
keyword[else] :
identifier[label] = literal[string] + identifier[elem] . identifier[identifier] + literal[string]
keyword[return] identifier[latex] (
literal[string] +
identifier[latexsnippet] ( literal[string] + identifier[elem] . identifier[text] + literal[string] , identifier[elem] . identifier[attributes] )+
literal[string] +
identifier[label]
)
keyword[else] :
identifier[infile] = identifier[elem] . identifier[text] +(
literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[elem] . identifier[text] keyword[else] literal[string]
)
keyword[with] identifier[open] ( identifier[infile] , literal[string] ) keyword[as] identifier[doc] :
identifier[code] = identifier[doc] . identifier[read] (). identifier[split] ( literal[string] )[ literal[int] ]
keyword[return] identifier[Image] ( identifier[png] (
identifier[elem] . identifier[text] ,
identifier[latexsnippet] ( literal[string] , identifier[elem] . identifier[attributes] )
))
keyword[elif] identifier[type] ( identifier[elem] )== identifier[CodeBlock] keyword[and] literal[string] keyword[in] identifier[elem] . identifier[classes] :
keyword[if] identifier[doc] . identifier[format] == literal[string] :
keyword[if] identifier[elem] . identifier[identifier] == literal[string] :
identifier[label] = literal[string]
keyword[else] :
identifier[label] = literal[string] + identifier[elem] . identifier[identifier] + literal[string]
keyword[return] identifier[latexblock] (
literal[string] +
identifier[latexsnippet] ( literal[string] + identifier[elem] . identifier[text] + literal[string] , identifier[elem] . identifier[attributes] )+
literal[string] +
identifier[label]
)
keyword[else] :
keyword[return] identifier[Para] ( identifier[Image] ( identifier[url] = identifier[png] ( identifier[elem] . identifier[text] , identifier[latexsnippet] ( literal[string] , identifier[elem] . identifier[attributes] )))) | def gabc(elem, doc):
"""Handle gabc file inclusion and gabc code block."""
if type(elem) == Code and 'gabc' in elem.classes:
if doc.format == 'latex':
if elem.identifier == '':
label = '' # depends on [control=['if'], data=[]]
else:
label = '\\label{' + elem.identifier + '}'
return latex('\n\\smallskip\n{%\n' + latexsnippet('\\gregorioscore{' + elem.text + '}', elem.attributes) + '%\n}' + label) # depends on [control=['if'], data=[]]
else:
infile = elem.text + ('.gabc' if '.gabc' not in elem.text else '')
with open(infile, 'r') as doc:
code = doc.read().split('%%\n')[1] # depends on [control=['with'], data=['doc']]
return Image(png(elem.text, latexsnippet('\\gregorioscore', elem.attributes))) # depends on [control=['if'], data=[]]
elif type(elem) == CodeBlock and 'gabc' in elem.classes:
if doc.format == 'latex':
if elem.identifier == '':
label = '' # depends on [control=['if'], data=[]]
else:
label = '\\label{' + elem.identifier + '}'
return latexblock('\n\\smallskip\n{%\n' + latexsnippet('\\gabcsnippet{' + elem.text + '}', elem.attributes) + '%\n}' + label) # depends on [control=['if'], data=[]]
else:
return Para(Image(url=png(elem.text, latexsnippet('\\gabcsnippet', elem.attributes)))) # depends on [control=['if'], data=[]] |
def size(array):
"""
Return a human-readable description of the number of bytes required
to store the data of the given array.
For example::
>>> array.nbytes
14000000
>> biggus.size(array)
'13.35 MiB'
Parameters
----------
array : array-like object
The array object must provide an `nbytes` property.
Returns
-------
out : str
The Array representing the requested mean.
"""
nbytes = array.nbytes
if nbytes < (1 << 10):
size = '{} B'.format(nbytes)
elif nbytes < (1 << 20):
size = '{:.02f} KiB'.format(nbytes / (1 << 10))
elif nbytes < (1 << 30):
size = '{:.02f} MiB'.format(nbytes / (1 << 20))
elif nbytes < (1 << 40):
size = '{:.02f} GiB'.format(nbytes / (1 << 30))
else:
size = '{:.02f} TiB'.format(nbytes / (1 << 40))
return size | def function[size, parameter[array]]:
constant[
Return a human-readable description of the number of bytes required
to store the data of the given array.
For example::
>>> array.nbytes
14000000
>> biggus.size(array)
'13.35 MiB'
Parameters
----------
array : array-like object
The array object must provide an `nbytes` property.
Returns
-------
out : str
The Array representing the requested mean.
]
variable[nbytes] assign[=] name[array].nbytes
if compare[name[nbytes] less[<] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> constant[10]]] begin[:]
variable[size] assign[=] call[constant[{} B].format, parameter[name[nbytes]]]
return[name[size]] | keyword[def] identifier[size] ( identifier[array] ):
literal[string]
identifier[nbytes] = identifier[array] . identifier[nbytes]
keyword[if] identifier[nbytes] <( literal[int] << literal[int] ):
identifier[size] = literal[string] . identifier[format] ( identifier[nbytes] )
keyword[elif] identifier[nbytes] <( literal[int] << literal[int] ):
identifier[size] = literal[string] . identifier[format] ( identifier[nbytes] /( literal[int] << literal[int] ))
keyword[elif] identifier[nbytes] <( literal[int] << literal[int] ):
identifier[size] = literal[string] . identifier[format] ( identifier[nbytes] /( literal[int] << literal[int] ))
keyword[elif] identifier[nbytes] <( literal[int] << literal[int] ):
identifier[size] = literal[string] . identifier[format] ( identifier[nbytes] /( literal[int] << literal[int] ))
keyword[else] :
identifier[size] = literal[string] . identifier[format] ( identifier[nbytes] /( literal[int] << literal[int] ))
keyword[return] identifier[size] | def size(array):
"""
Return a human-readable description of the number of bytes required
to store the data of the given array.
For example::
>>> array.nbytes
14000000
>> biggus.size(array)
'13.35 MiB'
Parameters
----------
array : array-like object
The array object must provide an `nbytes` property.
Returns
-------
out : str
The Array representing the requested mean.
"""
nbytes = array.nbytes
if nbytes < 1 << 10:
size = '{} B'.format(nbytes) # depends on [control=['if'], data=['nbytes']]
elif nbytes < 1 << 20:
size = '{:.02f} KiB'.format(nbytes / (1 << 10)) # depends on [control=['if'], data=['nbytes']]
elif nbytes < 1 << 30:
size = '{:.02f} MiB'.format(nbytes / (1 << 20)) # depends on [control=['if'], data=['nbytes']]
elif nbytes < 1 << 40:
size = '{:.02f} GiB'.format(nbytes / (1 << 30)) # depends on [control=['if'], data=['nbytes']]
else:
size = '{:.02f} TiB'.format(nbytes / (1 << 40))
return size |
def _get_aws_variables(self):
"""
Returns the AWS specific environment variables that should be available in the Lambda runtime.
They are prefixed it "AWS_*".
:return dict: Name and value of AWS environment variable
"""
result = {
# Variable that says this function is running in Local Lambda
"AWS_SAM_LOCAL": "true",
# Function configuration
"AWS_LAMBDA_FUNCTION_MEMORY_SIZE": str(self.memory),
"AWS_LAMBDA_FUNCTION_TIMEOUT": str(self.timeout),
"AWS_LAMBDA_FUNCTION_HANDLER": str(self._function["handler"]),
# AWS Credentials - Use the input credentials or use the defaults
"AWS_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]),
"AWS_DEFAULT_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]),
"AWS_ACCESS_KEY_ID": self.aws_creds.get("key", self._DEFAULT_AWS_CREDS["key"]),
"AWS_SECRET_ACCESS_KEY": self.aws_creds.get("secret", self._DEFAULT_AWS_CREDS["secret"])
# Additional variables we don't fill in
# "AWS_ACCOUNT_ID="
# "AWS_LAMBDA_EVENT_BODY=",
# "AWS_LAMBDA_FUNCTION_NAME=",
# "AWS_LAMBDA_FUNCTION_VERSION=",
}
# Session Token should be added **only** if the input creds have a token and the value is not empty.
if self.aws_creds.get("sessiontoken"):
result["AWS_SESSION_TOKEN"] = self.aws_creds.get("sessiontoken")
return result | def function[_get_aws_variables, parameter[self]]:
constant[
Returns the AWS specific environment variables that should be available in the Lambda runtime.
They are prefixed it "AWS_*".
:return dict: Name and value of AWS environment variable
]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b2095c00>, <ast.Constant object at 0x7da1b2095f30>, <ast.Constant object at 0x7da1b2095d80>, <ast.Constant object at 0x7da1b20953f0>, <ast.Constant object at 0x7da1b2095f90>, <ast.Constant object at 0x7da1b2094610>, <ast.Constant object at 0x7da1b2095720>, <ast.Constant object at 0x7da1b2095ff0>], [<ast.Constant object at 0x7da1b2095000>, <ast.Call object at 0x7da1b20940d0>, <ast.Call object at 0x7da1b2095ea0>, <ast.Call object at 0x7da1b2094580>, <ast.Call object at 0x7da1b2094cd0>, <ast.Call object at 0x7da1b2095ab0>, <ast.Call object at 0x7da1b20945e0>, <ast.Call object at 0x7da1b2095900>]]
if call[name[self].aws_creds.get, parameter[constant[sessiontoken]]] begin[:]
call[name[result]][constant[AWS_SESSION_TOKEN]] assign[=] call[name[self].aws_creds.get, parameter[constant[sessiontoken]]]
return[name[result]] | keyword[def] identifier[_get_aws_variables] ( identifier[self] ):
literal[string]
identifier[result] ={
literal[string] : literal[string] ,
literal[string] : identifier[str] ( identifier[self] . identifier[memory] ),
literal[string] : identifier[str] ( identifier[self] . identifier[timeout] ),
literal[string] : identifier[str] ( identifier[self] . identifier[_function] [ literal[string] ]),
literal[string] : identifier[self] . identifier[aws_creds] . identifier[get] ( literal[string] , identifier[self] . identifier[_DEFAULT_AWS_CREDS] [ literal[string] ]),
literal[string] : identifier[self] . identifier[aws_creds] . identifier[get] ( literal[string] , identifier[self] . identifier[_DEFAULT_AWS_CREDS] [ literal[string] ]),
literal[string] : identifier[self] . identifier[aws_creds] . identifier[get] ( literal[string] , identifier[self] . identifier[_DEFAULT_AWS_CREDS] [ literal[string] ]),
literal[string] : identifier[self] . identifier[aws_creds] . identifier[get] ( literal[string] , identifier[self] . identifier[_DEFAULT_AWS_CREDS] [ literal[string] ])
}
keyword[if] identifier[self] . identifier[aws_creds] . identifier[get] ( literal[string] ):
identifier[result] [ literal[string] ]= identifier[self] . identifier[aws_creds] . identifier[get] ( literal[string] )
keyword[return] identifier[result] | def _get_aws_variables(self):
"""
Returns the AWS specific environment variables that should be available in the Lambda runtime.
They are prefixed it "AWS_*".
:return dict: Name and value of AWS environment variable
"""
# Variable that says this function is running in Local Lambda
# Function configuration
# AWS Credentials - Use the input credentials or use the defaults
# Additional variables we don't fill in
# "AWS_ACCOUNT_ID="
# "AWS_LAMBDA_EVENT_BODY=",
# "AWS_LAMBDA_FUNCTION_NAME=",
# "AWS_LAMBDA_FUNCTION_VERSION=",
result = {'AWS_SAM_LOCAL': 'true', 'AWS_LAMBDA_FUNCTION_MEMORY_SIZE': str(self.memory), 'AWS_LAMBDA_FUNCTION_TIMEOUT': str(self.timeout), 'AWS_LAMBDA_FUNCTION_HANDLER': str(self._function['handler']), 'AWS_REGION': self.aws_creds.get('region', self._DEFAULT_AWS_CREDS['region']), 'AWS_DEFAULT_REGION': self.aws_creds.get('region', self._DEFAULT_AWS_CREDS['region']), 'AWS_ACCESS_KEY_ID': self.aws_creds.get('key', self._DEFAULT_AWS_CREDS['key']), 'AWS_SECRET_ACCESS_KEY': self.aws_creds.get('secret', self._DEFAULT_AWS_CREDS['secret'])}
# Session Token should be added **only** if the input creds have a token and the value is not empty.
if self.aws_creds.get('sessiontoken'):
result['AWS_SESSION_TOKEN'] = self.aws_creds.get('sessiontoken') # depends on [control=['if'], data=[]]
return result |
def get_command(namespace):
"""
Get the pylint command for these arguments.
:param `Namespace` namespace: the namespace
"""
cmd = ["pylint", namespace.package] + arg_map[namespace.package]
if namespace.ignore:
cmd.append("--ignore=%s" % namespace.ignore)
return cmd | def function[get_command, parameter[namespace]]:
constant[
Get the pylint command for these arguments.
:param `Namespace` namespace: the namespace
]
variable[cmd] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18f722bc0>, <ast.Attribute object at 0x7da18f722950>]] + call[name[arg_map]][name[namespace].package]]
if name[namespace].ignore begin[:]
call[name[cmd].append, parameter[binary_operation[constant[--ignore=%s] <ast.Mod object at 0x7da2590d6920> name[namespace].ignore]]]
return[name[cmd]] | keyword[def] identifier[get_command] ( identifier[namespace] ):
literal[string]
identifier[cmd] =[ literal[string] , identifier[namespace] . identifier[package] ]+ identifier[arg_map] [ identifier[namespace] . identifier[package] ]
keyword[if] identifier[namespace] . identifier[ignore] :
identifier[cmd] . identifier[append] ( literal[string] % identifier[namespace] . identifier[ignore] )
keyword[return] identifier[cmd] | def get_command(namespace):
"""
Get the pylint command for these arguments.
:param `Namespace` namespace: the namespace
"""
cmd = ['pylint', namespace.package] + arg_map[namespace.package]
if namespace.ignore:
cmd.append('--ignore=%s' % namespace.ignore) # depends on [control=['if'], data=[]]
return cmd |
def update_ports_tree(ports_tree):
'''
Updates the ports tree, either the default or the `ports_tree` specified
CLI Example:
.. code-block:: bash
salt '*' poudriere.update_ports_tree staging
'''
_check_config_exists()
if ports_tree:
cmd = 'poudriere ports -u -p {0}'.format(ports_tree)
else:
cmd = 'poudriere ports -u'
ret = __salt__['cmd.run'](cmd)
return ret | def function[update_ports_tree, parameter[ports_tree]]:
constant[
Updates the ports tree, either the default or the `ports_tree` specified
CLI Example:
.. code-block:: bash
salt '*' poudriere.update_ports_tree staging
]
call[name[_check_config_exists], parameter[]]
if name[ports_tree] begin[:]
variable[cmd] assign[=] call[constant[poudriere ports -u -p {0}].format, parameter[name[ports_tree]]]
variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
return[name[ret]] | keyword[def] identifier[update_ports_tree] ( identifier[ports_tree] ):
literal[string]
identifier[_check_config_exists] ()
keyword[if] identifier[ports_tree] :
identifier[cmd] = literal[string] . identifier[format] ( identifier[ports_tree] )
keyword[else] :
identifier[cmd] = literal[string]
identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[cmd] )
keyword[return] identifier[ret] | def update_ports_tree(ports_tree):
"""
Updates the ports tree, either the default or the `ports_tree` specified
CLI Example:
.. code-block:: bash
salt '*' poudriere.update_ports_tree staging
"""
_check_config_exists()
if ports_tree:
cmd = 'poudriere ports -u -p {0}'.format(ports_tree) # depends on [control=['if'], data=[]]
else:
cmd = 'poudriere ports -u'
ret = __salt__['cmd.run'](cmd)
return ret |
def fmt_subst(regex, subst):
"""Replace regex with string."""
return lambda text: re.sub(regex, subst, text) if text else text | def function[fmt_subst, parameter[regex, subst]]:
constant[Replace regex with string.]
return[<ast.Lambda object at 0x7da1b13e8b20>] | keyword[def] identifier[fmt_subst] ( identifier[regex] , identifier[subst] ):
literal[string]
keyword[return] keyword[lambda] identifier[text] : identifier[re] . identifier[sub] ( identifier[regex] , identifier[subst] , identifier[text] ) keyword[if] identifier[text] keyword[else] identifier[text] | def fmt_subst(regex, subst):
"""Replace regex with string."""
return lambda text: re.sub(regex, subst, text) if text else text |
def MapFile(self, key_path_prefix, registry_file):
"""Maps the Windows Registry file to a specific key path prefix.
Args:
key_path_prefix (str): key path prefix.
registry_file (WinRegistryFile): Windows Registry file.
"""
self._registry_files[key_path_prefix.upper()] = registry_file
registry_file.SetKeyPathPrefix(key_path_prefix) | def function[MapFile, parameter[self, key_path_prefix, registry_file]]:
constant[Maps the Windows Registry file to a specific key path prefix.
Args:
key_path_prefix (str): key path prefix.
registry_file (WinRegistryFile): Windows Registry file.
]
call[name[self]._registry_files][call[name[key_path_prefix].upper, parameter[]]] assign[=] name[registry_file]
call[name[registry_file].SetKeyPathPrefix, parameter[name[key_path_prefix]]] | keyword[def] identifier[MapFile] ( identifier[self] , identifier[key_path_prefix] , identifier[registry_file] ):
literal[string]
identifier[self] . identifier[_registry_files] [ identifier[key_path_prefix] . identifier[upper] ()]= identifier[registry_file]
identifier[registry_file] . identifier[SetKeyPathPrefix] ( identifier[key_path_prefix] ) | def MapFile(self, key_path_prefix, registry_file):
"""Maps the Windows Registry file to a specific key path prefix.
Args:
key_path_prefix (str): key path prefix.
registry_file (WinRegistryFile): Windows Registry file.
"""
self._registry_files[key_path_prefix.upper()] = registry_file
registry_file.SetKeyPathPrefix(key_path_prefix) |
def modflow_hob_to_instruction_file(hob_file):
"""write an instruction file for a modflow head observation file
Parameters
----------
hob_file : str
modflow hob file
Returns
-------
df : pandas.DataFrame
pandas DataFrame with control file observation information
"""
hob_df = pd.read_csv(hob_file,delim_whitespace=True,skiprows=1,
header=None,names=["simval","obsval","obsnme"])
hob_df.loc[:,"obsnme"] = hob_df.obsnme.apply(str.lower)
hob_df.loc[:,"ins_line"] = hob_df.obsnme.apply(lambda x:"l1 !{0:s}!".format(x))
hob_df.loc[0,"ins_line"] = hob_df.loc[0,"ins_line"].replace('l1','l2')
ins_file = hob_file + ".ins"
f_ins = open(ins_file, 'w')
f_ins.write("pif ~\n")
f_ins.write(hob_df.loc[:,["ins_line"]].to_string(col_space=0,
columns=["ins_line"],
header=False,
index=False,
formatters=[SFMT]) + '\n')
hob_df.loc[:,"weight"] = 1.0
hob_df.loc[:,"obgnme"] = "obgnme"
f_ins.close()
return hob_df | def function[modflow_hob_to_instruction_file, parameter[hob_file]]:
constant[write an instruction file for a modflow head observation file
Parameters
----------
hob_file : str
modflow hob file
Returns
-------
df : pandas.DataFrame
pandas DataFrame with control file observation information
]
variable[hob_df] assign[=] call[name[pd].read_csv, parameter[name[hob_file]]]
call[name[hob_df].loc][tuple[[<ast.Slice object at 0x7da20c76c2e0>, <ast.Constant object at 0x7da20c76df00>]]] assign[=] call[name[hob_df].obsnme.apply, parameter[name[str].lower]]
call[name[hob_df].loc][tuple[[<ast.Slice object at 0x7da1b2406dd0>, <ast.Constant object at 0x7da1b2406fb0>]]] assign[=] call[name[hob_df].obsnme.apply, parameter[<ast.Lambda object at 0x7da1b2406350>]]
call[name[hob_df].loc][tuple[[<ast.Constant object at 0x7da1b2407f10>, <ast.Constant object at 0x7da1b2404400>]]] assign[=] call[call[name[hob_df].loc][tuple[[<ast.Constant object at 0x7da1b2407c70>, <ast.Constant object at 0x7da1b2407400>]]].replace, parameter[constant[l1], constant[l2]]]
variable[ins_file] assign[=] binary_operation[name[hob_file] + constant[.ins]]
variable[f_ins] assign[=] call[name[open], parameter[name[ins_file], constant[w]]]
call[name[f_ins].write, parameter[constant[pif ~
]]]
call[name[f_ins].write, parameter[binary_operation[call[call[name[hob_df].loc][tuple[[<ast.Slice object at 0x7da1b2405120>, <ast.List object at 0x7da1b2406a10>]]].to_string, parameter[]] + constant[
]]]]
call[name[hob_df].loc][tuple[[<ast.Slice object at 0x7da1b2407e20>, <ast.Constant object at 0x7da1b2404430>]]] assign[=] constant[1.0]
call[name[hob_df].loc][tuple[[<ast.Slice object at 0x7da1b246c220>, <ast.Constant object at 0x7da1b246de10>]]] assign[=] constant[obgnme]
call[name[f_ins].close, parameter[]]
return[name[hob_df]] | keyword[def] identifier[modflow_hob_to_instruction_file] ( identifier[hob_file] ):
literal[string]
identifier[hob_df] = identifier[pd] . identifier[read_csv] ( identifier[hob_file] , identifier[delim_whitespace] = keyword[True] , identifier[skiprows] = literal[int] ,
identifier[header] = keyword[None] , identifier[names] =[ literal[string] , literal[string] , literal[string] ])
identifier[hob_df] . identifier[loc] [:, literal[string] ]= identifier[hob_df] . identifier[obsnme] . identifier[apply] ( identifier[str] . identifier[lower] )
identifier[hob_df] . identifier[loc] [:, literal[string] ]= identifier[hob_df] . identifier[obsnme] . identifier[apply] ( keyword[lambda] identifier[x] : literal[string] . identifier[format] ( identifier[x] ))
identifier[hob_df] . identifier[loc] [ literal[int] , literal[string] ]= identifier[hob_df] . identifier[loc] [ literal[int] , literal[string] ]. identifier[replace] ( literal[string] , literal[string] )
identifier[ins_file] = identifier[hob_file] + literal[string]
identifier[f_ins] = identifier[open] ( identifier[ins_file] , literal[string] )
identifier[f_ins] . identifier[write] ( literal[string] )
identifier[f_ins] . identifier[write] ( identifier[hob_df] . identifier[loc] [:,[ literal[string] ]]. identifier[to_string] ( identifier[col_space] = literal[int] ,
identifier[columns] =[ literal[string] ],
identifier[header] = keyword[False] ,
identifier[index] = keyword[False] ,
identifier[formatters] =[ identifier[SFMT] ])+ literal[string] )
identifier[hob_df] . identifier[loc] [:, literal[string] ]= literal[int]
identifier[hob_df] . identifier[loc] [:, literal[string] ]= literal[string]
identifier[f_ins] . identifier[close] ()
keyword[return] identifier[hob_df] | def modflow_hob_to_instruction_file(hob_file):
"""write an instruction file for a modflow head observation file
Parameters
----------
hob_file : str
modflow hob file
Returns
-------
df : pandas.DataFrame
pandas DataFrame with control file observation information
"""
hob_df = pd.read_csv(hob_file, delim_whitespace=True, skiprows=1, header=None, names=['simval', 'obsval', 'obsnme'])
hob_df.loc[:, 'obsnme'] = hob_df.obsnme.apply(str.lower)
hob_df.loc[:, 'ins_line'] = hob_df.obsnme.apply(lambda x: 'l1 !{0:s}!'.format(x))
hob_df.loc[0, 'ins_line'] = hob_df.loc[0, 'ins_line'].replace('l1', 'l2')
ins_file = hob_file + '.ins'
f_ins = open(ins_file, 'w')
f_ins.write('pif ~\n')
f_ins.write(hob_df.loc[:, ['ins_line']].to_string(col_space=0, columns=['ins_line'], header=False, index=False, formatters=[SFMT]) + '\n')
hob_df.loc[:, 'weight'] = 1.0
hob_df.loc[:, 'obgnme'] = 'obgnme'
f_ins.close()
return hob_df |
def registry_path(cls, project, location, registry):
"""Return a fully-qualified registry string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/registries/{registry}",
project=project,
location=location,
registry=registry,
) | def function[registry_path, parameter[cls, project, location, registry]]:
constant[Return a fully-qualified registry string.]
return[call[name[google].api_core.path_template.expand, parameter[constant[projects/{project}/locations/{location}/registries/{registry}]]]] | keyword[def] identifier[registry_path] ( identifier[cls] , identifier[project] , identifier[location] , identifier[registry] ):
literal[string]
keyword[return] identifier[google] . identifier[api_core] . identifier[path_template] . identifier[expand] (
literal[string] ,
identifier[project] = identifier[project] ,
identifier[location] = identifier[location] ,
identifier[registry] = identifier[registry] ,
) | def registry_path(cls, project, location, registry):
"""Return a fully-qualified registry string."""
return google.api_core.path_template.expand('projects/{project}/locations/{location}/registries/{registry}', project=project, location=location, registry=registry) |
def prepare_outdir(outdir):
"""
Creates the output directory if not existing.
If outdir is None or if no output_files are provided nothing happens.
:param outdir: The output directory to create.
"""
if outdir:
outdir = os.path.expanduser(outdir)
if not os.path.isdir(outdir):
try:
os.makedirs(outdir)
except os.error as e:
raise JobExecutionError('Failed to create outdir "{}".\n{}'.format(outdir, str(e))) | def function[prepare_outdir, parameter[outdir]]:
constant[
Creates the output directory if not existing.
If outdir is None or if no output_files are provided nothing happens.
:param outdir: The output directory to create.
]
if name[outdir] begin[:]
variable[outdir] assign[=] call[name[os].path.expanduser, parameter[name[outdir]]]
if <ast.UnaryOp object at 0x7da1b10f6710> begin[:]
<ast.Try object at 0x7da1b10f68f0> | keyword[def] identifier[prepare_outdir] ( identifier[outdir] ):
literal[string]
keyword[if] identifier[outdir] :
identifier[outdir] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[outdir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[outdir] ):
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[outdir] )
keyword[except] identifier[os] . identifier[error] keyword[as] identifier[e] :
keyword[raise] identifier[JobExecutionError] ( literal[string] . identifier[format] ( identifier[outdir] , identifier[str] ( identifier[e] ))) | def prepare_outdir(outdir):
"""
Creates the output directory if not existing.
If outdir is None or if no output_files are provided nothing happens.
:param outdir: The output directory to create.
"""
if outdir:
outdir = os.path.expanduser(outdir)
if not os.path.isdir(outdir):
try:
os.makedirs(outdir) # depends on [control=['try'], data=[]]
except os.error as e:
raise JobExecutionError('Failed to create outdir "{}".\n{}'.format(outdir, str(e))) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def encipher(self,string):
"""Encipher string using Porta cipher according to initialised key. Punctuation and whitespace
are removed from the input.
Example::
ciphertext = Porta('HELLO').encipher(plaintext)
:param string: The string to encipher.
:returns: The enciphered string.
"""
string = self.remove_punctuation(string)
ret = ''
for (i,c) in enumerate(string):
i = i%len(self.key)
if self.key[i] in 'AB': ret += 'NOPQRSTUVWXYZABCDEFGHIJKLM'[self.a2i(c)]
elif self.key[i] in 'YZ': ret += 'ZNOPQRSTUVWXYBCDEFGHIJKLMA'[self.a2i(c)]
elif self.key[i] in 'WX': ret += 'YZNOPQRSTUVWXCDEFGHIJKLMAB'[self.a2i(c)]
elif self.key[i] in 'UV': ret += 'XYZNOPQRSTUVWDEFGHIJKLMABC'[self.a2i(c)]
elif self.key[i] in 'ST': ret += 'WXYZNOPQRSTUVEFGHIJKLMABCD'[self.a2i(c)]
elif self.key[i] in 'QR': ret += 'VWXYZNOPQRSTUFGHIJKLMABCDE'[self.a2i(c)]
elif self.key[i] in 'OP': ret += 'UVWXYZNOPQRSTGHIJKLMABCDEF'[self.a2i(c)]
elif self.key[i] in 'MN': ret += 'TUVWXYZNOPQRSHIJKLMABCDEFG'[self.a2i(c)]
elif self.key[i] in 'KL': ret += 'STUVWXYZNOPQRIJKLMABCDEFGH'[self.a2i(c)]
elif self.key[i] in 'IJ': ret += 'RSTUVWXYZNOPQJKLMABCDEFGHI'[self.a2i(c)]
elif self.key[i] in 'GH': ret += 'QRSTUVWXYZNOPKLMABCDEFGHIJ'[self.a2i(c)]
elif self.key[i] in 'EF': ret += 'PQRSTUVWXYZNOLMABCDEFGHIJK'[self.a2i(c)]
elif self.key[i] in 'CD': ret += 'OPQRSTUVWXYZNMABCDEFGHIJKL'[self.a2i(c)]
return ret | def function[encipher, parameter[self, string]]:
constant[Encipher string using Porta cipher according to initialised key. Punctuation and whitespace
are removed from the input.
Example::
ciphertext = Porta('HELLO').encipher(plaintext)
:param string: The string to encipher.
:returns: The enciphered string.
]
variable[string] assign[=] call[name[self].remove_punctuation, parameter[name[string]]]
variable[ret] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da1b2344250>, <ast.Name object at 0x7da1b2346140>]]] in starred[call[name[enumerate], parameter[name[string]]]] begin[:]
variable[i] assign[=] binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].key]]]
if compare[call[name[self].key][name[i]] in constant[AB]] begin[:]
<ast.AugAssign object at 0x7da1b2345f30>
return[name[ret]] | keyword[def] identifier[encipher] ( identifier[self] , identifier[string] ):
literal[string]
identifier[string] = identifier[self] . identifier[remove_punctuation] ( identifier[string] )
identifier[ret] = literal[string]
keyword[for] ( identifier[i] , identifier[c] ) keyword[in] identifier[enumerate] ( identifier[string] ):
identifier[i] = identifier[i] % identifier[len] ( identifier[self] . identifier[key] )
keyword[if] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[elif] identifier[self] . identifier[key] [ identifier[i] ] keyword[in] literal[string] : identifier[ret] += literal[string] [ identifier[self] . identifier[a2i] ( identifier[c] )]
keyword[return] identifier[ret] | def encipher(self, string):
"""Encipher string using Porta cipher according to initialised key. Punctuation and whitespace
are removed from the input.
Example::
ciphertext = Porta('HELLO').encipher(plaintext)
:param string: The string to encipher.
:returns: The enciphered string.
"""
string = self.remove_punctuation(string)
ret = ''
for (i, c) in enumerate(string):
i = i % len(self.key)
if self.key[i] in 'AB':
ret += 'NOPQRSTUVWXYZABCDEFGHIJKLM'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'YZ':
ret += 'ZNOPQRSTUVWXYBCDEFGHIJKLMA'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'WX':
ret += 'YZNOPQRSTUVWXCDEFGHIJKLMAB'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'UV':
ret += 'XYZNOPQRSTUVWDEFGHIJKLMABC'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'ST':
ret += 'WXYZNOPQRSTUVEFGHIJKLMABCD'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'QR':
ret += 'VWXYZNOPQRSTUFGHIJKLMABCDE'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'OP':
ret += 'UVWXYZNOPQRSTGHIJKLMABCDEF'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'MN':
ret += 'TUVWXYZNOPQRSHIJKLMABCDEFG'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'KL':
ret += 'STUVWXYZNOPQRIJKLMABCDEFGH'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'IJ':
ret += 'RSTUVWXYZNOPQJKLMABCDEFGHI'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'GH':
ret += 'QRSTUVWXYZNOPKLMABCDEFGHIJ'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'EF':
ret += 'PQRSTUVWXYZNOLMABCDEFGHIJK'[self.a2i(c)] # depends on [control=['if'], data=[]]
elif self.key[i] in 'CD':
ret += 'OPQRSTUVWXYZNMABCDEFGHIJKL'[self.a2i(c)] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return ret |
def vertex_fingerprints(self):
    """A fingerprint for each vertex
    The result is invariant under permutation of the vertex indexes.
    Vertices that are symmetrically equivalent will get the same
    fingerprint, e.g. the hydrogens in methane would get the same
    fingerprint.
    """
    # Build one descriptor string per vertex and one per edge, then delegate
    # to get_vertex_fingerprints, which combines them into the per-vertex
    # fingerprints (mixing scheme lives in that helper).
    return self.get_vertex_fingerprints(
        [self.get_vertex_string(i) for i in range(self.num_vertices)],
        [self.get_edge_string(i) for i in range(self.num_edges)],
    ) | def function[vertex_fingerprints, parameter[self]]:
constant[A fingerprint for each vertex
The result is invariant under permutation of the vertex indexes.
Vertices that are symmetrically equivalent will get the same
fingerprint, e.g. the hydrogens in methane would get the same
fingerprint.
]
return[call[name[self].get_vertex_fingerprints, parameter[<ast.ListComp object at 0x7da18eb55600>, <ast.ListComp object at 0x7da18eb57790>]]] | keyword[def] identifier[vertex_fingerprints] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[get_vertex_fingerprints] (
[ identifier[self] . identifier[get_vertex_string] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[num_vertices] )],
[ identifier[self] . identifier[get_edge_string] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[num_edges] )],
) | def vertex_fingerprints(self):
"""A fingerprint for each vertex
The result is invariant under permutation of the vertex indexes.
Vertices that are symmetrically equivalent will get the same
fingerprint, e.g. the hydrogens in methane would get the same
fingerprint.
"""
return self.get_vertex_fingerprints([self.get_vertex_string(i) for i in range(self.num_vertices)], [self.get_edge_string(i) for i in range(self.num_edges)]) |
def netflix(es, ps, e0, l=.0001):
    """
    Combine predictions with the optimal weights to minimize RMSE.
    Args:
        es (list of float): RMSEs of predictions
        ps (list of np.array): predictions
        e0 (float): RMSE of all zero prediction
        l (float): lambda as in the ridge regression
    Returns:
        Ensemble prediction (np.array) and weights (np.array) for input predictions
    """
    m = len(es)  # number of base predictors
    n = len(ps[0])  # number of samples (all predictions assumed equal length)
    X = np.stack(ps).T  # (n, m) design matrix, one column per predictor
    # Recover X^T y without access to y itself: from n*e_k^2 = ||p_k - y||^2
    # and n*e0^2 = ||y||^2 (e0 is the RMSE of the all-zero prediction) we get
    #   p_k^T y = 0.5 * (n*e0^2 + ||p_k||^2 - n*e_k^2)
    pTy = .5 * (n * e0**2 + (X**2).sum(axis=0) - n * np.array(es)**2)
    # Ridge normal equations: w = (X^T X + l*n*I)^(-1) X^T y.  pinv keeps the
    # solve well-defined even if X^T X is singular (e.g. duplicate predictors).
    w = np.linalg.pinv(X.T.dot(X) + l * n * np.eye(m)).dot(pTy)
    return X.dot(w), w | def function[netflix, parameter[es, ps, e0, l]]:
constant[
Combine predictions with the optimal weights to minimize RMSE.
Args:
es (list of float): RMSEs of predictions
ps (list of np.array): predictions
e0 (float): RMSE of all zero prediction
l (float): lambda as in the ridge regression
Returns:
Ensemble prediction (np.array) and weights (np.array) for input predictions
]
variable[m] assign[=] call[name[len], parameter[name[es]]]
variable[n] assign[=] call[name[len], parameter[call[name[ps]][constant[0]]]]
variable[X] assign[=] call[name[np].stack, parameter[name[ps]]].T
variable[pTy] assign[=] binary_operation[constant[0.5] * binary_operation[binary_operation[binary_operation[name[n] * binary_operation[name[e0] ** constant[2]]] + call[binary_operation[name[X] ** constant[2]].sum, parameter[]]] - binary_operation[name[n] * binary_operation[call[name[np].array, parameter[name[es]]] ** constant[2]]]]]
variable[w] assign[=] call[call[name[np].linalg.pinv, parameter[binary_operation[call[name[X].T.dot, parameter[name[X]]] + binary_operation[binary_operation[name[l] * name[n]] * call[name[np].eye, parameter[name[m]]]]]]].dot, parameter[name[pTy]]]
return[tuple[[<ast.Call object at 0x7da2054a6950>, <ast.Name object at 0x7da2054a64d0>]]] | keyword[def] identifier[netflix] ( identifier[es] , identifier[ps] , identifier[e0] , identifier[l] = literal[int] ):
literal[string]
identifier[m] = identifier[len] ( identifier[es] )
identifier[n] = identifier[len] ( identifier[ps] [ literal[int] ])
identifier[X] = identifier[np] . identifier[stack] ( identifier[ps] ). identifier[T]
identifier[pTy] = literal[int] *( identifier[n] * identifier[e0] ** literal[int] +( identifier[X] ** literal[int] ). identifier[sum] ( identifier[axis] = literal[int] )- identifier[n] * identifier[np] . identifier[array] ( identifier[es] )** literal[int] )
identifier[w] = identifier[np] . identifier[linalg] . identifier[pinv] ( identifier[X] . identifier[T] . identifier[dot] ( identifier[X] )+ identifier[l] * identifier[n] * identifier[np] . identifier[eye] ( identifier[m] )). identifier[dot] ( identifier[pTy] )
keyword[return] identifier[X] . identifier[dot] ( identifier[w] ), identifier[w] | def netflix(es, ps, e0, l=0.0001):
"""
Combine predictions with the optimal weights to minimize RMSE.
Args:
es (list of float): RMSEs of predictions
ps (list of np.array): predictions
e0 (float): RMSE of all zero prediction
l (float): lambda as in the ridge regression
Returns:
Ensemble prediction (np.array) and weights (np.array) for input predictions
"""
m = len(es)
n = len(ps[0])
X = np.stack(ps).T
pTy = 0.5 * (n * e0 ** 2 + (X ** 2).sum(axis=0) - n * np.array(es) ** 2)
w = np.linalg.pinv(X.T.dot(X) + l * n * np.eye(m)).dot(pTy)
return (X.dot(w), w) |
def get_substring_idxs(substr, string):
    """
    Return a list of indexes of substr. If substr not found, list is
    empty.
    Arguments:
        substr (str): Substring to match.
        string (str): String to match in.
    Returns:
        list of int: Start indices of substr.
    """
    # NOTE(review): substr is handed to re.finditer as a regex PATTERN, not a
    # literal string, so regex metacharacters in substr (".", "*", "(", ...)
    # change the semantics; matches are also non-overlapping.  If literal
    # substring search is intended, this should use re.escape(substr).
    return [match.start() for match in re.finditer(substr, string)] | def function[get_substring_idxs, parameter[substr, string]]:
constant[
Return a list of indexes of substr. If substr not found, list is
empty.
Arguments:
substr (str): Substring to match.
string (str): String to match in.
Returns:
list of int: Start indices of substr.
]
return[<ast.ListComp object at 0x7da1b085d120>] | keyword[def] identifier[get_substring_idxs] ( identifier[substr] , identifier[string] ):
literal[string]
keyword[return] [ identifier[match] . identifier[start] () keyword[for] identifier[match] keyword[in] identifier[re] . identifier[finditer] ( identifier[substr] , identifier[string] )] | def get_substring_idxs(substr, string):
"""
Return a list of indexes of substr. If substr not found, list is
empty.
Arguments:
substr (str): Substring to match.
string (str): String to match in.
Returns:
list of int: Start indices of substr.
"""
return [match.start() for match in re.finditer(substr, string)] |
def simulate(self, T):
    """Simulate state and observation processes.
    Parameters
    ----------
    T: int
        processes are simulated from time 0 to time T-1
    Returns
    -------
    x, y: lists
        lists of length T
    """
    x = []
    for t in range(T):
        # Initial law PX0 at t == 0, transition kernel PX(t, x_{t-1}) after.
        law_x = self.PX0() if t == 0 else self.PX(t, x[-1])
        x.append(law_x.rvs(size=1))  # one state draw per time step
    # Observations are drawn conditionally on the whole state trajectory.
    y = self.simulate_given_x(x)
    return x, y | def function[simulate, parameter[self, T]]:
constant[Simulate state and observation processes.
Parameters
----------
T: int
processes are simulated from time 0 to time T-1
Returns
-------
x, y: lists
lists of length T
]
variable[x] assign[=] list[[]]
for taget[name[t]] in starred[call[name[range], parameter[name[T]]]] begin[:]
variable[law_x] assign[=] <ast.IfExp object at 0x7da20e9557b0>
call[name[x].append, parameter[call[name[law_x].rvs, parameter[]]]]
variable[y] assign[=] call[name[self].simulate_given_x, parameter[name[x]]]
return[tuple[[<ast.Name object at 0x7da20e957dc0>, <ast.Name object at 0x7da20e957c10>]]] | keyword[def] identifier[simulate] ( identifier[self] , identifier[T] ):
literal[string]
identifier[x] =[]
keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[T] ):
identifier[law_x] = identifier[self] . identifier[PX0] () keyword[if] identifier[t] == literal[int] keyword[else] identifier[self] . identifier[PX] ( identifier[t] , identifier[x] [- literal[int] ])
identifier[x] . identifier[append] ( identifier[law_x] . identifier[rvs] ( identifier[size] = literal[int] ))
identifier[y] = identifier[self] . identifier[simulate_given_x] ( identifier[x] )
keyword[return] identifier[x] , identifier[y] | def simulate(self, T):
"""Simulate state and observation processes.
Parameters
----------
T: int
processes are simulated from time 0 to time T-1
Returns
-------
x, y: lists
lists of length T
"""
x = []
for t in range(T):
law_x = self.PX0() if t == 0 else self.PX(t, x[-1])
x.append(law_x.rvs(size=1)) # depends on [control=['for'], data=['t']]
y = self.simulate_given_x(x)
return (x, y) |
def format_symbol(self, symbol, link_resolver):
    """
    Format a symbols.Symbol
    """
    # Nothing to render for a missing symbol.
    if not symbol:
        return ''
    # FieldSymbol instances produce no standalone output — presumably they
    # are rendered as part of their parent symbol; verify with callers.
    if isinstance(symbol, FieldSymbol):
        return ''
    # pylint: disable=unused-variable
    # NOTE(review): link_resolver is unused here; apparently kept for
    # interface compatibility — confirm before removing.
    out = self._format_symbol(symbol)
    template = self.get_template('symbol_wrapper.html')
    # Wrap the formatted doc in the symbol wrapper template.
    return template.render(
        {'symbol': symbol,
         'formatted_doc': out}) | def function[format_symbol, parameter[self, symbol, link_resolver]]:
constant[
Format a symbols.Symbol
]
if <ast.UnaryOp object at 0x7da18f00eef0> begin[:]
return[constant[]]
if call[name[isinstance], parameter[name[symbol], name[FieldSymbol]]] begin[:]
return[constant[]]
variable[out] assign[=] call[name[self]._format_symbol, parameter[name[symbol]]]
variable[template] assign[=] call[name[self].get_template, parameter[constant[symbol_wrapper.html]]]
return[call[name[template].render, parameter[dictionary[[<ast.Constant object at 0x7da18f00d1b0>, <ast.Constant object at 0x7da18f00f820>], [<ast.Name object at 0x7da18f00fb20>, <ast.Name object at 0x7da18f00d900>]]]]] | keyword[def] identifier[format_symbol] ( identifier[self] , identifier[symbol] , identifier[link_resolver] ):
literal[string]
keyword[if] keyword[not] identifier[symbol] :
keyword[return] literal[string]
keyword[if] identifier[isinstance] ( identifier[symbol] , identifier[FieldSymbol] ):
keyword[return] literal[string]
identifier[out] = identifier[self] . identifier[_format_symbol] ( identifier[symbol] )
identifier[template] = identifier[self] . identifier[get_template] ( literal[string] )
keyword[return] identifier[template] . identifier[render] (
{ literal[string] : identifier[symbol] ,
literal[string] : identifier[out] }) | def format_symbol(self, symbol, link_resolver):
"""
Format a symbols.Symbol
"""
if not symbol:
return '' # depends on [control=['if'], data=[]]
if isinstance(symbol, FieldSymbol):
return '' # depends on [control=['if'], data=[]]
# pylint: disable=unused-variable
out = self._format_symbol(symbol)
template = self.get_template('symbol_wrapper.html')
return template.render({'symbol': symbol, 'formatted_doc': out}) |
def albedo(self, value=999.0):
    """Corresponds to IDD Field `albedo`
    Args:
        value (float): value for IDD Field `albedo`
        Missing value: 999.0
        if `value` is None it will not be checked against the
        specification and is assumed to be a missing value
    Raises:
        ValueError: if `value` is not a valid value
    """
    # None is stored as-is and represents a missing value; anything else
    # must be coercible to float.
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            # Re-raise with a field-specific message for easier debugging.
            raise ValueError('value {} need to be of type float '
                             'for field `albedo`'.format(value))
    self._albedo = value | def function[albedo, parameter[self, value]]:
constant[Corresponds to IDD Field `albedo`
Args:
value (float): value for IDD Field `albedo`
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b0fb05b0>
name[self]._albedo assign[=] name[value] | keyword[def] identifier[albedo] ( identifier[self] , identifier[value] = literal[int] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
identifier[self] . identifier[_albedo] = identifier[value] | def albedo(self, value=999.0):
"""Corresponds to IDD Field `albedo`
Args:
value (float): value for IDD Field `albedo`
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = float(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type float for field `albedo`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']]
self._albedo = value |
def resolve_inline_message_id(inline_msg_id):
    """
    Resolves an inline message ID. Returns a tuple of
    ``(message id, peer, dc id, access hash)``
    The ``peer`` may either be a :tl:`PeerUser` referencing
    the user who sent the message via the bot in a private
    conversation or small group chat, or a :tl:`PeerChannel`
    if the message was sent in a channel.
    The ``access_hash`` does not have any use yet.
    """
    try:
        # Decoded payload is packed little-endian as: int32 dc_id,
        # int32 message_id, int32 peer id (negative => channel), int64 hash.
        dc_id, message_id, pid, access_hash = \
            struct.unpack('<iiiq', _decode_telegram_base64(inline_msg_id))
        peer = types.PeerChannel(-pid) if pid < 0 else types.PeerUser(pid)
        return message_id, peer, dc_id, access_hash
    except (struct.error, TypeError):
        # Malformed or undecodable IDs resolve to all-None instead of raising.
        return None, None, None, None | def function[resolve_inline_message_id, parameter[inline_msg_id]]:
constant[
Resolves an inline message ID. Returns a tuple of
``(message id, peer, dc id, access hash)``
The ``peer`` may either be a :tl:`PeerUser` referencing
the user who sent the message via the bot in a private
conversation or small group chat, or a :tl:`PeerChannel`
if the message was sent in a channel.
The ``access_hash`` does not have any use yet.
]
<ast.Try object at 0x7da1b21891b0> | keyword[def] identifier[resolve_inline_message_id] ( identifier[inline_msg_id] ):
literal[string]
keyword[try] :
identifier[dc_id] , identifier[message_id] , identifier[pid] , identifier[access_hash] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[_decode_telegram_base64] ( identifier[inline_msg_id] ))
identifier[peer] = identifier[types] . identifier[PeerChannel] (- identifier[pid] ) keyword[if] identifier[pid] < literal[int] keyword[else] identifier[types] . identifier[PeerUser] ( identifier[pid] )
keyword[return] identifier[message_id] , identifier[peer] , identifier[dc_id] , identifier[access_hash]
keyword[except] ( identifier[struct] . identifier[error] , identifier[TypeError] ):
keyword[return] keyword[None] , keyword[None] , keyword[None] , keyword[None] | def resolve_inline_message_id(inline_msg_id):
"""
Resolves an inline message ID. Returns a tuple of
``(message id, peer, dc id, access hash)``
The ``peer`` may either be a :tl:`PeerUser` referencing
the user who sent the message via the bot in a private
conversation or small group chat, or a :tl:`PeerChannel`
if the message was sent in a channel.
The ``access_hash`` does not have any use yet.
"""
try:
(dc_id, message_id, pid, access_hash) = struct.unpack('<iiiq', _decode_telegram_base64(inline_msg_id))
peer = types.PeerChannel(-pid) if pid < 0 else types.PeerUser(pid)
return (message_id, peer, dc_id, access_hash) # depends on [control=['try'], data=[]]
except (struct.error, TypeError):
return (None, None, None, None) # depends on [control=['except'], data=[]] |
def load_raw_arrays(self, fields, start_dt, end_dt, sids):
    """
    Parameters
    ----------
    fields : list of str
        'open', 'high', 'low', 'close', or 'volume'
    start_dt: Timestamp
        Beginning of the window range.
    end_dt: Timestamp
        End of the window range.
    sids : list of int
        The asset identifiers in the window.
    Returns
    -------
    list of np.ndarray
        A list with an entry per field of ndarrays with shape
        (minutes in range, sids) with a dtype of float64, containing the
        values for the respective field over start and end dt range.
    """
    # Translate timestamps to positions in the underlying minute arrays.
    start_idx = self._find_position_of_minute(start_dt)
    end_idx = self._find_position_of_minute(end_dt)
    num_minutes = (end_idx - start_idx + 1)
    results = []
    # Inclusive (start, stop) index pairs to drop from the window; shrink the
    # output row count accordingly so the result stays dense.
    indices_to_exclude = self._exclusion_indices_for_range(
        start_idx, end_idx)
    if indices_to_exclude is not None:
        for excl_start, excl_stop in indices_to_exclude:
            length = excl_stop - excl_start + 1
            num_minutes -= length
    shape = num_minutes, len(sids)
    for field in fields:
        # Prices default to NaN (missing); volume defaults to 0, unsigned.
        if field != 'volume':
            out = np.full(shape, np.nan)
        else:
            out = np.zeros(shape, dtype=np.uint32)
        for i, sid in enumerate(sids):
            carray = self._open_minute_file(field, sid)
            values = carray[start_idx:end_idx + 1]
            if indices_to_exclude is not None:
                # Delete excluded spans back-to-front so earlier deletions
                # don't shift the offsets of later spans.
                for excl_start, excl_stop in indices_to_exclude[::-1]:
                    excl_slice = np.s_[
                        excl_start - start_idx:excl_stop - start_idx + 1]
                    values = np.delete(values, excl_slice)
            # Zero appears to be the "no data written" sentinel in the raw
            # file (see the truncation note below) — only nonzero minutes
            # are copied into the output.
            where = values != 0
            # first slice down to len(where) because we might not have
            # written data for all the minutes requested
            if field != 'volume':
                # Prices are presumably stored as scaled integers; rescale
                # with the per-sid inverse OHLC ratio — confirm against the
                # writer.
                out[:len(where), i][where] = (
                    values[where] * self._ohlc_ratio_inverse_for_sid(sid))
            else:
                out[:len(where), i][where] = values[where]
        results.append(out)
    return results | def function[load_raw_arrays, parameter[self, fields, start_dt, end_dt, sids]]:
constant[
Parameters
----------
fields : list of str
'open', 'high', 'low', 'close', or 'volume'
start_dt: Timestamp
Beginning of the window range.
end_dt: Timestamp
End of the window range.
sids : list of int
The asset identifiers in the window.
Returns
-------
list of np.ndarray
A list with an entry per field of ndarrays with shape
(minutes in range, sids) with a dtype of float64, containing the
values for the respective field over start and end dt range.
]
variable[start_idx] assign[=] call[name[self]._find_position_of_minute, parameter[name[start_dt]]]
variable[end_idx] assign[=] call[name[self]._find_position_of_minute, parameter[name[end_dt]]]
variable[num_minutes] assign[=] binary_operation[binary_operation[name[end_idx] - name[start_idx]] + constant[1]]
variable[results] assign[=] list[[]]
variable[indices_to_exclude] assign[=] call[name[self]._exclusion_indices_for_range, parameter[name[start_idx], name[end_idx]]]
if compare[name[indices_to_exclude] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b2024c70>, <ast.Name object at 0x7da1b2025810>]]] in starred[name[indices_to_exclude]] begin[:]
variable[length] assign[=] binary_operation[binary_operation[name[excl_stop] - name[excl_start]] + constant[1]]
<ast.AugAssign object at 0x7da1b20249a0>
variable[shape] assign[=] tuple[[<ast.Name object at 0x7da1b2025480>, <ast.Call object at 0x7da1b20247c0>]]
for taget[name[field]] in starred[name[fields]] begin[:]
if compare[name[field] not_equal[!=] constant[volume]] begin[:]
variable[out] assign[=] call[name[np].full, parameter[name[shape], name[np].nan]]
for taget[tuple[[<ast.Name object at 0x7da1b20248e0>, <ast.Name object at 0x7da1b20254e0>]]] in starred[call[name[enumerate], parameter[name[sids]]]] begin[:]
variable[carray] assign[=] call[name[self]._open_minute_file, parameter[name[field], name[sid]]]
variable[values] assign[=] call[name[carray]][<ast.Slice object at 0x7da1b1e8f0d0>]
if compare[name[indices_to_exclude] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1e8e1d0>, <ast.Name object at 0x7da1b1e8fac0>]]] in starred[call[name[indices_to_exclude]][<ast.Slice object at 0x7da1b1e8f820>]] begin[:]
variable[excl_slice] assign[=] call[name[np].s_][<ast.Slice object at 0x7da1b1e8e290>]
variable[values] assign[=] call[name[np].delete, parameter[name[values], name[excl_slice]]]
variable[where] assign[=] compare[name[values] not_equal[!=] constant[0]]
if compare[name[field] not_equal[!=] constant[volume]] begin[:]
call[call[name[out]][tuple[[<ast.Slice object at 0x7da1b1e8eda0>, <ast.Name object at 0x7da1b1e8e4a0>]]]][name[where]] assign[=] binary_operation[call[name[values]][name[where]] * call[name[self]._ohlc_ratio_inverse_for_sid, parameter[name[sid]]]]
call[name[results].append, parameter[name[out]]]
return[name[results]] | keyword[def] identifier[load_raw_arrays] ( identifier[self] , identifier[fields] , identifier[start_dt] , identifier[end_dt] , identifier[sids] ):
literal[string]
identifier[start_idx] = identifier[self] . identifier[_find_position_of_minute] ( identifier[start_dt] )
identifier[end_idx] = identifier[self] . identifier[_find_position_of_minute] ( identifier[end_dt] )
identifier[num_minutes] =( identifier[end_idx] - identifier[start_idx] + literal[int] )
identifier[results] =[]
identifier[indices_to_exclude] = identifier[self] . identifier[_exclusion_indices_for_range] (
identifier[start_idx] , identifier[end_idx] )
keyword[if] identifier[indices_to_exclude] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[excl_start] , identifier[excl_stop] keyword[in] identifier[indices_to_exclude] :
identifier[length] = identifier[excl_stop] - identifier[excl_start] + literal[int]
identifier[num_minutes] -= identifier[length]
identifier[shape] = identifier[num_minutes] , identifier[len] ( identifier[sids] )
keyword[for] identifier[field] keyword[in] identifier[fields] :
keyword[if] identifier[field] != literal[string] :
identifier[out] = identifier[np] . identifier[full] ( identifier[shape] , identifier[np] . identifier[nan] )
keyword[else] :
identifier[out] = identifier[np] . identifier[zeros] ( identifier[shape] , identifier[dtype] = identifier[np] . identifier[uint32] )
keyword[for] identifier[i] , identifier[sid] keyword[in] identifier[enumerate] ( identifier[sids] ):
identifier[carray] = identifier[self] . identifier[_open_minute_file] ( identifier[field] , identifier[sid] )
identifier[values] = identifier[carray] [ identifier[start_idx] : identifier[end_idx] + literal[int] ]
keyword[if] identifier[indices_to_exclude] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[excl_start] , identifier[excl_stop] keyword[in] identifier[indices_to_exclude] [::- literal[int] ]:
identifier[excl_slice] = identifier[np] . identifier[s_] [
identifier[excl_start] - identifier[start_idx] : identifier[excl_stop] - identifier[start_idx] + literal[int] ]
identifier[values] = identifier[np] . identifier[delete] ( identifier[values] , identifier[excl_slice] )
identifier[where] = identifier[values] != literal[int]
keyword[if] identifier[field] != literal[string] :
identifier[out] [: identifier[len] ( identifier[where] ), identifier[i] ][ identifier[where] ]=(
identifier[values] [ identifier[where] ]* identifier[self] . identifier[_ohlc_ratio_inverse_for_sid] ( identifier[sid] ))
keyword[else] :
identifier[out] [: identifier[len] ( identifier[where] ), identifier[i] ][ identifier[where] ]= identifier[values] [ identifier[where] ]
identifier[results] . identifier[append] ( identifier[out] )
keyword[return] identifier[results] | def load_raw_arrays(self, fields, start_dt, end_dt, sids):
"""
Parameters
----------
fields : list of str
'open', 'high', 'low', 'close', or 'volume'
start_dt: Timestamp
Beginning of the window range.
end_dt: Timestamp
End of the window range.
sids : list of int
The asset identifiers in the window.
Returns
-------
list of np.ndarray
A list with an entry per field of ndarrays with shape
(minutes in range, sids) with a dtype of float64, containing the
values for the respective field over start and end dt range.
"""
start_idx = self._find_position_of_minute(start_dt)
end_idx = self._find_position_of_minute(end_dt)
num_minutes = end_idx - start_idx + 1
results = []
indices_to_exclude = self._exclusion_indices_for_range(start_idx, end_idx)
if indices_to_exclude is not None:
for (excl_start, excl_stop) in indices_to_exclude:
length = excl_stop - excl_start + 1
num_minutes -= length # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['indices_to_exclude']]
shape = (num_minutes, len(sids))
for field in fields:
if field != 'volume':
out = np.full(shape, np.nan) # depends on [control=['if'], data=[]]
else:
out = np.zeros(shape, dtype=np.uint32)
for (i, sid) in enumerate(sids):
carray = self._open_minute_file(field, sid)
values = carray[start_idx:end_idx + 1]
if indices_to_exclude is not None:
for (excl_start, excl_stop) in indices_to_exclude[::-1]:
excl_slice = np.s_[excl_start - start_idx:excl_stop - start_idx + 1]
values = np.delete(values, excl_slice) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['indices_to_exclude']]
where = values != 0
# first slice down to len(where) because we might not have
# written data for all the minutes requested
if field != 'volume':
out[:len(where), i][where] = values[where] * self._ohlc_ratio_inverse_for_sid(sid) # depends on [control=['if'], data=[]]
else:
out[:len(where), i][where] = values[where] # depends on [control=['for'], data=[]]
results.append(out) # depends on [control=['for'], data=['field']]
return results |
def _parse_player(self, index, attributes, postgame, game_type):
    """Parse a player."""
    # Voobly lookups are best-effort: any VooblyError leaves user/rating
    # as None rather than failing the whole parse.
    try:
        voobly_user = voobly.user(self._voobly_session, attributes.player_name,
                                  ladder_ids=VOOBLY_LADDERS.values())
        # Ladder key is "<game type> <diplomacy type>"; unknown combinations
        # yield voobly_ladder_id = None and thus no current rating.
        voobly_ladder = '{} {}'.format(game_type, self._diplomacy['type'])
        voobly_ladder_id = VOOBLY_LADDERS.get(voobly_ladder)
        if voobly_ladder_id in voobly_user['ladders']:
            voobly_rating = voobly_user['ladders'].get(voobly_ladder_id).get('rating')
        else:
            voobly_rating = None
    except voobly.VooblyError:
        voobly_user = None
        voobly_rating = None
    player = {
        'index': index,
        'number': attributes.player_color + 1,  # 0-based color -> 1-based number
        'color': mgz.const.PLAYER_COLORS[attributes.player_color],
        'coordinates': {
            'x': attributes.camera_x,
            'y': attributes.camera_y
        },
        'action_histogram': dict(self._actions_by_player[index]),
        # self._time / 1000: duration presumably ms -> seconds for APM;
        # confirm against _calculate_apm.
        'apm': _calculate_apm(index, self._actions_by_player,
                              self._actions_without_player, self._time / 1000),
        'name': attributes.player_name,
        'civilization': mgz.const.CIVILIZATION_NAMES[attributes.civilization],
        'position': self._compass_position(attributes.camera_x, attributes.camera_y),
        'research': self._research.get(index, []),
        'build': self._build.get(index, []),
        'voobly': {
            'rating_game': self._ratings.get(attributes.player_name),
            'rating_current': voobly_rating,
            'nation': voobly_user['nationid'] if voobly_user else None,
            'uid': voobly_user['uid'] if voobly_user else None
        },
        'ages': {},
        'achievements': {}
    }
    if postgame:
        # Match postgame achievements by player name, not index.
        achievements = None
        # player index doesn't always match order of postgame achievements! (restores?)
        for ach in postgame.achievements:
            if attributes.player_name == ach.player_name:
                achievements = ach
        # No name match: return the basic record without postgame stats.
        if not achievements:
            return player
        player['score'] = achievements.total_score
        player['mvp'] = achievements.mvp
        player['winner'] = achievements.victory
        player['achievements'] = {
            'units_killed': achievements.military.units_killed,
            'units_lost': achievements.military.units_lost,
            'buildings_razed': achievements.military.buildings_razed,
            'buildings_lost': achievements.military.buildings_lost,
            'conversions': achievements.military.units_converted,
            'food_collected': achievements.economy.food_collected,
            'wood_collected': achievements.economy.wood_collected,
            'gold_collected': achievements.economy.gold_collected,
            'stone_collected': achievements.economy.stone_collected,
            'tribute_sent': achievements.economy.tribute_sent,
            'tribute_received': achievements.economy.tribute_received,
            'trade_gold': achievements.economy.trade_gold,
            'relic_gold': achievements.economy.relic_gold,
            'explored_percent': achievements.technology.explored_percent,
            'total_castles': achievements.society.total_castles,
            'relics_collected': achievements.society.relics_captured,
            'villager_high': achievements.society.villager_high
        }
        player['ages'] = {
            'feudal': _timestamp_to_time(achievements.technology.feudal_time),
            'castle': _timestamp_to_time(achievements.technology.castle_time),
            'imperial': _timestamp_to_time(achievements.technology.imperial_time)
        }
    return player | def function[_parse_player, parameter[self, index, attributes, postgame, game_type]]:
constant[Parse a player.]
<ast.Try object at 0x7da1b25efd60>
variable[player] assign[=] dictionary[[<ast.Constant object at 0x7da1b25ef1c0>, <ast.Constant object at 0x7da1b25ef190>, <ast.Constant object at 0x7da1b25ef160>, <ast.Constant object at 0x7da1b25ef130>, <ast.Constant object at 0x7da1b25ef100>, <ast.Constant object at 0x7da1b25ef0d0>, <ast.Constant object at 0x7da1b25ef0a0>, <ast.Constant object at 0x7da1b25ef070>, <ast.Constant object at 0x7da1b25ef040>, <ast.Constant object at 0x7da1b25ef010>, <ast.Constant object at 0x7da1b25eefe0>, <ast.Constant object at 0x7da1b25eefb0>, <ast.Constant object at 0x7da1b25eef80>, <ast.Constant object at 0x7da1b25eef50>], [<ast.Name object at 0x7da1b25eef20>, <ast.BinOp object at 0x7da1b25eeef0>, <ast.Subscript object at 0x7da1b25eee30>, <ast.Dict object at 0x7da1b25eed10>, <ast.Call object at 0x7da1b25eebc0>, <ast.Call object at 0x7da1b25eeaa0>, <ast.Attribute object at 0x7da1b25ee890>, <ast.Subscript object at 0x7da1b25ee830>, <ast.Call object at 0x7da1b25ee710>, <ast.Call object at 0x7da1b25ee5c0>, <ast.Call object at 0x7da1b25ee4a0>, <ast.Dict object at 0x7da1b25ee380>, <ast.Dict object at 0x7da1b25edf00>, <ast.Dict object at 0x7da1b25eded0>]]
if name[postgame] begin[:]
variable[achievements] assign[=] constant[None]
for taget[name[ach]] in starred[name[postgame].achievements] begin[:]
if compare[name[attributes].player_name equal[==] name[ach].player_name] begin[:]
variable[achievements] assign[=] name[ach]
if <ast.UnaryOp object at 0x7da1b25edb10> begin[:]
return[name[player]]
call[name[player]][constant[score]] assign[=] name[achievements].total_score
call[name[player]][constant[mvp]] assign[=] name[achievements].mvp
call[name[player]][constant[winner]] assign[=] name[achievements].victory
call[name[player]][constant[achievements]] assign[=] dictionary[[<ast.Constant object at 0x7da1b25ed5d0>, <ast.Constant object at 0x7da1b25ed5a0>, <ast.Constant object at 0x7da1b25ed570>, <ast.Constant object at 0x7da1b25ed540>, <ast.Constant object at 0x7da1b25ed510>, <ast.Constant object at 0x7da1b25ed4e0>, <ast.Constant object at 0x7da1b25ed4b0>, <ast.Constant object at 0x7da1b25ed480>, <ast.Constant object at 0x7da1b25ed450>, <ast.Constant object at 0x7da1b25ed420>, <ast.Constant object at 0x7da1b25ed3f0>, <ast.Constant object at 0x7da1b25ed3c0>, <ast.Constant object at 0x7da1b25ed390>, <ast.Constant object at 0x7da1b25ed360>, <ast.Constant object at 0x7da1b25ed330>, <ast.Constant object at 0x7da1b25ed300>, <ast.Constant object at 0x7da1b25ed2d0>], [<ast.Attribute object at 0x7da1b25ed2a0>, <ast.Attribute object at 0x7da1b259c130>, <ast.Attribute object at 0x7da1b259c400>, <ast.Attribute object at 0x7da1b259c3a0>, <ast.Attribute object at 0x7da1b259c2b0>, <ast.Attribute object at 0x7da1b259f5b0>, <ast.Attribute object at 0x7da1b259f4f0>, <ast.Attribute object at 0x7da1b259f580>, <ast.Attribute object at 0x7da1b259f670>, <ast.Attribute object at 0x7da1b259f7f0>, <ast.Attribute object at 0x7da1b259f730>, <ast.Attribute object at 0x7da1b259f6a0>, <ast.Attribute object at 0x7da1b259f8e0>, <ast.Attribute object at 0x7da1b259ff70>, <ast.Attribute object at 0x7da1b259ffd0>, <ast.Attribute object at 0x7da1b259eb30>, <ast.Attribute object at 0x7da1b259d9c0>]]
call[name[player]][constant[ages]] assign[=] dictionary[[<ast.Constant object at 0x7da1b259e3b0>, <ast.Constant object at 0x7da1b259de40>, <ast.Constant object at 0x7da1b259f370>], [<ast.Call object at 0x7da1b259e380>, <ast.Call object at 0x7da1b259f2b0>, <ast.Call object at 0x7da1b259f340>]]
return[name[player]] | keyword[def] identifier[_parse_player] ( identifier[self] , identifier[index] , identifier[attributes] , identifier[postgame] , identifier[game_type] ):
literal[string]
keyword[try] :
identifier[voobly_user] = identifier[voobly] . identifier[user] ( identifier[self] . identifier[_voobly_session] , identifier[attributes] . identifier[player_name] ,
identifier[ladder_ids] = identifier[VOOBLY_LADDERS] . identifier[values] ())
identifier[voobly_ladder] = literal[string] . identifier[format] ( identifier[game_type] , identifier[self] . identifier[_diplomacy] [ literal[string] ])
identifier[voobly_ladder_id] = identifier[VOOBLY_LADDERS] . identifier[get] ( identifier[voobly_ladder] )
keyword[if] identifier[voobly_ladder_id] keyword[in] identifier[voobly_user] [ literal[string] ]:
identifier[voobly_rating] = identifier[voobly_user] [ literal[string] ]. identifier[get] ( identifier[voobly_ladder_id] ). identifier[get] ( literal[string] )
keyword[else] :
identifier[voobly_rating] = keyword[None]
keyword[except] identifier[voobly] . identifier[VooblyError] :
identifier[voobly_user] = keyword[None]
identifier[voobly_rating] = keyword[None]
identifier[player] ={
literal[string] : identifier[index] ,
literal[string] : identifier[attributes] . identifier[player_color] + literal[int] ,
literal[string] : identifier[mgz] . identifier[const] . identifier[PLAYER_COLORS] [ identifier[attributes] . identifier[player_color] ],
literal[string] :{
literal[string] : identifier[attributes] . identifier[camera_x] ,
literal[string] : identifier[attributes] . identifier[camera_y]
},
literal[string] : identifier[dict] ( identifier[self] . identifier[_actions_by_player] [ identifier[index] ]),
literal[string] : identifier[_calculate_apm] ( identifier[index] , identifier[self] . identifier[_actions_by_player] ,
identifier[self] . identifier[_actions_without_player] , identifier[self] . identifier[_time] / literal[int] ),
literal[string] : identifier[attributes] . identifier[player_name] ,
literal[string] : identifier[mgz] . identifier[const] . identifier[CIVILIZATION_NAMES] [ identifier[attributes] . identifier[civilization] ],
literal[string] : identifier[self] . identifier[_compass_position] ( identifier[attributes] . identifier[camera_x] , identifier[attributes] . identifier[camera_y] ),
literal[string] : identifier[self] . identifier[_research] . identifier[get] ( identifier[index] ,[]),
literal[string] : identifier[self] . identifier[_build] . identifier[get] ( identifier[index] ,[]),
literal[string] :{
literal[string] : identifier[self] . identifier[_ratings] . identifier[get] ( identifier[attributes] . identifier[player_name] ),
literal[string] : identifier[voobly_rating] ,
literal[string] : identifier[voobly_user] [ literal[string] ] keyword[if] identifier[voobly_user] keyword[else] keyword[None] ,
literal[string] : identifier[voobly_user] [ literal[string] ] keyword[if] identifier[voobly_user] keyword[else] keyword[None]
},
literal[string] :{},
literal[string] :{}
}
keyword[if] identifier[postgame] :
identifier[achievements] = keyword[None]
keyword[for] identifier[ach] keyword[in] identifier[postgame] . identifier[achievements] :
keyword[if] identifier[attributes] . identifier[player_name] == identifier[ach] . identifier[player_name] :
identifier[achievements] = identifier[ach]
keyword[if] keyword[not] identifier[achievements] :
keyword[return] identifier[player]
identifier[player] [ literal[string] ]= identifier[achievements] . identifier[total_score]
identifier[player] [ literal[string] ]= identifier[achievements] . identifier[mvp]
identifier[player] [ literal[string] ]= identifier[achievements] . identifier[victory]
identifier[player] [ literal[string] ]={
literal[string] : identifier[achievements] . identifier[military] . identifier[units_killed] ,
literal[string] : identifier[achievements] . identifier[military] . identifier[units_lost] ,
literal[string] : identifier[achievements] . identifier[military] . identifier[buildings_razed] ,
literal[string] : identifier[achievements] . identifier[military] . identifier[buildings_lost] ,
literal[string] : identifier[achievements] . identifier[military] . identifier[units_converted] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[food_collected] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[wood_collected] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[gold_collected] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[stone_collected] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[tribute_sent] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[tribute_received] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[trade_gold] ,
literal[string] : identifier[achievements] . identifier[economy] . identifier[relic_gold] ,
literal[string] : identifier[achievements] . identifier[technology] . identifier[explored_percent] ,
literal[string] : identifier[achievements] . identifier[society] . identifier[total_castles] ,
literal[string] : identifier[achievements] . identifier[society] . identifier[relics_captured] ,
literal[string] : identifier[achievements] . identifier[society] . identifier[villager_high]
}
identifier[player] [ literal[string] ]={
literal[string] : identifier[_timestamp_to_time] ( identifier[achievements] . identifier[technology] . identifier[feudal_time] ),
literal[string] : identifier[_timestamp_to_time] ( identifier[achievements] . identifier[technology] . identifier[castle_time] ),
literal[string] : identifier[_timestamp_to_time] ( identifier[achievements] . identifier[technology] . identifier[imperial_time] )
}
keyword[return] identifier[player] | def _parse_player(self, index, attributes, postgame, game_type):
"""Parse a player."""
try:
voobly_user = voobly.user(self._voobly_session, attributes.player_name, ladder_ids=VOOBLY_LADDERS.values())
voobly_ladder = '{} {}'.format(game_type, self._diplomacy['type'])
voobly_ladder_id = VOOBLY_LADDERS.get(voobly_ladder)
if voobly_ladder_id in voobly_user['ladders']:
voobly_rating = voobly_user['ladders'].get(voobly_ladder_id).get('rating') # depends on [control=['if'], data=['voobly_ladder_id']]
else:
voobly_rating = None # depends on [control=['try'], data=[]]
except voobly.VooblyError:
voobly_user = None
voobly_rating = None # depends on [control=['except'], data=[]]
player = {'index': index, 'number': attributes.player_color + 1, 'color': mgz.const.PLAYER_COLORS[attributes.player_color], 'coordinates': {'x': attributes.camera_x, 'y': attributes.camera_y}, 'action_histogram': dict(self._actions_by_player[index]), 'apm': _calculate_apm(index, self._actions_by_player, self._actions_without_player, self._time / 1000), 'name': attributes.player_name, 'civilization': mgz.const.CIVILIZATION_NAMES[attributes.civilization], 'position': self._compass_position(attributes.camera_x, attributes.camera_y), 'research': self._research.get(index, []), 'build': self._build.get(index, []), 'voobly': {'rating_game': self._ratings.get(attributes.player_name), 'rating_current': voobly_rating, 'nation': voobly_user['nationid'] if voobly_user else None, 'uid': voobly_user['uid'] if voobly_user else None}, 'ages': {}, 'achievements': {}}
if postgame:
achievements = None
# player index doesn't always match order of postgame achievements! (restores?)
for ach in postgame.achievements:
if attributes.player_name == ach.player_name:
achievements = ach # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ach']]
if not achievements:
return player # depends on [control=['if'], data=[]]
player['score'] = achievements.total_score
player['mvp'] = achievements.mvp
player['winner'] = achievements.victory
player['achievements'] = {'units_killed': achievements.military.units_killed, 'units_lost': achievements.military.units_lost, 'buildings_razed': achievements.military.buildings_razed, 'buildings_lost': achievements.military.buildings_lost, 'conversions': achievements.military.units_converted, 'food_collected': achievements.economy.food_collected, 'wood_collected': achievements.economy.wood_collected, 'gold_collected': achievements.economy.gold_collected, 'stone_collected': achievements.economy.stone_collected, 'tribute_sent': achievements.economy.tribute_sent, 'tribute_received': achievements.economy.tribute_received, 'trade_gold': achievements.economy.trade_gold, 'relic_gold': achievements.economy.relic_gold, 'explored_percent': achievements.technology.explored_percent, 'total_castles': achievements.society.total_castles, 'relics_collected': achievements.society.relics_captured, 'villager_high': achievements.society.villager_high}
player['ages'] = {'feudal': _timestamp_to_time(achievements.technology.feudal_time), 'castle': _timestamp_to_time(achievements.technology.castle_time), 'imperial': _timestamp_to_time(achievements.technology.imperial_time)} # depends on [control=['if'], data=[]]
return player |
def run(self, context):
    """Execute the wrapped step implementation with explicitly resolved arguments.

    Unlike the default runner, ``context`` is never passed implicitly: every
    parameter of the step function is filled either from the parsed step text
    (``self.explicit_arguments``) or injected from the context based on the
    parameter's type annotation (``self.implicit_arguments``).

    Implicit resolution rules (by annotation):
      * ``Table``   -> ``context.table``
      * ``Context`` -> the ``context`` object itself
      * ``Text``    -> ``context.text``
      * any other annotated type -> looked up in ``context`` under the
        namespaced attribute ``CONTEXT_NAMESPACE.format(<type name>)``
      * missing annotation -> ``RuntimeError`` (annotations are mandatory)

    If the step function declares a return annotation, the return value is
    stored back into ``context`` under the same namespaced attribute so that
    later steps can consume it.

    :param context: the test-framework context for the running scenario.
    :raises RuntimeError: if an implicit parameter lacks a type annotation,
        if the required value is absent from the context, or if an implicit
        argument has no name.
    """
    args = []
    kwargs = {}
    # Arguments parsed out of the step text: keyword when named, else positional.
    for arg in self.explicit_arguments:
        if arg.name is not None:
            kwargs[arg.name] = arg.value
        else:
            args.append(arg.value)
    # Arguments injected from the context, resolved via the parameter's annotation.
    for arg in self.implicit_arguments:
        if arg.name is not None:
            annotation = self.signature.parameters[arg.name].annotation
            annotation_name = annotation
            # Annotations may be strings (forward references / PEP 563);
            # otherwise use the class name for the namespaced lookup below.
            if not isinstance(annotation, str):
                annotation_name = annotation.__name__
            if annotation is Table:
                value = context.table
            elif annotation is Context:
                value = context
            elif annotation is Text:
                value = context.text
            elif annotation is inspect._empty:
                # Unannotated implicit parameters cannot be resolved at all.
                raise RuntimeError(
                    "Parameter '{}' of step implementation '{}{}' does not have a type! Please specify it in the correct steps file.".format(
                        arg.name,
                        self.func.__qualname__,
                        self.signature,
                    )
                )
            elif CONTEXT_NAMESPACE.format(annotation_name) in context:
                # NOTE(review): direct __getattr__ call — presumably the
                # context type customizes attribute lookup; confirm.
                value = context.__getattr__(CONTEXT_NAMESPACE.format(annotation_name))
            else:
                raise RuntimeError(
                    "'{}' was not found in context. Is a context parameter missing?".format(arg.name))
            kwargs[arg.name] = value
        else:
            raise RuntimeError("Argument name shouldn't be None")
    # Run the user's step function inside user mode so attribute writes land
    # in the user layer of the context.
    with context.user_mode():
        return_value = self.func(*args, **kwargs)
    return_annotation = self.signature.return_annotation
    if return_annotation == inspect.Signature.empty:
        # No declared return type: nothing to publish into the context.
        return
    if not isinstance(return_annotation, str):
        return_annotation = return_annotation.__name__
    context.__setattr__(CONTEXT_NAMESPACE.format(return_annotation), return_value) | def function[run, parameter[self, context]]:
constant[We have to overwrite this method because we don't want an implicit context
]
variable[args] assign[=] list[[]]
variable[kwargs] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[name[self].explicit_arguments] begin[:]
if compare[name[arg].name is_not constant[None]] begin[:]
call[name[kwargs]][name[arg].name] assign[=] name[arg].value
for taget[name[arg]] in starred[name[self].implicit_arguments] begin[:]
if compare[name[arg].name is_not constant[None]] begin[:]
variable[annotation] assign[=] call[name[self].signature.parameters][name[arg].name].annotation
variable[annotation_name] assign[=] name[annotation]
if <ast.UnaryOp object at 0x7da1b1448eb0> begin[:]
variable[annotation_name] assign[=] name[annotation].__name__
if compare[name[annotation] is name[Table]] begin[:]
variable[value] assign[=] name[context].table
call[name[kwargs]][name[arg].name] assign[=] name[value]
with call[name[context].user_mode, parameter[]] begin[:]
variable[return_value] assign[=] call[name[self].func, parameter[<ast.Starred object at 0x7da1b144b220>]]
variable[return_annotation] assign[=] name[self].signature.return_annotation
if compare[name[return_annotation] equal[==] name[inspect].Signature.empty] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b144aec0> begin[:]
variable[return_annotation] assign[=] name[return_annotation].__name__
call[name[context].__setattr__, parameter[call[name[CONTEXT_NAMESPACE].format, parameter[name[return_annotation]]], name[return_value]]] | keyword[def] identifier[run] ( identifier[self] , identifier[context] ):
literal[string]
identifier[args] =[]
identifier[kwargs] ={}
keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[explicit_arguments] :
keyword[if] identifier[arg] . identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[kwargs] [ identifier[arg] . identifier[name] ]= identifier[arg] . identifier[value]
keyword[else] :
identifier[args] . identifier[append] ( identifier[arg] . identifier[value] )
keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[implicit_arguments] :
keyword[if] identifier[arg] . identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[annotation] = identifier[self] . identifier[signature] . identifier[parameters] [ identifier[arg] . identifier[name] ]. identifier[annotation]
identifier[annotation_name] = identifier[annotation]
keyword[if] keyword[not] identifier[isinstance] ( identifier[annotation] , identifier[str] ):
identifier[annotation_name] = identifier[annotation] . identifier[__name__]
keyword[if] identifier[annotation] keyword[is] identifier[Table] :
identifier[value] = identifier[context] . identifier[table]
keyword[elif] identifier[annotation] keyword[is] identifier[Context] :
identifier[value] = identifier[context]
keyword[elif] identifier[annotation] keyword[is] identifier[Text] :
identifier[value] = identifier[context] . identifier[text]
keyword[elif] identifier[annotation] keyword[is] identifier[inspect] . identifier[_empty] :
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] (
identifier[arg] . identifier[name] ,
identifier[self] . identifier[func] . identifier[__qualname__] ,
identifier[self] . identifier[signature] ,
)
)
keyword[elif] identifier[CONTEXT_NAMESPACE] . identifier[format] ( identifier[annotation_name] ) keyword[in] identifier[context] :
identifier[value] = identifier[context] . identifier[__getattr__] ( identifier[CONTEXT_NAMESPACE] . identifier[format] ( identifier[annotation_name] ))
keyword[else] :
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] ( identifier[arg] . identifier[name] ))
identifier[kwargs] [ identifier[arg] . identifier[name] ]= identifier[value]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[with] identifier[context] . identifier[user_mode] ():
identifier[return_value] = identifier[self] . identifier[func] (* identifier[args] ,** identifier[kwargs] )
identifier[return_annotation] = identifier[self] . identifier[signature] . identifier[return_annotation]
keyword[if] identifier[return_annotation] == identifier[inspect] . identifier[Signature] . identifier[empty] :
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[return_annotation] , identifier[str] ):
identifier[return_annotation] = identifier[return_annotation] . identifier[__name__]
identifier[context] . identifier[__setattr__] ( identifier[CONTEXT_NAMESPACE] . identifier[format] ( identifier[return_annotation] ), identifier[return_value] ) | def run(self, context):
"""We have to overwrite this method because we don't want an implicit context
"""
args = []
kwargs = {}
for arg in self.explicit_arguments:
if arg.name is not None:
kwargs[arg.name] = arg.value # depends on [control=['if'], data=[]]
else:
args.append(arg.value) # depends on [control=['for'], data=['arg']]
for arg in self.implicit_arguments:
if arg.name is not None:
annotation = self.signature.parameters[arg.name].annotation
annotation_name = annotation
if not isinstance(annotation, str):
annotation_name = annotation.__name__ # depends on [control=['if'], data=[]]
if annotation is Table:
value = context.table # depends on [control=['if'], data=[]]
elif annotation is Context:
value = context # depends on [control=['if'], data=[]]
elif annotation is Text:
value = context.text # depends on [control=['if'], data=[]]
elif annotation is inspect._empty:
raise RuntimeError("Parameter '{}' of step implementation '{}{}' does not have a type! Please specify it in the correct steps file.".format(arg.name, self.func.__qualname__, self.signature)) # depends on [control=['if'], data=[]]
elif CONTEXT_NAMESPACE.format(annotation_name) in context:
value = context.__getattr__(CONTEXT_NAMESPACE.format(annotation_name)) # depends on [control=['if'], data=['context']]
else:
raise RuntimeError("'{}' was not found in context. Is a context parameter missing?".format(arg.name))
kwargs[arg.name] = value # depends on [control=['if'], data=[]]
else:
raise RuntimeError("Argument name shouldn't be None") # depends on [control=['for'], data=['arg']]
with context.user_mode():
return_value = self.func(*args, **kwargs)
return_annotation = self.signature.return_annotation
if return_annotation == inspect.Signature.empty:
return # depends on [control=['if'], data=[]]
if not isinstance(return_annotation, str):
return_annotation = return_annotation.__name__ # depends on [control=['if'], data=[]]
context.__setattr__(CONTEXT_NAMESPACE.format(return_annotation), return_value) # depends on [control=['with'], data=[]] |
def find_elements_by_id(self, id_):
    """
    Finds multiple elements by id.
    :Args:
     - id\\_ - The id of the elements to be found.
    :Returns:
     - list of WebElement - a list with elements if any was found. An
       empty list if not
    :Usage:
        ::
            elements = driver.find_elements_by_id('foo')
    """
    # Thin convenience wrapper: delegate to the generic locator using the
    # ID lookup strategy.
    return self.find_elements(by=By.ID, value=id_) | def function[find_elements_by_id, parameter[self, id_]]:
constant[
Finds multiple elements by id.
:Args:
- id\_ - The id of the elements to be found.
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_id('foo')
]
return[call[name[self].find_elements, parameter[]]] | keyword[def] identifier[find_elements_by_id] ( identifier[self] , identifier[id_] ):
literal[string]
keyword[return] identifier[self] . identifier[find_elements] ( identifier[by] = identifier[By] . identifier[ID] , identifier[value] = identifier[id_] ) | def find_elements_by_id(self, id_):
"""
Finds multiple elements by id.
:Args:
- id\\_ - The id of the elements to be found.
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_id('foo')
"""
return self.find_elements(by=By.ID, value=id_) |
def _Supercooled(T, P):
    """Guideline on thermodynamic properties of supercooled water
    Parameters
    ----------
    T : float
        Temperature, [K]
    P : float
        Pressure, [MPa]
    Returns
    -------
    prop : dict
        Dict with calculated properties of water. The available properties are:
            * L: Ordering field, [-]
            * x: Mole fraction of low-density structure, [-]
            * rho: Density, [kg/m³]
            * s: Specific entropy, [kJ/kgK]
            * h: Specific enthalpy, [kJ/kg]
            * u: Specific internal energy, [kJ/kg]
            * a: Specific Helmholtz energy, [kJ/kg]
            * g: Specific Gibbs energy, [kJ/kg]
            * alfap: Thermal expansion coefficient, [1/K]
            * xkappa : Isothermal compressibility, [1/MPa]
            * cp: Specific isobaric heat capacity, [kJ/kgK]
            * cv: Specific isochoric heat capacity, [kJ/kgK]
            * w: Speed of sound, [m/s²]
    Notes
    ------
    Raise :class:`NotImplementedError` if input isn't in limit:
        * Tm ≤ T ≤ 300
        * 0 < P ≤ 1000
    The minimum temperature in range of validity is the melting temperature, it
    depend of pressure
    Examples
    --------
    >>> liq = _Supercooled(235.15, 0.101325)
    >>> liq["rho"], liq["cp"], liq["w"]
    968.09999 5.997563 1134.5855
    References
    ----------
    IAPWS, Guideline on Thermodynamic Properties of Supercooled Water,
    http://iapws.org/relguide/Supercooled.html
    """
    # Check input in range of validity
    if P < 198.9:
        # Low-pressure region: the lower bound is the pressure-dependent
        # curve Ph(T) built from the reduced temperature Tita.
        Tita = T/235.15
        Ph = 0.1+228.27*(1-Tita**6.243)+15.724*(1-Tita**79.81)
        if P < Ph or T > 300:
            raise NotImplementedError("Incoming out of bound")
    else:
        # High-pressure region: the lower bound is the cubic-in-P curve Th(P).
        Th = 172.82+0.03718*P+3.403e-5*P**2-1.573e-8*P**3
        if T < Th or T > 300 or P > 1000:
            raise NotImplementedError("Incoming out of bound")
    # Parameters, Table 1
    Tll = 228.2
    rho0 = 1081.6482
    R = 0.461523087
    pi0 = 300e3/rho0/R/Tll
    omega0 = 0.5212269
    L0 = 0.76317954
    k0 = 0.072158686
    k1 = -0.31569232
    k2 = 5.2992608
    # Reducing parameters, Eq 2 (dimensionless temperature and pressure)
    tau = T/Tll-1
    p = P*1000/rho0/R/Tll
    tau_ = tau+1
    p_ = p+pi0
    # Eq 3: coefficients c_i and exponents a_i, b_i, d_i of the background
    # Gibbs-energy terms; the four lists are consumed in lockstep below.
    ci = [-8.1570681381655, 1.2875032, 7.0901673598012, -3.2779161e-2,
          7.3703949e-1, -2.1628622e-1, -5.1782479, 4.2293517e-4, 2.3592109e-2,
          4.3773754, -2.9967770e-3, -9.6558018e-1, 3.7595286, 1.2632441,
          2.8542697e-1, -8.5994947e-1, -3.2916153e-1, 9.0019616e-2,
          8.1149726e-2, -3.2788213]
    ai = [0, 0, 1, -0.2555, 1.5762, 1.6400, 3.6385, -0.3828, 1.6219, 4.3287,
          3.4763, 5.1556, -0.3593, 5.0361, 2.9786, 6.2373, 4.0460, 5.3558,
          9.0157, 1.2194]
    bi = [0, 1, 0, 2.1051, 1.1422, 0.9510, 0, 3.6402, 2.0760, -0.0016, 2.2769,
          0.0008, 0.3706, -0.3975, 2.9730, -0.3180, 2.9805, 2.9265, 0.4456,
          0.1298]
    di = [0, 0, 0, -0.0016, 0.6894, 0.0130, 0.0002, 0.0435, 0.0500, 0.0004,
          0.0528, 0.0147, 0.8584, 0.9924, 1.0041, 1.0961, 1.0228, 1.0303,
          1.6180, 0.5213]
    # Background part phir and its first/second tau- and p-derivatives,
    # accumulated term by term.
    phir = phirt = phirp = phirtt = phirtp = phirpp = 0
    for c, a, b, d in zip(ci, ai, bi, di):
        phir += c*tau_**a*p_**b*exp(-d*p_)
        phirt += c*a*tau_**(a-1)*p_**b*exp(-d*p_)
        phirp += c*tau_**a*p_**(b-1)*(b-d*p_)*exp(-d*p_)
        phirtt += c*a*(a-1)*tau_**(a-2)*p_**b*exp(-d*p_)
        phirtp += c*a*tau_**(a-1)*p_**(b-1)*(b-d*p_)*exp(-d*p_)
        phirpp += c*tau_**a*p_**(b-2)*((d*p_-b)**2-b)*exp(-d*p_)
    # Eq 5
    K1 = ((1+k0*k2+k1*(p-k2*tau))**2-4*k0*k1*k2*(p-k2*tau))**0.5
    K2 = (1+k2**2)**0.5
    # Eq 6: interaction parameter of the two-state mixture
    omega = 2+omega0*p
    # Eq 4: ordering field L
    L = L0*K2/2/k1/k2*(1+k0*k2+k1*(p+k2*tau)-K1)
    # Define interval of solution, Table 4 (bracket for the fraction x so
    # the bounded minimization below converges to the physical root)
    if omega < 10/9*(log(19)-L):
        xmin = 0.049
        xmax = 0.5
    elif 10/9*(log(19)-L) <= omega < 50/49*(log(99)-L):
        xmin = 0.0099
        xmax = 0.051
    else:
        xmin = 0.99*exp(-50/49*L-omega)
        xmax = min(1.1*exp(-L-omega), 0.0101)

    # Equilibrium condition for the low-density fraction x: the residual of
    # the equation is minimized (in absolute value) within [xmin, xmax].
    def f(x):
        return abs(L+log(x/(1-x))+omega*(1-2*x))
    x = minimize(f, ((xmin+xmax)/2,), bounds=((xmin, xmax),))["x"][0]

    # Eq 12: order parameter fi and susceptibility Xi
    fi = 2*x-1
    Xi = 1/(2/(1-fi**2)-omega)
    # Derivatives, Table 3 (of the ordering field L w.r.t. tau and p)
    Lt = L0*K2/2*(1+(1-k0*k2+k1*(p-k2*tau))/K1)
    Lp = L0*K2*(K1+k0*k2-k1*p+k1*k2*tau-1)/2/k2/K1
    Ltt = -2*L0*K2*k0*k1*k2**2/K1**3
    Ltp = 2*L0*K2*k0*k1*k2/K1**3
    Lpp = -2*L0*K2*k0*k1/K1**3
    prop = {}
    prop["L"] = L
    prop["x"] = x
    # Eq 13
    prop["rho"] = rho0/((tau+1)/2*(omega0/2*(1-fi**2)+Lp*(fi+1))+phirp)
    # Eq 1
    prop["g"] = phir+(tau+1)*(x*L+x*log(x)+(1-x)*log(1-x)+omega*x*(1-x))
    # Eq 14
    prop["s"] = -R*((tau+1)/2*Lt*(fi+1) +
                    (x*L+x*log(x)+(1-x)*log(1-x)+omega*x*(1-x))+phirt)
    # Basic derived state properties (standard thermodynamic identities)
    prop["h"] = prop["g"]+T*prop["s"]
    prop["u"] = prop["h"]+P/prop["rho"]
    prop["a"] = prop["u"]-T*prop["s"]
    # Eq 15
    prop["xkappa"] = prop["rho"]/rho0**2/R*1000/Tll*(
        (tau+1)/2*(Xi*(Lp-omega0*fi)**2-(fi+1)*Lpp)-phirpp)
    prop["alfap"] = prop["rho"]/rho0/Tll*(
        Ltp/2*(tau+1)*(fi+1) + (omega0*(1-fi**2)/2+Lp*(fi+1))/2 -
        (tau+1)*Lt/2*Xi*(Lp-omega0*fi) + phirtp)
    prop["cp"] = -R*(tau+1)*(Lt*(fi+1)+(tau+1)/2*(Ltt*(fi+1)-Lt**2*Xi)+phirtt)
    # Eq 16
    prop["cv"] = prop["cp"]-T*prop["alfap"]**2/prop["rho"]/prop["xkappa"]*1e3
    # Eq 17
    prop["w"] = (prop["rho"]*prop["xkappa"]*1e-6*prop["cv"]/prop["cp"])**-0.5
    return prop | def function[_Supercooled, parameter[T, P]]:
constant[Guideline on thermodynamic properties of supercooled water
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
prop : dict
Dict with calculated properties of water. The available properties are:
* L: Ordering field, [-]
* x: Mole fraction of low-density structure, [-]
* rho: Density, [kg/m³]
* s: Specific entropy, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
* u: Specific internal energy, [kJ/kg]
* a: Specific Helmholtz energy, [kJ/kg]
* g: Specific Gibbs energy, [kJ/kg]
* alfap: Thermal expansion coefficient, [1/K]
* xkappa : Isothermal compressibility, [1/MPa]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* cv: Specific isochoric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s²]
Notes
------
Raise :class:`NotImplementedError` if input isn't in limit:
* Tm ≤ T ≤ 300
* 0 < P ≤ 1000
The minimum temperature in range of validity is the melting temperature, it
depend of pressure
Examples
--------
>>> liq = _Supercooled(235.15, 0.101325)
>>> liq["rho"], liq["cp"], liq["w"]
968.09999 5.997563 1134.5855
References
----------
IAPWS, Guideline on Thermodynamic Properties of Supercooled Water,
http://iapws.org/relguide/Supercooled.html
]
if compare[name[P] less[<] constant[198.9]] begin[:]
variable[Tita] assign[=] binary_operation[name[T] / constant[235.15]]
variable[Ph] assign[=] binary_operation[binary_operation[constant[0.1] + binary_operation[constant[228.27] * binary_operation[constant[1] - binary_operation[name[Tita] ** constant[6.243]]]]] + binary_operation[constant[15.724] * binary_operation[constant[1] - binary_operation[name[Tita] ** constant[79.81]]]]]
if <ast.BoolOp object at 0x7da1b0694220> begin[:]
<ast.Raise object at 0x7da1b0695510>
variable[Tll] assign[=] constant[228.2]
variable[rho0] assign[=] constant[1081.6482]
variable[R] assign[=] constant[0.461523087]
variable[pi0] assign[=] binary_operation[binary_operation[binary_operation[constant[300000.0] / name[rho0]] / name[R]] / name[Tll]]
variable[omega0] assign[=] constant[0.5212269]
variable[L0] assign[=] constant[0.76317954]
variable[k0] assign[=] constant[0.072158686]
variable[k1] assign[=] <ast.UnaryOp object at 0x7da20e954430>
variable[k2] assign[=] constant[5.2992608]
variable[tau] assign[=] binary_operation[binary_operation[name[T] / name[Tll]] - constant[1]]
variable[p] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[P] * constant[1000]] / name[rho0]] / name[R]] / name[Tll]]
variable[tau_] assign[=] binary_operation[name[tau] + constant[1]]
variable[p_] assign[=] binary_operation[name[p] + name[pi0]]
variable[ci] assign[=] list[[<ast.UnaryOp object at 0x7da20e9569b0>, <ast.Constant object at 0x7da20e954190>, <ast.Constant object at 0x7da20e9566b0>, <ast.UnaryOp object at 0x7da20e957280>, <ast.Constant object at 0x7da20e955690>, <ast.UnaryOp object at 0x7da20e954790>, <ast.UnaryOp object at 0x7da20e955b10>, <ast.Constant object at 0x7da20e955090>, <ast.Constant object at 0x7da20e957700>, <ast.Constant object at 0x7da20e956530>, <ast.UnaryOp object at 0x7da20e957ee0>, <ast.UnaryOp object at 0x7da20e9546d0>, <ast.Constant object at 0x7da20e9541f0>, <ast.Constant object at 0x7da20e956d70>, <ast.Constant object at 0x7da20e9557b0>, <ast.UnaryOp object at 0x7da20e9579a0>, <ast.UnaryOp object at 0x7da20e955e10>, <ast.Constant object at 0x7da20e954ca0>, <ast.Constant object at 0x7da20e954130>, <ast.UnaryOp object at 0x7da20e957d60>]]
variable[ai] assign[=] list[[<ast.Constant object at 0x7da20e954580>, <ast.Constant object at 0x7da20e9570a0>, <ast.Constant object at 0x7da20e954a30>, <ast.UnaryOp object at 0x7da20e955ae0>, <ast.Constant object at 0x7da20e956350>, <ast.Constant object at 0x7da20e955510>, <ast.Constant object at 0x7da20e957850>, <ast.UnaryOp object at 0x7da20e956920>, <ast.Constant object at 0x7da20e955870>, <ast.Constant object at 0x7da20e956aa0>, <ast.Constant object at 0x7da20e956fe0>, <ast.Constant object at 0x7da20e957eb0>, <ast.UnaryOp object at 0x7da20e955390>, <ast.Constant object at 0x7da20e954220>, <ast.Constant object at 0x7da20e9572e0>, <ast.Constant object at 0x7da20e957550>, <ast.Constant object at 0x7da20e957c10>, <ast.Constant object at 0x7da20e955060>, <ast.Constant object at 0x7da20e955ea0>, <ast.Constant object at 0x7da20e956bc0>]]
variable[bi] assign[=] list[[<ast.Constant object at 0x7da20e957100>, <ast.Constant object at 0x7da20e956710>, <ast.Constant object at 0x7da20e954340>, <ast.Constant object at 0x7da20e956ef0>, <ast.Constant object at 0x7da20e9562f0>, <ast.Constant object at 0x7da20e955600>, <ast.Constant object at 0x7da20e954280>, <ast.Constant object at 0x7da20e955750>, <ast.Constant object at 0x7da20e9551b0>, <ast.UnaryOp object at 0x7da20e957f40>, <ast.Constant object at 0x7da20e9549a0>, <ast.Constant object at 0x7da20e954be0>, <ast.Constant object at 0x7da20e954400>, <ast.UnaryOp object at 0x7da20e954d00>, <ast.Constant object at 0x7da20e955b70>, <ast.UnaryOp object at 0x7da20e954940>, <ast.Constant object at 0x7da20e956ad0>, <ast.Constant object at 0x7da20e9578b0>, <ast.Constant object at 0x7da20e956ce0>, <ast.Constant object at 0x7da20e957520>]]
variable[di] assign[=] list[[<ast.Constant object at 0x7da20e956e30>, <ast.Constant object at 0x7da20e955c60>, <ast.Constant object at 0x7da20e956290>, <ast.UnaryOp object at 0x7da20e957d90>, <ast.Constant object at 0x7da20e955ba0>, <ast.Constant object at 0x7da20e956770>, <ast.Constant object at 0x7da20e957580>, <ast.Constant object at 0x7da20e957670>, <ast.Constant object at 0x7da20e9549d0>, <ast.Constant object at 0x7da20e956b60>, <ast.Constant object at 0x7da20e955360>, <ast.Constant object at 0x7da20e955a20>, <ast.Constant object at 0x7da20e9562c0>, <ast.Constant object at 0x7da20e957cd0>, <ast.Constant object at 0x7da20e957b50>, <ast.Constant object at 0x7da20e957970>, <ast.Constant object at 0x7da20e954d90>, <ast.Constant object at 0x7da20e956da0>, <ast.Constant object at 0x7da20e956f50>, <ast.Constant object at 0x7da20e9563b0>]]
variable[phir] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da20e955150>, <ast.Name object at 0x7da20e954100>, <ast.Name object at 0x7da20e957760>, <ast.Name object at 0x7da20e954e50>]]] in starred[call[name[zip], parameter[name[ci], name[ai], name[bi], name[di]]]] begin[:]
<ast.AugAssign object at 0x7da18bc72050>
<ast.AugAssign object at 0x7da18bc72bf0>
<ast.AugAssign object at 0x7da18bc733d0>
<ast.AugAssign object at 0x7da18bc713f0>
<ast.AugAssign object at 0x7da18bc713c0>
<ast.AugAssign object at 0x7da18bc70c10>
variable[K1] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] + binary_operation[name[k0] * name[k2]]] + binary_operation[name[k1] * binary_operation[name[p] - binary_operation[name[k2] * name[tau]]]]] ** constant[2]] - binary_operation[binary_operation[binary_operation[binary_operation[constant[4] * name[k0]] * name[k1]] * name[k2]] * binary_operation[name[p] - binary_operation[name[k2] * name[tau]]]]] ** constant[0.5]]
variable[K2] assign[=] binary_operation[binary_operation[constant[1] + binary_operation[name[k2] ** constant[2]]] ** constant[0.5]]
variable[omega] assign[=] binary_operation[constant[2] + binary_operation[name[omega0] * name[p]]]
variable[L] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[L0] * name[K2]] / constant[2]] / name[k1]] / name[k2]] * binary_operation[binary_operation[binary_operation[constant[1] + binary_operation[name[k0] * name[k2]]] + binary_operation[name[k1] * binary_operation[name[p] + binary_operation[name[k2] * name[tau]]]]] - name[K1]]]
if compare[name[omega] less[<] binary_operation[binary_operation[constant[10] / constant[9]] * binary_operation[call[name[log], parameter[constant[19]]] - name[L]]]] begin[:]
variable[xmin] assign[=] constant[0.049]
variable[xmax] assign[=] constant[0.5]
def function[f, parameter[x]]:
return[call[name[abs], parameter[binary_operation[binary_operation[name[L] + call[name[log], parameter[binary_operation[name[x] / binary_operation[constant[1] - name[x]]]]]] + binary_operation[name[omega] * binary_operation[constant[1] - binary_operation[constant[2] * name[x]]]]]]]]
variable[x] assign[=] call[call[call[name[minimize], parameter[name[f], tuple[[<ast.BinOp object at 0x7da20c7962f0>]]]]][constant[x]]][constant[0]]
variable[fi] assign[=] binary_operation[binary_operation[constant[2] * name[x]] - constant[1]]
variable[Xi] assign[=] binary_operation[constant[1] / binary_operation[binary_operation[constant[2] / binary_operation[constant[1] - binary_operation[name[fi] ** constant[2]]]] - name[omega]]]
variable[Lt] assign[=] binary_operation[binary_operation[binary_operation[name[L0] * name[K2]] / constant[2]] * binary_operation[constant[1] + binary_operation[binary_operation[binary_operation[constant[1] - binary_operation[name[k0] * name[k2]]] + binary_operation[name[k1] * binary_operation[name[p] - binary_operation[name[k2] * name[tau]]]]] / name[K1]]]]
variable[Lp] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[L0] * name[K2]] * binary_operation[binary_operation[binary_operation[binary_operation[name[K1] + binary_operation[name[k0] * name[k2]]] - binary_operation[name[k1] * name[p]]] + binary_operation[binary_operation[name[k1] * name[k2]] * name[tau]]] - constant[1]]] / constant[2]] / name[k2]] / name[K1]]
variable[Ltt] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c794be0> * name[L0]] * name[K2]] * name[k0]] * name[k1]] * binary_operation[name[k2] ** constant[2]]] / binary_operation[name[K1] ** constant[3]]]
variable[Ltp] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[L0]] * name[K2]] * name[k0]] * name[k1]] * name[k2]] / binary_operation[name[K1] ** constant[3]]]
variable[Lpp] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18dc05e70> * name[L0]] * name[K2]] * name[k0]] * name[k1]] / binary_operation[name[K1] ** constant[3]]]
variable[prop] assign[=] dictionary[[], []]
call[name[prop]][constant[L]] assign[=] name[L]
call[name[prop]][constant[x]] assign[=] name[x]
call[name[prop]][constant[rho]] assign[=] binary_operation[name[rho0] / binary_operation[binary_operation[binary_operation[binary_operation[name[tau] + constant[1]] / constant[2]] * binary_operation[binary_operation[binary_operation[name[omega0] / constant[2]] * binary_operation[constant[1] - binary_operation[name[fi] ** constant[2]]]] + binary_operation[name[Lp] * binary_operation[name[fi] + constant[1]]]]] + name[phirp]]]
call[name[prop]][constant[g]] assign[=] binary_operation[name[phir] + binary_operation[binary_operation[name[tau] + constant[1]] * binary_operation[binary_operation[binary_operation[binary_operation[name[x] * name[L]] + binary_operation[name[x] * call[name[log], parameter[name[x]]]]] + binary_operation[binary_operation[constant[1] - name[x]] * call[name[log], parameter[binary_operation[constant[1] - name[x]]]]]] + binary_operation[binary_operation[name[omega] * name[x]] * binary_operation[constant[1] - name[x]]]]]]
call[name[prop]][constant[s]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da18dc05450> * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[tau] + constant[1]] / constant[2]] * name[Lt]] * binary_operation[name[fi] + constant[1]]] + binary_operation[binary_operation[binary_operation[binary_operation[name[x] * name[L]] + binary_operation[name[x] * call[name[log], parameter[name[x]]]]] + binary_operation[binary_operation[constant[1] - name[x]] * call[name[log], parameter[binary_operation[constant[1] - name[x]]]]]] + binary_operation[binary_operation[name[omega] * name[x]] * binary_operation[constant[1] - name[x]]]]] + name[phirt]]]
call[name[prop]][constant[h]] assign[=] binary_operation[call[name[prop]][constant[g]] + binary_operation[name[T] * call[name[prop]][constant[s]]]]
call[name[prop]][constant[u]] assign[=] binary_operation[call[name[prop]][constant[h]] + binary_operation[name[P] / call[name[prop]][constant[rho]]]]
call[name[prop]][constant[a]] assign[=] binary_operation[call[name[prop]][constant[u]] - binary_operation[name[T] * call[name[prop]][constant[s]]]]
call[name[prop]][constant[xkappa]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[prop]][constant[rho]] / binary_operation[name[rho0] ** constant[2]]] / name[R]] * constant[1000]] / name[Tll]] * binary_operation[binary_operation[binary_operation[binary_operation[name[tau] + constant[1]] / constant[2]] * binary_operation[binary_operation[name[Xi] * binary_operation[binary_operation[name[Lp] - binary_operation[name[omega0] * name[fi]]] ** constant[2]]] - binary_operation[binary_operation[name[fi] + constant[1]] * name[Lpp]]]] - name[phirpp]]]
call[name[prop]][constant[alfap]] assign[=] binary_operation[binary_operation[binary_operation[call[name[prop]][constant[rho]] / name[rho0]] / name[Tll]] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[Ltp] / constant[2]] * binary_operation[name[tau] + constant[1]]] * binary_operation[name[fi] + constant[1]]] + binary_operation[binary_operation[binary_operation[binary_operation[name[omega0] * binary_operation[constant[1] - binary_operation[name[fi] ** constant[2]]]] / constant[2]] + binary_operation[name[Lp] * binary_operation[name[fi] + constant[1]]]] / constant[2]]] - binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[tau] + constant[1]] * name[Lt]] / constant[2]] * name[Xi]] * binary_operation[name[Lp] - binary_operation[name[omega0] * name[fi]]]]] + name[phirtp]]]
call[name[prop]][constant[cp]] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b2346d70> * binary_operation[name[tau] + constant[1]]] * binary_operation[binary_operation[binary_operation[name[Lt] * binary_operation[name[fi] + constant[1]]] + binary_operation[binary_operation[binary_operation[name[tau] + constant[1]] / constant[2]] * binary_operation[binary_operation[name[Ltt] * binary_operation[name[fi] + constant[1]]] - binary_operation[binary_operation[name[Lt] ** constant[2]] * name[Xi]]]]] + name[phirtt]]]
call[name[prop]][constant[cv]] assign[=] binary_operation[call[name[prop]][constant[cp]] - binary_operation[binary_operation[binary_operation[binary_operation[name[T] * binary_operation[call[name[prop]][constant[alfap]] ** constant[2]]] / call[name[prop]][constant[rho]]] / call[name[prop]][constant[xkappa]]] * constant[1000.0]]]
call[name[prop]][constant[w]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[prop]][constant[rho]] * call[name[prop]][constant[xkappa]]] * constant[1e-06]] * call[name[prop]][constant[cv]]] / call[name[prop]][constant[cp]]] ** <ast.UnaryOp object at 0x7da18eb56e60>]
return[name[prop]] | keyword[def] identifier[_Supercooled] ( identifier[T] , identifier[P] ):
literal[string]
keyword[if] identifier[P] < literal[int] :
identifier[Tita] = identifier[T] / literal[int]
identifier[Ph] = literal[int] + literal[int] *( literal[int] - identifier[Tita] ** literal[int] )+ literal[int] *( literal[int] - identifier[Tita] ** literal[int] )
keyword[if] identifier[P] < identifier[Ph] keyword[or] identifier[T] > literal[int] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[else] :
identifier[Th] = literal[int] + literal[int] * identifier[P] + literal[int] * identifier[P] ** literal[int] - literal[int] * identifier[P] ** literal[int]
keyword[if] identifier[T] < identifier[Th] keyword[or] identifier[T] > literal[int] keyword[or] identifier[P] > literal[int] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[Tll] = literal[int]
identifier[rho0] = literal[int]
identifier[R] = literal[int]
identifier[pi0] = literal[int] / identifier[rho0] / identifier[R] / identifier[Tll]
identifier[omega0] = literal[int]
identifier[L0] = literal[int]
identifier[k0] = literal[int]
identifier[k1] =- literal[int]
identifier[k2] = literal[int]
identifier[tau] = identifier[T] / identifier[Tll] - literal[int]
identifier[p] = identifier[P] * literal[int] / identifier[rho0] / identifier[R] / identifier[Tll]
identifier[tau_] = identifier[tau] + literal[int]
identifier[p_] = identifier[p] + identifier[pi0]
identifier[ci] =[- literal[int] , literal[int] , literal[int] ,- literal[int] ,
literal[int] ,- literal[int] ,- literal[int] , literal[int] , literal[int] ,
literal[int] ,- literal[int] ,- literal[int] , literal[int] , literal[int] ,
literal[int] ,- literal[int] ,- literal[int] , literal[int] ,
literal[int] ,- literal[int] ]
identifier[ai] =[ literal[int] , literal[int] , literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] ,- literal[int] , literal[int] , literal[int] ,
literal[int] , literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ,
literal[int] , literal[int] ]
identifier[bi] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ,- literal[int] , literal[int] ,
literal[int] , literal[int] ,- literal[int] , literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] ,
literal[int] ]
identifier[di] =[ literal[int] , literal[int] , literal[int] ,- literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ,
literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ,
literal[int] , literal[int] ]
identifier[phir] = identifier[phirt] = identifier[phirp] = identifier[phirtt] = identifier[phirtp] = identifier[phirpp] = literal[int]
keyword[for] identifier[c] , identifier[a] , identifier[b] , identifier[d] keyword[in] identifier[zip] ( identifier[ci] , identifier[ai] , identifier[bi] , identifier[di] ):
identifier[phir] += identifier[c] * identifier[tau_] ** identifier[a] * identifier[p_] ** identifier[b] * identifier[exp] (- identifier[d] * identifier[p_] )
identifier[phirt] += identifier[c] * identifier[a] * identifier[tau_] **( identifier[a] - literal[int] )* identifier[p_] ** identifier[b] * identifier[exp] (- identifier[d] * identifier[p_] )
identifier[phirp] += identifier[c] * identifier[tau_] ** identifier[a] * identifier[p_] **( identifier[b] - literal[int] )*( identifier[b] - identifier[d] * identifier[p_] )* identifier[exp] (- identifier[d] * identifier[p_] )
identifier[phirtt] += identifier[c] * identifier[a] *( identifier[a] - literal[int] )* identifier[tau_] **( identifier[a] - literal[int] )* identifier[p_] ** identifier[b] * identifier[exp] (- identifier[d] * identifier[p_] )
identifier[phirtp] += identifier[c] * identifier[a] * identifier[tau_] **( identifier[a] - literal[int] )* identifier[p_] **( identifier[b] - literal[int] )*( identifier[b] - identifier[d] * identifier[p_] )* identifier[exp] (- identifier[d] * identifier[p_] )
identifier[phirpp] += identifier[c] * identifier[tau_] ** identifier[a] * identifier[p_] **( identifier[b] - literal[int] )*(( identifier[d] * identifier[p_] - identifier[b] )** literal[int] - identifier[b] )* identifier[exp] (- identifier[d] * identifier[p_] )
identifier[K1] =(( literal[int] + identifier[k0] * identifier[k2] + identifier[k1] *( identifier[p] - identifier[k2] * identifier[tau] ))** literal[int] - literal[int] * identifier[k0] * identifier[k1] * identifier[k2] *( identifier[p] - identifier[k2] * identifier[tau] ))** literal[int]
identifier[K2] =( literal[int] + identifier[k2] ** literal[int] )** literal[int]
identifier[omega] = literal[int] + identifier[omega0] * identifier[p]
identifier[L] = identifier[L0] * identifier[K2] / literal[int] / identifier[k1] / identifier[k2] *( literal[int] + identifier[k0] * identifier[k2] + identifier[k1] *( identifier[p] + identifier[k2] * identifier[tau] )- identifier[K1] )
keyword[if] identifier[omega] < literal[int] / literal[int] *( identifier[log] ( literal[int] )- identifier[L] ):
identifier[xmin] = literal[int]
identifier[xmax] = literal[int]
keyword[elif] literal[int] / literal[int] *( identifier[log] ( literal[int] )- identifier[L] )<= identifier[omega] < literal[int] / literal[int] *( identifier[log] ( literal[int] )- identifier[L] ):
identifier[xmin] = literal[int]
identifier[xmax] = literal[int]
keyword[else] :
identifier[xmin] = literal[int] * identifier[exp] (- literal[int] / literal[int] * identifier[L] - identifier[omega] )
identifier[xmax] = identifier[min] ( literal[int] * identifier[exp] (- identifier[L] - identifier[omega] ), literal[int] )
keyword[def] identifier[f] ( identifier[x] ):
keyword[return] identifier[abs] ( identifier[L] + identifier[log] ( identifier[x] /( literal[int] - identifier[x] ))+ identifier[omega] *( literal[int] - literal[int] * identifier[x] ))
identifier[x] = identifier[minimize] ( identifier[f] ,(( identifier[xmin] + identifier[xmax] )/ literal[int] ,), identifier[bounds] =(( identifier[xmin] , identifier[xmax] ),))[ literal[string] ][ literal[int] ]
identifier[fi] = literal[int] * identifier[x] - literal[int]
identifier[Xi] = literal[int] /( literal[int] /( literal[int] - identifier[fi] ** literal[int] )- identifier[omega] )
identifier[Lt] = identifier[L0] * identifier[K2] / literal[int] *( literal[int] +( literal[int] - identifier[k0] * identifier[k2] + identifier[k1] *( identifier[p] - identifier[k2] * identifier[tau] ))/ identifier[K1] )
identifier[Lp] = identifier[L0] * identifier[K2] *( identifier[K1] + identifier[k0] * identifier[k2] - identifier[k1] * identifier[p] + identifier[k1] * identifier[k2] * identifier[tau] - literal[int] )/ literal[int] / identifier[k2] / identifier[K1]
identifier[Ltt] =- literal[int] * identifier[L0] * identifier[K2] * identifier[k0] * identifier[k1] * identifier[k2] ** literal[int] / identifier[K1] ** literal[int]
identifier[Ltp] = literal[int] * identifier[L0] * identifier[K2] * identifier[k0] * identifier[k1] * identifier[k2] / identifier[K1] ** literal[int]
identifier[Lpp] =- literal[int] * identifier[L0] * identifier[K2] * identifier[k0] * identifier[k1] / identifier[K1] ** literal[int]
identifier[prop] ={}
identifier[prop] [ literal[string] ]= identifier[L]
identifier[prop] [ literal[string] ]= identifier[x]
identifier[prop] [ literal[string] ]= identifier[rho0] /(( identifier[tau] + literal[int] )/ literal[int] *( identifier[omega0] / literal[int] *( literal[int] - identifier[fi] ** literal[int] )+ identifier[Lp] *( identifier[fi] + literal[int] ))+ identifier[phirp] )
identifier[prop] [ literal[string] ]= identifier[phir] +( identifier[tau] + literal[int] )*( identifier[x] * identifier[L] + identifier[x] * identifier[log] ( identifier[x] )+( literal[int] - identifier[x] )* identifier[log] ( literal[int] - identifier[x] )+ identifier[omega] * identifier[x] *( literal[int] - identifier[x] ))
identifier[prop] [ literal[string] ]=- identifier[R] *(( identifier[tau] + literal[int] )/ literal[int] * identifier[Lt] *( identifier[fi] + literal[int] )+
( identifier[x] * identifier[L] + identifier[x] * identifier[log] ( identifier[x] )+( literal[int] - identifier[x] )* identifier[log] ( literal[int] - identifier[x] )+ identifier[omega] * identifier[x] *( literal[int] - identifier[x] ))+ identifier[phirt] )
identifier[prop] [ literal[string] ]= identifier[prop] [ literal[string] ]+ identifier[T] * identifier[prop] [ literal[string] ]
identifier[prop] [ literal[string] ]= identifier[prop] [ literal[string] ]+ identifier[P] / identifier[prop] [ literal[string] ]
identifier[prop] [ literal[string] ]= identifier[prop] [ literal[string] ]- identifier[T] * identifier[prop] [ literal[string] ]
identifier[prop] [ literal[string] ]= identifier[prop] [ literal[string] ]/ identifier[rho0] ** literal[int] / identifier[R] * literal[int] / identifier[Tll] *(
( identifier[tau] + literal[int] )/ literal[int] *( identifier[Xi] *( identifier[Lp] - identifier[omega0] * identifier[fi] )** literal[int] -( identifier[fi] + literal[int] )* identifier[Lpp] )- identifier[phirpp] )
identifier[prop] [ literal[string] ]= identifier[prop] [ literal[string] ]/ identifier[rho0] / identifier[Tll] *(
identifier[Ltp] / literal[int] *( identifier[tau] + literal[int] )*( identifier[fi] + literal[int] )+( identifier[omega0] *( literal[int] - identifier[fi] ** literal[int] )/ literal[int] + identifier[Lp] *( identifier[fi] + literal[int] ))/ literal[int] -
( identifier[tau] + literal[int] )* identifier[Lt] / literal[int] * identifier[Xi] *( identifier[Lp] - identifier[omega0] * identifier[fi] )+ identifier[phirtp] )
identifier[prop] [ literal[string] ]=- identifier[R] *( identifier[tau] + literal[int] )*( identifier[Lt] *( identifier[fi] + literal[int] )+( identifier[tau] + literal[int] )/ literal[int] *( identifier[Ltt] *( identifier[fi] + literal[int] )- identifier[Lt] ** literal[int] * identifier[Xi] )+ identifier[phirtt] )
identifier[prop] [ literal[string] ]= identifier[prop] [ literal[string] ]- identifier[T] * identifier[prop] [ literal[string] ]** literal[int] / identifier[prop] [ literal[string] ]/ identifier[prop] [ literal[string] ]* literal[int]
identifier[prop] [ literal[string] ]=( identifier[prop] [ literal[string] ]* identifier[prop] [ literal[string] ]* literal[int] * identifier[prop] [ literal[string] ]/ identifier[prop] [ literal[string] ])**- literal[int]
keyword[return] identifier[prop] | def _Supercooled(T, P):
"""Guideline on thermodynamic properties of supercooled water
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
prop : dict
Dict with calculated properties of water. The available properties are:
* L: Ordering field, [-]
* x: Mole fraction of low-density structure, [-]
* rho: Density, [kg/m³]
* s: Specific entropy, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
* u: Specific internal energy, [kJ/kg]
* a: Specific Helmholtz energy, [kJ/kg]
* g: Specific Gibbs energy, [kJ/kg]
* alfap: Thermal expansion coefficient, [1/K]
* xkappa : Isothermal compressibility, [1/MPa]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* cv: Specific isochoric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s²]
Notes
------
Raise :class:`NotImplementedError` if input isn't in limit:
* Tm ≤ T ≤ 300
* 0 < P ≤ 1000
The minimum temperature in range of validity is the melting temperature, it
depend of pressure
Examples
--------
>>> liq = _Supercooled(235.15, 0.101325)
>>> liq["rho"], liq["cp"], liq["w"]
968.09999 5.997563 1134.5855
References
----------
IAPWS, Guideline on Thermodynamic Properties of Supercooled Water,
http://iapws.org/relguide/Supercooled.html
"""
# Check input in range of validity
if P < 198.9:
Tita = T / 235.15
Ph = 0.1 + 228.27 * (1 - Tita ** 6.243) + 15.724 * (1 - Tita ** 79.81)
if P < Ph or T > 300:
raise NotImplementedError('Incoming out of bound') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['P']]
else:
Th = 172.82 + 0.03718 * P + 3.403e-05 * P ** 2 - 1.573e-08 * P ** 3
if T < Th or T > 300 or P > 1000:
raise NotImplementedError('Incoming out of bound') # depends on [control=['if'], data=[]]
# Parameters, Table 1
Tll = 228.2
rho0 = 1081.6482
R = 0.461523087
pi0 = 300000.0 / rho0 / R / Tll
omega0 = 0.5212269
L0 = 0.76317954
k0 = 0.072158686
k1 = -0.31569232
k2 = 5.2992608
# Reducing parameters, Eq 2
tau = T / Tll - 1
p = P * 1000 / rho0 / R / Tll
tau_ = tau + 1
p_ = p + pi0
# Eq 3
ci = [-8.1570681381655, 1.2875032, 7.0901673598012, -0.032779161, 0.73703949, -0.21628622, -5.1782479, 0.00042293517, 0.023592109, 4.3773754, -0.002996777, -0.96558018, 3.7595286, 1.2632441, 0.28542697, -0.85994947, -0.32916153, 0.090019616, 0.081149726, -3.2788213]
ai = [0, 0, 1, -0.2555, 1.5762, 1.64, 3.6385, -0.3828, 1.6219, 4.3287, 3.4763, 5.1556, -0.3593, 5.0361, 2.9786, 6.2373, 4.046, 5.3558, 9.0157, 1.2194]
bi = [0, 1, 0, 2.1051, 1.1422, 0.951, 0, 3.6402, 2.076, -0.0016, 2.2769, 0.0008, 0.3706, -0.3975, 2.973, -0.318, 2.9805, 2.9265, 0.4456, 0.1298]
di = [0, 0, 0, -0.0016, 0.6894, 0.013, 0.0002, 0.0435, 0.05, 0.0004, 0.0528, 0.0147, 0.8584, 0.9924, 1.0041, 1.0961, 1.0228, 1.0303, 1.618, 0.5213]
phir = phirt = phirp = phirtt = phirtp = phirpp = 0
for (c, a, b, d) in zip(ci, ai, bi, di):
phir += c * tau_ ** a * p_ ** b * exp(-d * p_)
phirt += c * a * tau_ ** (a - 1) * p_ ** b * exp(-d * p_)
phirp += c * tau_ ** a * p_ ** (b - 1) * (b - d * p_) * exp(-d * p_)
phirtt += c * a * (a - 1) * tau_ ** (a - 2) * p_ ** b * exp(-d * p_)
phirtp += c * a * tau_ ** (a - 1) * p_ ** (b - 1) * (b - d * p_) * exp(-d * p_)
phirpp += c * tau_ ** a * p_ ** (b - 2) * ((d * p_ - b) ** 2 - b) * exp(-d * p_) # depends on [control=['for'], data=[]]
# Eq 5
K1 = ((1 + k0 * k2 + k1 * (p - k2 * tau)) ** 2 - 4 * k0 * k1 * k2 * (p - k2 * tau)) ** 0.5
K2 = (1 + k2 ** 2) ** 0.5
# Eq 6
omega = 2 + omega0 * p
# Eq 4
L = L0 * K2 / 2 / k1 / k2 * (1 + k0 * k2 + k1 * (p + k2 * tau) - K1)
# Define interval of solution, Table 4
if omega < 10 / 9 * (log(19) - L):
xmin = 0.049
xmax = 0.5 # depends on [control=['if'], data=[]]
elif 10 / 9 * (log(19) - L) <= omega < 50 / 49 * (log(99) - L):
xmin = 0.0099
xmax = 0.051 # depends on [control=['if'], data=[]]
else:
xmin = 0.99 * exp(-50 / 49 * L - omega)
xmax = min(1.1 * exp(-L - omega), 0.0101)
def f(x):
return abs(L + log(x / (1 - x)) + omega * (1 - 2 * x))
x = minimize(f, ((xmin + xmax) / 2,), bounds=((xmin, xmax),))['x'][0]
# Eq 12
fi = 2 * x - 1
Xi = 1 / (2 / (1 - fi ** 2) - omega)
# Derivatives, Table 3
Lt = L0 * K2 / 2 * (1 + (1 - k0 * k2 + k1 * (p - k2 * tau)) / K1)
Lp = L0 * K2 * (K1 + k0 * k2 - k1 * p + k1 * k2 * tau - 1) / 2 / k2 / K1
Ltt = -2 * L0 * K2 * k0 * k1 * k2 ** 2 / K1 ** 3
Ltp = 2 * L0 * K2 * k0 * k1 * k2 / K1 ** 3
Lpp = -2 * L0 * K2 * k0 * k1 / K1 ** 3
prop = {}
prop['L'] = L
prop['x'] = x
# Eq 13
prop['rho'] = rho0 / ((tau + 1) / 2 * (omega0 / 2 * (1 - fi ** 2) + Lp * (fi + 1)) + phirp)
# Eq 1
prop['g'] = phir + (tau + 1) * (x * L + x * log(x) + (1 - x) * log(1 - x) + omega * x * (1 - x))
# Eq 14
prop['s'] = -R * ((tau + 1) / 2 * Lt * (fi + 1) + (x * L + x * log(x) + (1 - x) * log(1 - x) + omega * x * (1 - x)) + phirt)
# Basic derived state properties
prop['h'] = prop['g'] + T * prop['s']
prop['u'] = prop['h'] + P / prop['rho']
prop['a'] = prop['u'] - T * prop['s']
# Eq 15
prop['xkappa'] = prop['rho'] / rho0 ** 2 / R * 1000 / Tll * ((tau + 1) / 2 * (Xi * (Lp - omega0 * fi) ** 2 - (fi + 1) * Lpp) - phirpp)
prop['alfap'] = prop['rho'] / rho0 / Tll * (Ltp / 2 * (tau + 1) * (fi + 1) + (omega0 * (1 - fi ** 2) / 2 + Lp * (fi + 1)) / 2 - (tau + 1) * Lt / 2 * Xi * (Lp - omega0 * fi) + phirtp)
prop['cp'] = -R * (tau + 1) * (Lt * (fi + 1) + (tau + 1) / 2 * (Ltt * (fi + 1) - Lt ** 2 * Xi) + phirtt)
# Eq 16
prop['cv'] = prop['cp'] - T * prop['alfap'] ** 2 / prop['rho'] / prop['xkappa'] * 1000.0
# Eq 17
prop['w'] = (prop['rho'] * prop['xkappa'] * 1e-06 * prop['cv'] / prop['cp']) ** (-0.5)
return prop |
def stop_instance(self, instance_id):
    """Stops the instance gracefully.

    :param str instance_id: instance identifier
    :raises KeyError: if ``instance_id`` is not tracked in ``_instances``
    """
    # Terminate the backing cloud instance, then drop it from the local
    # registry so it is no longer tracked.
    self._load_instance(instance_id).terminate()
    self._instances.pop(instance_id)
constant[Stops the instance gracefully.
:param str instance_id: instance identifier
]
variable[instance] assign[=] call[name[self]._load_instance, parameter[name[instance_id]]]
call[name[instance].terminate, parameter[]]
<ast.Delete object at 0x7da1b060df30> | keyword[def] identifier[stop_instance] ( identifier[self] , identifier[instance_id] ):
literal[string]
identifier[instance] = identifier[self] . identifier[_load_instance] ( identifier[instance_id] )
identifier[instance] . identifier[terminate] ()
keyword[del] identifier[self] . identifier[_instances] [ identifier[instance_id] ] | def stop_instance(self, instance_id):
"""Stops the instance gracefully.
:param str instance_id: instance identifier
"""
instance = self._load_instance(instance_id)
instance.terminate()
del self._instances[instance_id] |
def timeFormat(time_from, time_to=None, prefix="", infix=None):
    """
    Format the times time_from and optionally time_to, e.g. 10am

    :param time_from: the starting time; skipped when ``""`` or ``None``
    :param time_to: optional ending time; skipped when ``""`` or ``None``
    :param prefix: text placed immediately before the starting time
    :param infix: separator between the two times; when ``None`` the
        localized "``{fromTime} to {toTime}``" phrase is used instead
    :return: the formatted, whitespace-stripped string
    """
    retval = ""
    # Explicit ""/None checks: time(0, 0) must still be formatted, so
    # plain truthiness would be wrong here.
    if time_from != "" and time_from is not None:
        retval += prefix
        # "fA" -> 12-hour time with am/pm, lower-cased (e.g. "10am").
        retval += dateformat.time_format(time_from, "fA").lower()
    if time_to != "" and time_to is not None:
        # time_format already returns a str; the previous redundant
        # format() wrapper has been dropped.
        to = dateformat.time_format(time_to, "fA").lower()
        if infix is not None:
            retval = "{} {} {}".format(retval, infix, to)
        else:
            retval = _("{fromTime} to {toTime}").format(fromTime=retval,
                                                        toTime=to)
    return retval.strip()
constant[
Format the times time_from and optionally time_to, e.g. 10am
]
variable[retval] assign[=] constant[]
if <ast.BoolOp object at 0x7da204620c70> begin[:]
<ast.AugAssign object at 0x7da204623c40>
<ast.AugAssign object at 0x7da204620df0>
if <ast.BoolOp object at 0x7da204622ec0> begin[:]
variable[to] assign[=] call[name[format], parameter[call[call[name[dateformat].time_format, parameter[name[time_to], constant[fA]]].lower, parameter[]]]]
if compare[name[infix] is_not constant[None]] begin[:]
variable[retval] assign[=] call[constant[{} {} {}].format, parameter[name[retval], name[infix], name[to]]]
return[call[name[retval].strip, parameter[]]] | keyword[def] identifier[timeFormat] ( identifier[time_from] , identifier[time_to] = keyword[None] , identifier[prefix] = literal[string] , identifier[infix] = keyword[None] ):
literal[string]
identifier[retval] = literal[string]
keyword[if] identifier[time_from] != literal[string] keyword[and] identifier[time_from] keyword[is] keyword[not] keyword[None] :
identifier[retval] += identifier[prefix]
identifier[retval] += identifier[dateformat] . identifier[time_format] ( identifier[time_from] , literal[string] ). identifier[lower] ()
keyword[if] identifier[time_to] != literal[string] keyword[and] identifier[time_to] keyword[is] keyword[not] keyword[None] :
identifier[to] = identifier[format] ( identifier[dateformat] . identifier[time_format] ( identifier[time_to] , literal[string] ). identifier[lower] ())
keyword[if] identifier[infix] keyword[is] keyword[not] keyword[None] :
identifier[retval] = literal[string] . identifier[format] ( identifier[retval] , identifier[infix] , identifier[to] )
keyword[else] :
identifier[retval] = identifier[_] ( literal[string] ). identifier[format] ( identifier[fromTime] = identifier[retval] ,
identifier[toTime] = identifier[to] )
keyword[return] identifier[retval] . identifier[strip] () | def timeFormat(time_from, time_to=None, prefix='', infix=None):
"""
Format the times time_from and optionally time_to, e.g. 10am
"""
retval = ''
if time_from != '' and time_from is not None:
retval += prefix
retval += dateformat.time_format(time_from, 'fA').lower() # depends on [control=['if'], data=[]]
if time_to != '' and time_to is not None:
to = format(dateformat.time_format(time_to, 'fA').lower())
if infix is not None:
retval = '{} {} {}'.format(retval, infix, to) # depends on [control=['if'], data=['infix']]
else:
retval = _('{fromTime} to {toTime}').format(fromTime=retval, toTime=to) # depends on [control=['if'], data=[]]
return retval.strip() |
def reset(self):
    '''Restores the starting position.

    Rebuilds the per-piece-type bitboards, empties both players' hands,
    resets turn/move bookkeeping and recomputes the Zobrist hash for the
    initial shogi setup with black to move.
    '''
    # One bitboard per piece type; each holds the squares occupied by
    # that type for *both* colours.
    self.piece_bb = [
        BB_VOID, # NONE
        BB_RANK_C | BB_RANK_G, # PAWN
        BB_A1 | BB_I1 | BB_A9 | BB_I9, # LANCE
        BB_A2 | BB_A8 | BB_I2 | BB_I8, # KNIGHT
        BB_A3 | BB_A7 | BB_I3 | BB_I7, # SILVER
        BB_A4 | BB_A6 | BB_I4 | BB_I6, # GOLD
        BB_B2 | BB_H8, # BISHOP
        BB_B8 | BB_H2, # ROOK
        BB_A5 | BB_I5, # KING
        BB_VOID, # PROM_PAWN
        BB_VOID, # PROM_LANCE
        BB_VOID, # PROM_KNIGHT
        BB_VOID, # PROM_SILVER
        BB_VOID, # PROM_BISHOP
        BB_VOID, # PROM_ROOK
    ]
    # Neither player holds captured pieces at the start.
    self.pieces_in_hand = [collections.Counter(), collections.Counter()]
    # Colour-wise occupancy; presumably (black side, white side), matching
    # the king_squares ordering below — TODO confirm against Occupied.
    self.occupied = Occupied(BB_RANK_G | BB_H2 | BB_H8 | BB_RANK_I, BB_RANK_A | BB_B2 | BB_B8 | BB_RANK_C)
    self.king_squares = [I5, A5]
    # Square-indexed piece-type table, derived from the bitboards above.
    self.pieces = [NONE for i in SQUARES]
    for i in SQUARES:
        mask = BB_SQUARES[i]
        for piece_type in PIECE_TYPES:
            if mask & self.piece_bb[piece_type]:
                self.pieces[i] = piece_type
    self.turn = BLACK
    self.move_number = 1
    self.captured_piece_stack = collections.deque()
    self.move_stack = collections.deque()
    # Full-board hash computed once here; kept up to date incrementally
    # as moves are made/unmade.
    self.incremental_zobrist_hash = self.board_zobrist_hash(DEFAULT_RANDOM_ARRAY)
    # Repetition counter keyed by position hash, seeded with the start
    # position.
    self.transpositions = collections.Counter((self.zobrist_hash(), ))
constant[Restores the starting position.]
name[self].piece_bb assign[=] list[[<ast.Name object at 0x7da1b03fb0a0>, <ast.BinOp object at 0x7da1b03fab60>, <ast.BinOp object at 0x7da1b03f9fc0>, <ast.BinOp object at 0x7da1b03fa1d0>, <ast.BinOp object at 0x7da1b03fb4c0>, <ast.BinOp object at 0x7da1b03fbf40>, <ast.BinOp object at 0x7da1b03fa770>, <ast.BinOp object at 0x7da1b03fb1c0>, <ast.BinOp object at 0x7da1b03f9e40>, <ast.Name object at 0x7da1b03f80a0>, <ast.Name object at 0x7da1b03f95d0>, <ast.Name object at 0x7da1b03f8490>, <ast.Name object at 0x7da1b03f9b10>, <ast.Name object at 0x7da1b03f9090>, <ast.Name object at 0x7da1b03f9f60>]]
name[self].pieces_in_hand assign[=] list[[<ast.Call object at 0x7da1b03faaa0>, <ast.Call object at 0x7da1b03fa6b0>]]
name[self].occupied assign[=] call[name[Occupied], parameter[binary_operation[binary_operation[binary_operation[name[BB_RANK_G] <ast.BitOr object at 0x7da2590d6aa0> name[BB_H2]] <ast.BitOr object at 0x7da2590d6aa0> name[BB_H8]] <ast.BitOr object at 0x7da2590d6aa0> name[BB_RANK_I]], binary_operation[binary_operation[binary_operation[name[BB_RANK_A] <ast.BitOr object at 0x7da2590d6aa0> name[BB_B2]] <ast.BitOr object at 0x7da2590d6aa0> name[BB_B8]] <ast.BitOr object at 0x7da2590d6aa0> name[BB_RANK_C]]]]
name[self].king_squares assign[=] list[[<ast.Name object at 0x7da1b03faec0>, <ast.Name object at 0x7da1b03f9ab0>]]
name[self].pieces assign[=] <ast.ListComp object at 0x7da1b03fb9d0>
for taget[name[i]] in starred[name[SQUARES]] begin[:]
variable[mask] assign[=] call[name[BB_SQUARES]][name[i]]
for taget[name[piece_type]] in starred[name[PIECE_TYPES]] begin[:]
if binary_operation[name[mask] <ast.BitAnd object at 0x7da2590d6b60> call[name[self].piece_bb][name[piece_type]]] begin[:]
call[name[self].pieces][name[i]] assign[=] name[piece_type]
name[self].turn assign[=] name[BLACK]
name[self].move_number assign[=] constant[1]
name[self].captured_piece_stack assign[=] call[name[collections].deque, parameter[]]
name[self].move_stack assign[=] call[name[collections].deque, parameter[]]
name[self].incremental_zobrist_hash assign[=] call[name[self].board_zobrist_hash, parameter[name[DEFAULT_RANDOM_ARRAY]]]
name[self].transpositions assign[=] call[name[collections].Counter, parameter[tuple[[<ast.Call object at 0x7da1b03525c0>]]]] | keyword[def] identifier[reset] ( identifier[self] ):
literal[string]
identifier[self] . identifier[piece_bb] =[
identifier[BB_VOID] ,
identifier[BB_RANK_C] | identifier[BB_RANK_G] ,
identifier[BB_A1] | identifier[BB_I1] | identifier[BB_A9] | identifier[BB_I9] ,
identifier[BB_A2] | identifier[BB_A8] | identifier[BB_I2] | identifier[BB_I8] ,
identifier[BB_A3] | identifier[BB_A7] | identifier[BB_I3] | identifier[BB_I7] ,
identifier[BB_A4] | identifier[BB_A6] | identifier[BB_I4] | identifier[BB_I6] ,
identifier[BB_B2] | identifier[BB_H8] ,
identifier[BB_B8] | identifier[BB_H2] ,
identifier[BB_A5] | identifier[BB_I5] ,
identifier[BB_VOID] ,
identifier[BB_VOID] ,
identifier[BB_VOID] ,
identifier[BB_VOID] ,
identifier[BB_VOID] ,
identifier[BB_VOID] ,
]
identifier[self] . identifier[pieces_in_hand] =[ identifier[collections] . identifier[Counter] (), identifier[collections] . identifier[Counter] ()]
identifier[self] . identifier[occupied] = identifier[Occupied] ( identifier[BB_RANK_G] | identifier[BB_H2] | identifier[BB_H8] | identifier[BB_RANK_I] , identifier[BB_RANK_A] | identifier[BB_B2] | identifier[BB_B8] | identifier[BB_RANK_C] )
identifier[self] . identifier[king_squares] =[ identifier[I5] , identifier[A5] ]
identifier[self] . identifier[pieces] =[ identifier[NONE] keyword[for] identifier[i] keyword[in] identifier[SQUARES] ]
keyword[for] identifier[i] keyword[in] identifier[SQUARES] :
identifier[mask] = identifier[BB_SQUARES] [ identifier[i] ]
keyword[for] identifier[piece_type] keyword[in] identifier[PIECE_TYPES] :
keyword[if] identifier[mask] & identifier[self] . identifier[piece_bb] [ identifier[piece_type] ]:
identifier[self] . identifier[pieces] [ identifier[i] ]= identifier[piece_type]
identifier[self] . identifier[turn] = identifier[BLACK]
identifier[self] . identifier[move_number] = literal[int]
identifier[self] . identifier[captured_piece_stack] = identifier[collections] . identifier[deque] ()
identifier[self] . identifier[move_stack] = identifier[collections] . identifier[deque] ()
identifier[self] . identifier[incremental_zobrist_hash] = identifier[self] . identifier[board_zobrist_hash] ( identifier[DEFAULT_RANDOM_ARRAY] )
identifier[self] . identifier[transpositions] = identifier[collections] . identifier[Counter] (( identifier[self] . identifier[zobrist_hash] (),)) | def reset(self):
"""Restores the starting position.""" # NONE
# PAWN
# LANCE
# KNIGHT
# SILVER
# GOLD
# BISHOP
# ROOK
# KING
# PROM_PAWN
# PROM_LANCE
# PROM_KNIGHT
# PROM_SILVER
# PROM_BISHOP
# PROM_ROOK
self.piece_bb = [BB_VOID, BB_RANK_C | BB_RANK_G, BB_A1 | BB_I1 | BB_A9 | BB_I9, BB_A2 | BB_A8 | BB_I2 | BB_I8, BB_A3 | BB_A7 | BB_I3 | BB_I7, BB_A4 | BB_A6 | BB_I4 | BB_I6, BB_B2 | BB_H8, BB_B8 | BB_H2, BB_A5 | BB_I5, BB_VOID, BB_VOID, BB_VOID, BB_VOID, BB_VOID, BB_VOID]
self.pieces_in_hand = [collections.Counter(), collections.Counter()]
self.occupied = Occupied(BB_RANK_G | BB_H2 | BB_H8 | BB_RANK_I, BB_RANK_A | BB_B2 | BB_B8 | BB_RANK_C)
self.king_squares = [I5, A5]
self.pieces = [NONE for i in SQUARES]
for i in SQUARES:
mask = BB_SQUARES[i]
for piece_type in PIECE_TYPES:
if mask & self.piece_bb[piece_type]:
self.pieces[i] = piece_type # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['piece_type']] # depends on [control=['for'], data=['i']]
self.turn = BLACK
self.move_number = 1
self.captured_piece_stack = collections.deque()
self.move_stack = collections.deque()
self.incremental_zobrist_hash = self.board_zobrist_hash(DEFAULT_RANDOM_ARRAY)
self.transpositions = collections.Counter((self.zobrist_hash(),)) |
def line_ribbon(self):
'''Display the protein secondary structure as a white lines that passes through the
backbone chain.
'''
# Control points are the CA (C alphas)
backbone = np.array(self.topology['atom_names']) == 'CA'
smoothline = self.add_representation('smoothline', {'coordinates': self.coordinates[backbone],
'color': 0xffffff})
def update(self=self, smoothline=smoothline):
self.update_representation(smoothline, {'coordinates': self.coordinates[backbone]})
self.update_callbacks.append(update)
self.autozoom(self.coordinates) | def function[line_ribbon, parameter[self]]:
constant[Display the protein secondary structure as a white lines that passes through the
backbone chain.
]
variable[backbone] assign[=] compare[call[name[np].array, parameter[call[name[self].topology][constant[atom_names]]]] equal[==] constant[CA]]
variable[smoothline] assign[=] call[name[self].add_representation, parameter[constant[smoothline], dictionary[[<ast.Constant object at 0x7da1b1beaaa0>, <ast.Constant object at 0x7da1b1beb070>], [<ast.Subscript object at 0x7da1b1be9f90>, <ast.Constant object at 0x7da1b1be9e10>]]]]
def function[update, parameter[self, smoothline]]:
call[name[self].update_representation, parameter[name[smoothline], dictionary[[<ast.Constant object at 0x7da1b1beaa10>], [<ast.Subscript object at 0x7da1b1bea7a0>]]]]
call[name[self].update_callbacks.append, parameter[name[update]]]
call[name[self].autozoom, parameter[name[self].coordinates]] | keyword[def] identifier[line_ribbon] ( identifier[self] ):
literal[string]
identifier[backbone] = identifier[np] . identifier[array] ( identifier[self] . identifier[topology] [ literal[string] ])== literal[string]
identifier[smoothline] = identifier[self] . identifier[add_representation] ( literal[string] ,{ literal[string] : identifier[self] . identifier[coordinates] [ identifier[backbone] ],
literal[string] : literal[int] })
keyword[def] identifier[update] ( identifier[self] = identifier[self] , identifier[smoothline] = identifier[smoothline] ):
identifier[self] . identifier[update_representation] ( identifier[smoothline] ,{ literal[string] : identifier[self] . identifier[coordinates] [ identifier[backbone] ]})
identifier[self] . identifier[update_callbacks] . identifier[append] ( identifier[update] )
identifier[self] . identifier[autozoom] ( identifier[self] . identifier[coordinates] ) | def line_ribbon(self):
"""Display the protein secondary structure as a white lines that passes through the
backbone chain.
"""
# Control points are the CA (C alphas)
backbone = np.array(self.topology['atom_names']) == 'CA'
smoothline = self.add_representation('smoothline', {'coordinates': self.coordinates[backbone], 'color': 16777215})
def update(self=self, smoothline=smoothline):
self.update_representation(smoothline, {'coordinates': self.coordinates[backbone]})
self.update_callbacks.append(update)
self.autozoom(self.coordinates) |
def _fix_dependendent_params(self, i):
"""Unhide keys if necessary after removing the param at index *i*."""
if not self.params[i].showkey:
for param in self.params[i + 1:]:
if not param.showkey:
param.showkey = True | def function[_fix_dependendent_params, parameter[self, i]]:
constant[Unhide keys if necessary after removing the param at index *i*.]
if <ast.UnaryOp object at 0x7da20cabf430> begin[:]
for taget[name[param]] in starred[call[name[self].params][<ast.Slice object at 0x7da20cabee60>]] begin[:]
if <ast.UnaryOp object at 0x7da1b12091e0> begin[:]
name[param].showkey assign[=] constant[True] | keyword[def] identifier[_fix_dependendent_params] ( identifier[self] , identifier[i] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[params] [ identifier[i] ]. identifier[showkey] :
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[params] [ identifier[i] + literal[int] :]:
keyword[if] keyword[not] identifier[param] . identifier[showkey] :
identifier[param] . identifier[showkey] = keyword[True] | def _fix_dependendent_params(self, i):
"""Unhide keys if necessary after removing the param at index *i*."""
if not self.params[i].showkey:
for param in self.params[i + 1:]:
if not param.showkey:
param.showkey = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']] # depends on [control=['if'], data=[]] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.