code | code_sememe | token_type | code_dependency
---|---|---|---
def _schedule(self, delay: float, event: Callable, *args: Any, **kwargs: Any) -> int:
"""
Schedules a one-time event to be run along the simulation. The event is scheduled relative to current simulator
    time, so delay is expected to be a non-negative simulation time interval. The `event` parameter corresponds to a
callable object (e.g. a function): it will be called so as to "execute" the event, with the positional and
keyword parameters that follow `event` in the call to `_schedule()` (note that the value of these arguments are
evaluated when `_schedule()` is called, not when the event is executed). Once this event function returns, the
simulation carries on to the next event, or stops if none remain.
    Note that this method is private, and is meant for internal usage by the :py:class:`Simulator` and
:py:class:`Process` classes, and helper functions of this module.
:return: Unique identifier for the scheduled event.
"""
if _logger is not None:
self._log(
DEBUG,
"schedule",
delay=delay,
fn=event,
args=args,
kwargs=kwargs,
counter=self._counter,
__now=self.now()
)
delay = float(delay)
if delay < 0.0:
        raise ValueError("Delay must be non-negative.")
# Use counter to strictly order events happening at the same simulated time. This gives a total order on events,
# working around the heap queue not yielding a stable ordering.
id_event = self._counter
heappush(self._events, _Event(self._ts_now + delay, id_event, event, *args, **kwargs))
self._counter += 1
return id_event | def function[_schedule, parameter[self, delay, event]]:
constant[
Schedules a one-time event to be run along the simulation. The event is scheduled relative to current simulator
    time, so delay is expected to be a non-negative simulation time interval. The `event` parameter corresponds to a
callable object (e.g. a function): it will be called so as to "execute" the event, with the positional and
keyword parameters that follow `event` in the call to `_schedule()` (note that the value of these arguments are
evaluated when `_schedule()` is called, not when the event is executed). Once this event function returns, the
simulation carries on to the next event, or stops if none remain.
    Note that this method is private, and is meant for internal usage by the :py:class:`Simulator` and
:py:class:`Process` classes, and helper functions of this module.
:return: Unique identifier for the scheduled event.
]
if compare[name[_logger] is_not constant[None]] begin[:]
call[name[self]._log, parameter[name[DEBUG], constant[schedule]]]
variable[delay] assign[=] call[name[float], parameter[name[delay]]]
if compare[name[delay] less[<] constant[0.0]] begin[:]
<ast.Raise object at 0x7da1b031ca00>
variable[id_event] assign[=] name[self]._counter
call[name[heappush], parameter[name[self]._events, call[name[_Event], parameter[binary_operation[name[self]._ts_now + name[delay]], name[id_event], name[event], <ast.Starred object at 0x7da1b031e140>]]]]
<ast.AugAssign object at 0x7da1b031e170>
return[name[id_event]] | keyword[def] identifier[_schedule] ( identifier[self] , identifier[delay] : identifier[float] , identifier[event] : identifier[Callable] ,* identifier[args] : identifier[Any] ,** identifier[kwargs] : identifier[Any] )-> identifier[int] :
literal[string]
keyword[if] identifier[_logger] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_log] (
identifier[DEBUG] ,
literal[string] ,
identifier[delay] = identifier[delay] ,
identifier[fn] = identifier[event] ,
identifier[args] = identifier[args] ,
identifier[kwargs] = identifier[kwargs] ,
identifier[counter] = identifier[self] . identifier[_counter] ,
identifier[__now] = identifier[self] . identifier[now] ()
)
identifier[delay] = identifier[float] ( identifier[delay] )
keyword[if] identifier[delay] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[id_event] = identifier[self] . identifier[_counter]
identifier[heappush] ( identifier[self] . identifier[_events] , identifier[_Event] ( identifier[self] . identifier[_ts_now] + identifier[delay] , identifier[id_event] , identifier[event] ,* identifier[args] ,** identifier[kwargs] ))
identifier[self] . identifier[_counter] += literal[int]
keyword[return] identifier[id_event] | def _schedule(self, delay: float, event: Callable, *args: Any, **kwargs: Any) -> int:
"""
Schedules a one-time event to be run along the simulation. The event is scheduled relative to current simulator
    time, so delay is expected to be a non-negative simulation time interval. The `event` parameter corresponds to a
callable object (e.g. a function): it will be called so as to "execute" the event, with the positional and
keyword parameters that follow `event` in the call to `_schedule()` (note that the value of these arguments are
evaluated when `_schedule()` is called, not when the event is executed). Once this event function returns, the
simulation carries on to the next event, or stops if none remain.
    Note that this method is private, and is meant for internal usage by the :py:class:`Simulator` and
:py:class:`Process` classes, and helper functions of this module.
:return: Unique identifier for the scheduled event.
"""
if _logger is not None:
self._log(DEBUG, 'schedule', delay=delay, fn=event, args=args, kwargs=kwargs, counter=self._counter, __now=self.now()) # depends on [control=['if'], data=[]]
delay = float(delay)
if delay < 0.0:
        raise ValueError('Delay must be non-negative.') # depends on [control=['if'], data=[]]
# Use counter to strictly order events happening at the same simulated time. This gives a total order on events,
# working around the heap queue not yielding a stable ordering.
id_event = self._counter
heappush(self._events, _Event(self._ts_now + delay, id_event, event, *args, **kwargs))
self._counter += 1
return id_event |
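
The private `_schedule` above leans on a simple trick: keying the heap by `(timestamp, counter)` gives a total order, so events scheduled for the same simulated time pop in FIFO order. A minimal, self-contained sketch of that pattern (toy names, not the actual `Simulator`/`_Event` API):

```python
# Standalone sketch of the heap-plus-counter ordering used by _schedule().
from heapq import heappush, heappop

events = []
counter = 0
now = 0.0

def schedule(delay, fn, *args):
    """Key events by (timestamp, counter) so same-time events stay FIFO."""
    global counter
    delay = float(delay)
    if delay < 0.0:
        raise ValueError("Delay must be non-negative.")
    heappush(events, (now + delay, counter, fn, args))
    counter += 1
    return counter - 1

schedule(1.0, print, "first")
schedule(1.0, print, "second")   # identical timestamp, larger counter
while events:
    _ts, _id, fn, args = heappop(events)
    fn(*args)                    # prints "first", then "second"
```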
def buildDQMasks(imageObjectList,configObj):
""" Build DQ masks for all input images.
"""
    # Ensure that the input imageObject is a list
if not isinstance(imageObjectList, list):
imageObjectList = [imageObjectList]
for img in imageObjectList:
img.buildMask(configObj['single'], configObj['bits']) | def function[buildDQMasks, parameter[imageObjectList, configObj]]:
constant[ Build DQ masks for all input images.
]
if <ast.UnaryOp object at 0x7da1b1bb8580> begin[:]
variable[imageObjectList] assign[=] list[[<ast.Name object at 0x7da1b1bbab30>]]
for taget[name[img]] in starred[name[imageObjectList]] begin[:]
call[name[img].buildMask, parameter[call[name[configObj]][constant[single]], call[name[configObj]][constant[bits]]]] | keyword[def] identifier[buildDQMasks] ( identifier[imageObjectList] , identifier[configObj] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[imageObjectList] , identifier[list] ):
identifier[imageObjectList] =[ identifier[imageObjectList] ]
keyword[for] identifier[img] keyword[in] identifier[imageObjectList] :
identifier[img] . identifier[buildMask] ( identifier[configObj] [ literal[string] ], identifier[configObj] [ literal[string] ]) | def buildDQMasks(imageObjectList, configObj):
""" Build DQ masks for all input images.
"""
    # Ensure that the input imageObject is a list
if not isinstance(imageObjectList, list):
imageObjectList = [imageObjectList] # depends on [control=['if'], data=[]]
for img in imageObjectList:
img.buildMask(configObj['single'], configObj['bits']) # depends on [control=['for'], data=['img']] |
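
One detail worth isolating from `buildDQMasks` is the guard that lets callers pass either a single image object or a list of them. A runnable sketch of that normalize-to-list idiom (generic names, not the drizzlepac API):

```python
def ensure_list(obj):
    # Mirrors buildDQMasks' guard: wrap a bare argument in a list,
    # pass an existing list through untouched.
    return obj if isinstance(obj, list) else [obj]

assert ensure_list("img.fits") == ["img.fits"]
assert ensure_list(["a.fits", "b.fits"]) == ["a.fits", "b.fits"]
```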
def thing_type_present(name, thingTypeName, thingTypeDescription,
searchableAttributesList,
region=None, key=None, keyid=None, profile=None):
'''
Ensure thing type exists.
.. versionadded:: 2016.11.0
name
The name of the state definition
thingTypeName
Name of the thing type
thingTypeDescription
Description of the thing type
searchableAttributesList
List of string attributes that are searchable for
the thing type
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used
profile
A dict with region, key, keyid, or a pillar key (string) that
contains a dict with region, key, and keyid
'''
ret = {
'name': thingTypeName,
'result': True,
'comment': '',
'changes': {}
}
r = __salt__['boto_iot.thing_type_exists'](
thingTypeName=thingTypeName,
region=region, key=key, keyid=keyid, profile=profile
)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to create thing type: {0}.'.format(r['error']['message'])
return ret
if r.get('exists'):
ret['result'] = True
ret['comment'] = 'Thing type with given name {0} already exists'.format(thingTypeName)
return ret
if __opts__['test']:
ret['comment'] = 'Thing type {0} is set to be created.'.format(thingTypeName)
ret['result'] = None
return ret
r = __salt__['boto_iot.create_thing_type'](
thingTypeName=thingTypeName,
thingTypeDescription=thingTypeDescription,
searchableAttributesList=searchableAttributesList,
region=region, key=key, keyid=keyid, profile=profile
)
if not r.get('created'):
ret['result'] = False
ret['comment'] = 'Failed to create thing type: {0}.'.format(r['error']['message'])
return ret
_describe = __salt__['boto_iot.describe_thing_type'](
thingTypeName=thingTypeName,
region=region, key=key, keyid=keyid, profile=profile
)
ret['changes']['old'] = {'thing_type': None}
ret['changes']['new'] = _describe
ret['comment'] = 'Thing Type {0} created.'.format(thingTypeName)
return ret | def function[thing_type_present, parameter[name, thingTypeName, thingTypeDescription, searchableAttributesList, region, key, keyid, profile]]:
constant[
Ensure thing type exists.
.. versionadded:: 2016.11.0
name
The name of the state definition
thingTypeName
Name of the thing type
thingTypeDescription
Description of the thing type
searchableAttributesList
List of string attributes that are searchable for
the thing type
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used
profile
A dict with region, key, keyid, or a pillar key (string) that
contains a dict with region, key, and keyid
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da2047e8340>, <ast.Constant object at 0x7da2047eb2e0>, <ast.Constant object at 0x7da2047e9ba0>, <ast.Constant object at 0x7da2047ea710>], [<ast.Name object at 0x7da2047e9810>, <ast.Constant object at 0x7da2047e9300>, <ast.Constant object at 0x7da2047e8f40>, <ast.Dict object at 0x7da2047e8490>]]
variable[r] assign[=] call[call[name[__salt__]][constant[boto_iot.thing_type_exists]], parameter[]]
if compare[constant[error] in name[r]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create thing type: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]]
return[name[ret]]
if call[name[r].get, parameter[constant[exists]]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[Thing type with given name {0} already exists].format, parameter[name[thingTypeName]]]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Thing type {0} is set to be created.].format, parameter[name[thingTypeName]]]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[r] assign[=] call[call[name[__salt__]][constant[boto_iot.create_thing_type]], parameter[]]
if <ast.UnaryOp object at 0x7da18dc04580> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to create thing type: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]]
return[name[ret]]
variable[_describe] assign[=] call[call[name[__salt__]][constant[boto_iot.describe_thing_type]], parameter[]]
call[call[name[ret]][constant[changes]]][constant[old]] assign[=] dictionary[[<ast.Constant object at 0x7da18dc06410>], [<ast.Constant object at 0x7da18dc06ad0>]]
call[call[name[ret]][constant[changes]]][constant[new]] assign[=] name[_describe]
call[name[ret]][constant[comment]] assign[=] call[constant[Thing Type {0} created.].format, parameter[name[thingTypeName]]]
return[name[ret]] | keyword[def] identifier[thing_type_present] ( identifier[name] , identifier[thingTypeName] , identifier[thingTypeDescription] ,
identifier[searchableAttributesList] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[ret] ={
literal[string] : identifier[thingTypeName] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] :{}
}
identifier[r] = identifier[__salt__] [ literal[string] ](
identifier[thingTypeName] = identifier[thingTypeName] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile]
)
keyword[if] literal[string] keyword[in] identifier[r] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ])
keyword[return] identifier[ret]
keyword[if] identifier[r] . identifier[get] ( literal[string] ):
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[thingTypeName] )
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[thingTypeName] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[r] = identifier[__salt__] [ literal[string] ](
identifier[thingTypeName] = identifier[thingTypeName] ,
identifier[thingTypeDescription] = identifier[thingTypeDescription] ,
identifier[searchableAttributesList] = identifier[searchableAttributesList] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile]
)
keyword[if] keyword[not] identifier[r] . identifier[get] ( literal[string] ):
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ])
keyword[return] identifier[ret]
identifier[_describe] = identifier[__salt__] [ literal[string] ](
identifier[thingTypeName] = identifier[thingTypeName] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile]
)
identifier[ret] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[None] }
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[_describe]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[thingTypeName] )
keyword[return] identifier[ret] | def thing_type_present(name, thingTypeName, thingTypeDescription, searchableAttributesList, region=None, key=None, keyid=None, profile=None):
"""
Ensure thing type exists.
.. versionadded:: 2016.11.0
name
The name of the state definition
thingTypeName
Name of the thing type
thingTypeDescription
Description of the thing type
searchableAttributesList
List of string attributes that are searchable for
the thing type
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used
profile
A dict with region, key, keyid, or a pillar key (string) that
contains a dict with region, key, and keyid
"""
ret = {'name': thingTypeName, 'result': True, 'comment': '', 'changes': {}}
r = __salt__['boto_iot.thing_type_exists'](thingTypeName=thingTypeName, region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to create thing type: {0}.'.format(r['error']['message'])
return ret # depends on [control=['if'], data=['r']]
if r.get('exists'):
ret['result'] = True
ret['comment'] = 'Thing type with given name {0} already exists'.format(thingTypeName)
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['comment'] = 'Thing type {0} is set to be created.'.format(thingTypeName)
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
r = __salt__['boto_iot.create_thing_type'](thingTypeName=thingTypeName, thingTypeDescription=thingTypeDescription, searchableAttributesList=searchableAttributesList, region=region, key=key, keyid=keyid, profile=profile)
if not r.get('created'):
ret['result'] = False
ret['comment'] = 'Failed to create thing type: {0}.'.format(r['error']['message'])
return ret # depends on [control=['if'], data=[]]
_describe = __salt__['boto_iot.describe_thing_type'](thingTypeName=thingTypeName, region=region, key=key, keyid=keyid, profile=profile)
ret['changes']['old'] = {'thing_type': None}
ret['changes']['new'] = _describe
ret['comment'] = 'Thing Type {0} created.'.format(thingTypeName)
return ret |
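
The state above follows the usual Salt pattern: existence check, test-mode short-circuit, create, then report `old`/`new` under `changes`. Inferred from the branches of the code itself (not from Salt documentation), the return dicts look roughly like this, with placeholder values:

```python
# Hypothetical return values mirroring thing_type_present()'s branches.
ret_test_mode = {
    'name': 'my_type',
    'result': None,   # None signals "would create" when __opts__['test'] is set
    'comment': 'Thing type my_type is set to be created.',
    'changes': {},
}
ret_created = {
    'name': 'my_type',
    'result': True,
    'comment': 'Thing Type my_type created.',
    'changes': {'old': {'thing_type': None},
                'new': {'thingTypeName': 'my_type'}},  # describe output, abridged
}
```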
def remove(self, param, author=None):
"""Remove by url or name"""
if isinstance(param, SkillEntry):
skill = param
else:
skill = self.find_skill(param, author)
skill.remove()
skills = [s for s in self.skills_data['skills']
if s['name'] != skill.name]
self.skills_data['skills'] = skills
return | def function[remove, parameter[self, param, author]]:
constant[Remove by url or name]
if call[name[isinstance], parameter[name[param], name[SkillEntry]]] begin[:]
variable[skill] assign[=] name[param]
call[name[skill].remove, parameter[]]
variable[skills] assign[=] <ast.ListComp object at 0x7da20c6c7d90>
call[name[self].skills_data][constant[skills]] assign[=] name[skills]
return[None] | keyword[def] identifier[remove] ( identifier[self] , identifier[param] , identifier[author] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[param] , identifier[SkillEntry] ):
identifier[skill] = identifier[param]
keyword[else] :
identifier[skill] = identifier[self] . identifier[find_skill] ( identifier[param] , identifier[author] )
identifier[skill] . identifier[remove] ()
identifier[skills] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[skills_data] [ literal[string] ]
keyword[if] identifier[s] [ literal[string] ]!= identifier[skill] . identifier[name] ]
identifier[self] . identifier[skills_data] [ literal[string] ]= identifier[skills]
keyword[return] | def remove(self, param, author=None):
"""Remove by url or name"""
if isinstance(param, SkillEntry):
skill = param # depends on [control=['if'], data=[]]
else:
skill = self.find_skill(param, author)
skill.remove()
skills = [s for s in self.skills_data['skills'] if s['name'] != skill.name]
self.skills_data['skills'] = skills
return |
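
Note that `remove` drops the entry from `skills_data` by rebuilding the list rather than mutating it in place. A toy, runnable illustration of that final step:

```python
skills_data = {'skills': [{'name': 'weather'}, {'name': 'timer'}]}
removed_name = 'weather'
# Rebuild the list without the removed entry, exactly as remove() does.
skills_data['skills'] = [s for s in skills_data['skills']
                         if s['name'] != removed_name]
assert skills_data['skills'] == [{'name': 'timer'}]
```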
def select(self, *args, **kwargs):
''' Query this object and all of its references for objects that
match the given selector.
There are a few different ways to call the ``select`` method.
The most general is to supply a JSON-like query dictionary as the
single argument or as keyword arguments:
Args:
        selector (JSON-like) : query dictionary specifying the objects to match
Keyword Arguments:
kwargs : query dict key/values as keyword arguments
Additionally, for compatibility with ``Model.select``, a selector
dict may be passed as ``selector`` keyword argument, in which case
the value of ``kwargs['selector']`` is used for the query.
For convenience, queries on just names can be made by supplying
the ``name`` string as the single parameter:
Args:
name (str) : the name to query on
Also queries on just type can be made simply by supplying the
``Model`` subclass as the single parameter:
Args:
type (Model) : the type to query on
Returns:
seq[Model]
Examples:
.. code-block:: python
# These three are equivalent
p.select(selector={"type": HoverTool})
p.select({"type": HoverTool})
p.select(HoverTool)
# These two are also equivalent
p.select({"name": "mycircle"})
p.select("mycircle")
# Keyword arguments can be supplied in place of selector dict
p.select({"name": "foo", "type": HoverTool})
p.select(name="foo", type=HoverTool)
'''
selector = _select_helper(args, kwargs)
# Want to pass selector that is a dictionary
return _list_attr_splat(find(self.references(), selector, {'plot': self})) | def function[select, parameter[self]]:
constant[ Query this object and all of its references for objects that
match the given selector.
There are a few different ways to call the ``select`` method.
The most general is to supply a JSON-like query dictionary as the
single argument or as keyword arguments:
Args:
        selector (JSON-like) : query dictionary specifying the objects to match
Keyword Arguments:
kwargs : query dict key/values as keyword arguments
Additionally, for compatibility with ``Model.select``, a selector
dict may be passed as ``selector`` keyword argument, in which case
the value of ``kwargs['selector']`` is used for the query.
For convenience, queries on just names can be made by supplying
the ``name`` string as the single parameter:
Args:
name (str) : the name to query on
Also queries on just type can be made simply by supplying the
``Model`` subclass as the single parameter:
Args:
type (Model) : the type to query on
Returns:
seq[Model]
Examples:
.. code-block:: python
# These three are equivalent
p.select(selector={"type": HoverTool})
p.select({"type": HoverTool})
p.select(HoverTool)
# These two are also equivalent
p.select({"name": "mycircle"})
p.select("mycircle")
# Keyword arguments can be supplied in place of selector dict
p.select({"name": "foo", "type": HoverTool})
p.select(name="foo", type=HoverTool)
]
variable[selector] assign[=] call[name[_select_helper], parameter[name[args], name[kwargs]]]
return[call[name[_list_attr_splat], parameter[call[name[find], parameter[call[name[self].references, parameter[]], name[selector], dictionary[[<ast.Constant object at 0x7da20c6c5990>], [<ast.Name object at 0x7da20c6c5360>]]]]]]] | keyword[def] identifier[select] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[selector] = identifier[_select_helper] ( identifier[args] , identifier[kwargs] )
keyword[return] identifier[_list_attr_splat] ( identifier[find] ( identifier[self] . identifier[references] (), identifier[selector] ,{ literal[string] : identifier[self] })) | def select(self, *args, **kwargs):
""" Query this object and all of its references for objects that
match the given selector.
There are a few different ways to call the ``select`` method.
The most general is to supply a JSON-like query dictionary as the
single argument or as keyword arguments:
Args:
                selector (JSON-like) : query dictionary specifying the objects to match
Keyword Arguments:
kwargs : query dict key/values as keyword arguments
Additionally, for compatibility with ``Model.select``, a selector
dict may be passed as ``selector`` keyword argument, in which case
the value of ``kwargs['selector']`` is used for the query.
For convenience, queries on just names can be made by supplying
the ``name`` string as the single parameter:
Args:
name (str) : the name to query on
Also queries on just type can be made simply by supplying the
``Model`` subclass as the single parameter:
Args:
type (Model) : the type to query on
Returns:
seq[Model]
Examples:
.. code-block:: python
# These three are equivalent
p.select(selector={"type": HoverTool})
p.select({"type": HoverTool})
p.select(HoverTool)
# These two are also equivalent
p.select({"name": "mycircle"})
p.select("mycircle")
# Keyword arguments can be supplied in place of selector dict
p.select({"name": "foo", "type": HoverTool})
p.select(name="foo", type=HoverTool)
"""
selector = _select_helper(args, kwargs)
# Want to pass selector that is a dictionary
return _list_attr_splat(find(self.references(), selector, {'plot': self})) |
def step(self, **args):
"""
SRN.step()
Extends network step method by automatically copying hidden
layer activations to the context layer.
"""
if self.sequenceType == None:
raise AttributeError("""sequenceType not set! Use SRN.setSequenceType() """)
# take care of any params other than layer names:
# two ways to clear context:
# 1. force it to right now with arg initContext = 1:
if 'initContext' in args:
if args['initContext']:
self.setContext()
del args['initContext']
# 2. have initContext be true
elif self.initContext:
self.setContext()
# if initContext is off, then we assume user knows that,
# so we reset the flags on all context layers:
if self.initContext == 0:
for context in list(self.contextLayers.values()):
context.activationSet = 1
# replace all patterns
for key in args:
args[key] = self.replacePatterns( args[key], key )
# Get all of the input/output layer names:
inputBankNames = [layer.name for layer in self.layers if layer.kind == 'Input']
outputBankNames = [layer.name for layer in self.layers if layer.kind == 'Output']
inputBankSizes = [layer.size for layer in self.layers if layer.kind == 'Input']
inputBankTotalSize = sum(inputBankSizes)
inputArgSizes = [len(args[name]) for name in inputBankNames if name in args]
inputArgTotalSize = sum(inputArgSizes)
sequenceLength = inputArgTotalSize // inputBankTotalSize
learning = self.learning
totalRetvals = (0.0, 0, 0) # error, correct, total
totalPCorrect = {}
for step in range(sequenceLength):
if self.verbosity >= 1 or self.interactive:
print("-----------------------------------Step #", step + 1)
dict = {}
dict.update(args) # in case context, or others
# now, overwrite input and output, if necessary
for name in inputBankNames:
if name in args:
patternLength = self[name].size
offset = step * patternLength
if (offset + patternLength) >= len(args[name]):
# if this seq is too big, use last part:
dict[name] = args[name][-patternLength:]
else:
# else, go to the right spot in seq:
dict[name] = args[name][offset:offset+patternLength]
for name in outputBankNames:
if name in args:
patternLength = self[name].size
offset = step * patternLength
if (offset + patternLength) >= len(args[name]):
# if this seq is too big, use last part:
dict[name] = args[name][-patternLength:]
else:
# else, go to the right spot in seq:
dict[name] = args[name][offset:offset+patternLength]
        # get info for prediction -------------------------
for p in self.prediction:
(inName, outName) = p
inLayer = self.getLayer(inName)
if not inLayer.type == 'Input':
raise LayerError('Prediction input layer not type \'Input\'.', inLayer.type)
outLayer = self.getLayer(outName)
if not outLayer.type == 'Output':
raise LayerError('Prediction output layer not type \'Output\'.', outLayer.type)
if step == sequenceLength - 1: # last one in sequence; what do we do?
start = 0 # wrap to next input vector
if not self._sweeping: # not in sweep, in step, no target
raise LayerError("Attempting to predict last item in sequence, but using step(). Use sweep() instead.")
else: # in a sweep, so get the next pattern if one:
if self.currentSweepCount == None: # last item in epoch, predict back to first pattern
# Train it to predict first pattern, first sequence item
pattern = self.getData(self.loadOrder[0])
for key in pattern:
pattern[key] = self.replacePatterns( pattern[key], key )
if inName in inputBankNames:
if inName in pattern:
dict[outName] = pattern[inName][start:start+patternLength]
#dict[outName] = pattern["input"][start:start+patternLength]
else:
pattern = self.getData(self.loadOrder[self.currentSweepCount+1])
for key in pattern:
pattern[key] = self.replacePatterns( pattern[key], key )
if inName in inputBankNames:
if inName in pattern:
dict[outName] = pattern[inName][start:start+patternLength]
#dict[outName] = pattern["input"][start:start+patternLength]
else: # in middle of sequence
start = (step + 1) * inLayer.size
dict[outName] = args[inName][start:start+patternLength]
        # end prediction code -----------------------------
if step < sequenceLength - 1: # not the last one
if not self.learnDuringSequence:
self.learning = 0
retvals = self.networkStep(**dict)
self.learning = learning # in case we turned it off
totalRetvals = list(map(lambda x,y: x+y, totalRetvals[:3], retvals[:3]))
sumMerge(totalPCorrect, retvals[3])
totalRetvals.append( totalPCorrect)
return totalRetvals | def function[step, parameter[self]]:
constant[
SRN.step()
Extends network step method by automatically copying hidden
layer activations to the context layer.
]
if compare[name[self].sequenceType equal[==] constant[None]] begin[:]
<ast.Raise object at 0x7da1b03905b0>
if compare[constant[initContext] in name[args]] begin[:]
if call[name[args]][constant[initContext]] begin[:]
call[name[self].setContext, parameter[]]
<ast.Delete object at 0x7da1b0390130>
if compare[name[self].initContext equal[==] constant[0]] begin[:]
for taget[name[context]] in starred[call[name[list], parameter[call[name[self].contextLayers.values, parameter[]]]]] begin[:]
name[context].activationSet assign[=] constant[1]
for taget[name[key]] in starred[name[args]] begin[:]
call[name[args]][name[key]] assign[=] call[name[self].replacePatterns, parameter[call[name[args]][name[key]], name[key]]]
variable[inputBankNames] assign[=] <ast.ListComp object at 0x7da1b0391300>
variable[outputBankNames] assign[=] <ast.ListComp object at 0x7da1b0390700>
variable[inputBankSizes] assign[=] <ast.ListComp object at 0x7da1b0393940>
variable[inputBankTotalSize] assign[=] call[name[sum], parameter[name[inputBankSizes]]]
variable[inputArgSizes] assign[=] <ast.ListComp object at 0x7da1b03935e0>
variable[inputArgTotalSize] assign[=] call[name[sum], parameter[name[inputArgSizes]]]
variable[sequenceLength] assign[=] binary_operation[name[inputArgTotalSize] <ast.FloorDiv object at 0x7da2590d6bc0> name[inputBankTotalSize]]
variable[learning] assign[=] name[self].learning
variable[totalRetvals] assign[=] tuple[[<ast.Constant object at 0x7da1b0393fa0>, <ast.Constant object at 0x7da1b0390460>, <ast.Constant object at 0x7da1b03934f0>]]
variable[totalPCorrect] assign[=] dictionary[[], []]
for taget[name[step]] in starred[call[name[range], parameter[name[sequenceLength]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0391030> begin[:]
call[name[print], parameter[constant[-----------------------------------Step #], binary_operation[name[step] + constant[1]]]]
variable[dict] assign[=] dictionary[[], []]
call[name[dict].update, parameter[name[args]]]
for taget[name[name]] in starred[name[inputBankNames]] begin[:]
if compare[name[name] in name[args]] begin[:]
variable[patternLength] assign[=] call[name[self]][name[name]].size
variable[offset] assign[=] binary_operation[name[step] * name[patternLength]]
if compare[binary_operation[name[offset] + name[patternLength]] greater_or_equal[>=] call[name[len], parameter[call[name[args]][name[name]]]]] begin[:]
call[name[dict]][name[name]] assign[=] call[call[name[args]][name[name]]][<ast.Slice object at 0x7da1b0390040>]
for taget[name[name]] in starred[name[outputBankNames]] begin[:]
if compare[name[name] in name[args]] begin[:]
variable[patternLength] assign[=] call[name[self]][name[name]].size
variable[offset] assign[=] binary_operation[name[step] * name[patternLength]]
if compare[binary_operation[name[offset] + name[patternLength]] greater_or_equal[>=] call[name[len], parameter[call[name[args]][name[name]]]]] begin[:]
call[name[dict]][name[name]] assign[=] call[call[name[args]][name[name]]][<ast.Slice object at 0x7da1b06c9930>]
for taget[name[p]] in starred[name[self].prediction] begin[:]
<ast.Tuple object at 0x7da1b06c8df0> assign[=] name[p]
variable[inLayer] assign[=] call[name[self].getLayer, parameter[name[inName]]]
if <ast.UnaryOp object at 0x7da1b06cbe20> begin[:]
<ast.Raise object at 0x7da1b06cae60>
variable[outLayer] assign[=] call[name[self].getLayer, parameter[name[outName]]]
if <ast.UnaryOp object at 0x7da1b06cb970> begin[:]
<ast.Raise object at 0x7da1b06ca230>
if compare[name[step] equal[==] binary_operation[name[sequenceLength] - constant[1]]] begin[:]
variable[start] assign[=] constant[0]
if <ast.UnaryOp object at 0x7da1b06cb1f0> begin[:]
<ast.Raise object at 0x7da1b06c97e0>
if compare[name[step] less[<] binary_operation[name[sequenceLength] - constant[1]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0359750> begin[:]
name[self].learning assign[=] constant[0]
variable[retvals] assign[=] call[name[self].networkStep, parameter[]]
name[self].learning assign[=] name[learning]
variable[totalRetvals] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b035a470>, call[name[totalRetvals]][<ast.Slice object at 0x7da1b035b3a0>], call[name[retvals]][<ast.Slice object at 0x7da1b035a080>]]]]]
call[name[sumMerge], parameter[name[totalPCorrect], call[name[retvals]][constant[3]]]]
call[name[totalRetvals].append, parameter[name[totalPCorrect]]]
return[name[totalRetvals]] | keyword[def] identifier[step] ( identifier[self] ,** identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[sequenceType] == keyword[None] :
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[args] :
keyword[if] identifier[args] [ literal[string] ]:
identifier[self] . identifier[setContext] ()
keyword[del] identifier[args] [ literal[string] ]
keyword[elif] identifier[self] . identifier[initContext] :
identifier[self] . identifier[setContext] ()
keyword[if] identifier[self] . identifier[initContext] == literal[int] :
keyword[for] identifier[context] keyword[in] identifier[list] ( identifier[self] . identifier[contextLayers] . identifier[values] ()):
identifier[context] . identifier[activationSet] = literal[int]
keyword[for] identifier[key] keyword[in] identifier[args] :
identifier[args] [ identifier[key] ]= identifier[self] . identifier[replacePatterns] ( identifier[args] [ identifier[key] ], identifier[key] )
identifier[inputBankNames] =[ identifier[layer] . identifier[name] keyword[for] identifier[layer] keyword[in] identifier[self] . identifier[layers] keyword[if] identifier[layer] . identifier[kind] == literal[string] ]
identifier[outputBankNames] =[ identifier[layer] . identifier[name] keyword[for] identifier[layer] keyword[in] identifier[self] . identifier[layers] keyword[if] identifier[layer] . identifier[kind] == literal[string] ]
identifier[inputBankSizes] =[ identifier[layer] . identifier[size] keyword[for] identifier[layer] keyword[in] identifier[self] . identifier[layers] keyword[if] identifier[layer] . identifier[kind] == literal[string] ]
identifier[inputBankTotalSize] = identifier[sum] ( identifier[inputBankSizes] )
identifier[inputArgSizes] =[ identifier[len] ( identifier[args] [ identifier[name] ]) keyword[for] identifier[name] keyword[in] identifier[inputBankNames] keyword[if] identifier[name] keyword[in] identifier[args] ]
identifier[inputArgTotalSize] = identifier[sum] ( identifier[inputArgSizes] )
identifier[sequenceLength] = identifier[inputArgTotalSize] // identifier[inputBankTotalSize]
identifier[learning] = identifier[self] . identifier[learning]
identifier[totalRetvals] =( literal[int] , literal[int] , literal[int] )
identifier[totalPCorrect] ={}
keyword[for] identifier[step] keyword[in] identifier[range] ( identifier[sequenceLength] ):
keyword[if] identifier[self] . identifier[verbosity] >= literal[int] keyword[or] identifier[self] . identifier[interactive] :
identifier[print] ( literal[string] , identifier[step] + literal[int] )
identifier[dict] ={}
identifier[dict] . identifier[update] ( identifier[args] )
keyword[for] identifier[name] keyword[in] identifier[inputBankNames] :
keyword[if] identifier[name] keyword[in] identifier[args] :
identifier[patternLength] = identifier[self] [ identifier[name] ]. identifier[size]
identifier[offset] = identifier[step] * identifier[patternLength]
keyword[if] ( identifier[offset] + identifier[patternLength] )>= identifier[len] ( identifier[args] [ identifier[name] ]):
identifier[dict] [ identifier[name] ]= identifier[args] [ identifier[name] ][- identifier[patternLength] :]
keyword[else] :
identifier[dict] [ identifier[name] ]= identifier[args] [ identifier[name] ][ identifier[offset] : identifier[offset] + identifier[patternLength] ]
keyword[for] identifier[name] keyword[in] identifier[outputBankNames] :
keyword[if] identifier[name] keyword[in] identifier[args] :
identifier[patternLength] = identifier[self] [ identifier[name] ]. identifier[size]
identifier[offset] = identifier[step] * identifier[patternLength]
keyword[if] ( identifier[offset] + identifier[patternLength] )>= identifier[len] ( identifier[args] [ identifier[name] ]):
identifier[dict] [ identifier[name] ]= identifier[args] [ identifier[name] ][- identifier[patternLength] :]
keyword[else] :
identifier[dict] [ identifier[name] ]= identifier[args] [ identifier[name] ][ identifier[offset] : identifier[offset] + identifier[patternLength] ]
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[prediction] :
( identifier[inName] , identifier[outName] )= identifier[p]
identifier[inLayer] = identifier[self] . identifier[getLayer] ( identifier[inName] )
keyword[if] keyword[not] identifier[inLayer] . identifier[type] == literal[string] :
keyword[raise] identifier[LayerError] ( literal[string] , identifier[inLayer] . identifier[type] )
identifier[outLayer] = identifier[self] . identifier[getLayer] ( identifier[outName] )
keyword[if] keyword[not] identifier[outLayer] . identifier[type] == literal[string] :
keyword[raise] identifier[LayerError] ( literal[string] , identifier[outLayer] . identifier[type] )
keyword[if] identifier[step] == identifier[sequenceLength] - literal[int] :
identifier[start] = literal[int]
keyword[if] keyword[not] identifier[self] . identifier[_sweeping] :
keyword[raise] identifier[LayerError] ( literal[string] )
keyword[else] :
keyword[if] identifier[self] . identifier[currentSweepCount] == keyword[None] :
identifier[pattern] = identifier[self] . identifier[getData] ( identifier[self] . identifier[loadOrder] [ literal[int] ])
keyword[for] identifier[key] keyword[in] identifier[pattern] :
identifier[pattern] [ identifier[key] ]= identifier[self] . identifier[replacePatterns] ( identifier[pattern] [ identifier[key] ], identifier[key] )
keyword[if] identifier[inName] keyword[in] identifier[inputBankNames] :
keyword[if] identifier[inName] keyword[in] identifier[pattern] :
identifier[dict] [ identifier[outName] ]= identifier[pattern] [ identifier[inName] ][ identifier[start] : identifier[start] + identifier[patternLength] ]
keyword[else] :
identifier[pattern] = identifier[self] . identifier[getData] ( identifier[self] . identifier[loadOrder] [ identifier[self] . identifier[currentSweepCount] + literal[int] ])
keyword[for] identifier[key] keyword[in] identifier[pattern] :
identifier[pattern] [ identifier[key] ]= identifier[self] . identifier[replacePatterns] ( identifier[pattern] [ identifier[key] ], identifier[key] )
keyword[if] identifier[inName] keyword[in] identifier[inputBankNames] :
keyword[if] identifier[inName] keyword[in] identifier[pattern] :
identifier[dict] [ identifier[outName] ]= identifier[pattern] [ identifier[inName] ][ identifier[start] : identifier[start] + identifier[patternLength] ]
keyword[else] :
identifier[start] =( identifier[step] + literal[int] )* identifier[inLayer] . identifier[size]
identifier[dict] [ identifier[outName] ]= identifier[args] [ identifier[inName] ][ identifier[start] : identifier[start] + identifier[patternLength] ]
keyword[if] identifier[step] < identifier[sequenceLength] - literal[int] :
keyword[if] keyword[not] identifier[self] . identifier[learnDuringSequence] :
identifier[self] . identifier[learning] = literal[int]
identifier[retvals] = identifier[self] . identifier[networkStep] (** identifier[dict] )
identifier[self] . identifier[learning] = identifier[learning]
identifier[totalRetvals] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] + identifier[y] , identifier[totalRetvals] [: literal[int] ], identifier[retvals] [: literal[int] ]))
identifier[sumMerge] ( identifier[totalPCorrect] , identifier[retvals] [ literal[int] ])
identifier[totalRetvals] . identifier[append] ( identifier[totalPCorrect] )
keyword[return] identifier[totalRetvals] | def step(self, **args):
"""
SRN.step()
Extends network step method by automatically copying hidden
layer activations to the context layer.
"""
if self.sequenceType == None:
raise AttributeError('sequenceType not set! Use SRN.setSequenceType() ') # depends on [control=['if'], data=[]]
# take care of any params other than layer names:
# two ways to clear context:
# 1. force it to right now with arg initContext = 1:
if 'initContext' in args:
if args['initContext']:
self.setContext() # depends on [control=['if'], data=[]]
del args['initContext'] # depends on [control=['if'], data=['args']]
# 2. have initContext be true
elif self.initContext:
self.setContext() # depends on [control=['if'], data=[]]
# if initContext is off, then we assume user knows that,
# so we reset the flags on all context layers:
if self.initContext == 0:
for context in list(self.contextLayers.values()):
context.activationSet = 1 # depends on [control=['for'], data=['context']] # depends on [control=['if'], data=[]]
# replace all patterns
for key in args:
args[key] = self.replacePatterns(args[key], key) # depends on [control=['for'], data=['key']]
# Get all of the input/output layer names:
inputBankNames = [layer.name for layer in self.layers if layer.kind == 'Input']
outputBankNames = [layer.name for layer in self.layers if layer.kind == 'Output']
inputBankSizes = [layer.size for layer in self.layers if layer.kind == 'Input']
inputBankTotalSize = sum(inputBankSizes)
inputArgSizes = [len(args[name]) for name in inputBankNames if name in args]
inputArgTotalSize = sum(inputArgSizes)
sequenceLength = inputArgTotalSize // inputBankTotalSize
learning = self.learning
totalRetvals = (0.0, 0, 0) # error, correct, total
totalPCorrect = {}
for step in range(sequenceLength):
if self.verbosity >= 1 or self.interactive:
print('-----------------------------------Step #', step + 1) # depends on [control=['if'], data=[]]
dict = {}
dict.update(args) # in case context, or others
# now, overwrite input and output, if necessary
for name in inputBankNames:
if name in args:
patternLength = self[name].size
offset = step * patternLength
if offset + patternLength >= len(args[name]):
# if this seq is too big, use last part:
dict[name] = args[name][-patternLength:] # depends on [control=['if'], data=[]]
else:
# else, go to the right spot in seq:
dict[name] = args[name][offset:offset + patternLength] # depends on [control=['if'], data=['name', 'args']] # depends on [control=['for'], data=['name']]
for name in outputBankNames:
if name in args:
patternLength = self[name].size
offset = step * patternLength
if offset + patternLength >= len(args[name]):
# if this seq is too big, use last part:
dict[name] = args[name][-patternLength:] # depends on [control=['if'], data=[]]
else:
# else, go to the right spot in seq:
dict[name] = args[name][offset:offset + patternLength] # depends on [control=['if'], data=['name', 'args']] # depends on [control=['for'], data=['name']]
        # get info for prediction -------------------------
for p in self.prediction:
(inName, outName) = p
inLayer = self.getLayer(inName)
if not inLayer.type == 'Input':
raise LayerError("Prediction input layer not type 'Input'.", inLayer.type) # depends on [control=['if'], data=[]]
outLayer = self.getLayer(outName)
if not outLayer.type == 'Output':
raise LayerError("Prediction output layer not type 'Output'.", outLayer.type) # depends on [control=['if'], data=[]]
if step == sequenceLength - 1: # last one in sequence; what do we do?
start = 0 # wrap to next input vector
if not self._sweeping: # not in sweep, in step, no target
raise LayerError('Attempting to predict last item in sequence, but using step(). Use sweep() instead.') # depends on [control=['if'], data=[]] # in a sweep, so get the next pattern if one:
elif self.currentSweepCount == None: # last item in epoch, predict back to first pattern
# Train it to predict first pattern, first sequence item
pattern = self.getData(self.loadOrder[0])
for key in pattern:
pattern[key] = self.replacePatterns(pattern[key], key) # depends on [control=['for'], data=['key']]
if inName in inputBankNames:
if inName in pattern:
dict[outName] = pattern[inName][start:start + patternLength] # depends on [control=['if'], data=['inName', 'pattern']] # depends on [control=['if'], data=['inName']] # depends on [control=['if'], data=[]]
else:
#dict[outName] = pattern["input"][start:start+patternLength]
pattern = self.getData(self.loadOrder[self.currentSweepCount + 1])
for key in pattern:
pattern[key] = self.replacePatterns(pattern[key], key) # depends on [control=['for'], data=['key']]
if inName in inputBankNames:
if inName in pattern:
dict[outName] = pattern[inName][start:start + patternLength] # depends on [control=['if'], data=['inName', 'pattern']] # depends on [control=['if'], data=['inName']] # depends on [control=['if'], data=[]]
else:
#dict[outName] = pattern["input"][start:start+patternLength]
# in middle of sequence
start = (step + 1) * inLayer.size
dict[outName] = args[inName][start:start + patternLength] # depends on [control=['for'], data=['p']]
        # end prediction code -----------------------------
if step < sequenceLength - 1: # not the last one
if not self.learnDuringSequence:
self.learning = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
retvals = self.networkStep(**dict)
self.learning = learning # in case we turned it off
totalRetvals = list(map(lambda x, y: x + y, totalRetvals[:3], retvals[:3]))
sumMerge(totalPCorrect, retvals[3])
totalRetvals.append(totalPCorrect) # depends on [control=['for'], data=['step']]
return totalRetvals |
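
Most of the bookkeeping in `step` is offset arithmetic: a whole sequence arrives flattened into one vector per bank, and each time step takes a `patternLength`-sized slice; the prediction target is simply the next step's slice. A self-contained sketch with toy data (not the conx API):

```python
# Three 2-unit input patterns flattened into one vector, as step() expects.
flat_input = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
pattern_length = 2
sequence_length = len(flat_input) // pattern_length

for step in range(sequence_length):
    offset = step * pattern_length
    current = flat_input[offset:offset + pattern_length]
    # Prediction target: the next step's slice (last-step wraparound omitted).
    target = flat_input[offset + pattern_length:offset + 2 * pattern_length]
    print(step, current, target)
# 0 [0.1, 0.2] [0.3, 0.4]
# 1 [0.3, 0.4] [0.5, 0.6]
# 2 [0.5, 0.6] []
```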
def warp_image_by_corner_points_projection(corner_points, image):
"""Given corner points of a Sudoku, warps original selection to a square image.
    :param corner_points: the four corner points (top-left, top-right,
        bottom-left, bottom-right) of the selection
    :type corner_points: list
    :param image: image to warp
    :type image: numpy.ndarray
    :return: warped, square 500x500 image of the selected region
    :rtype: numpy.ndarray
"""
    # Unpack the corner points into named variables for clarity.
top_left, top_right, bottom_left, bottom_right = np.array(corner_points)
top_edge = np.linalg.norm(top_right - top_left)
bottom_edge = np.linalg.norm(bottom_right - bottom_left)
left_edge = np.linalg.norm(top_left - bottom_left)
right_edge = np.linalg.norm(top_right - bottom_right)
L = int(np.ceil(max([top_edge, bottom_edge, left_edge, right_edge])))
src = np.array([top_left, top_right, bottom_left, bottom_right])
dst = np.array([[0, 0], [L - 1, 0], [0, L - 1], [L - 1, L - 1]])
tr = ProjectiveTransform()
tr.estimate(dst, src)
warped_image = warp(image, tr, output_shape=(L, L))
out = resize(warped_image, (500, 500))
return out | def function[warp_image_by_corner_points_projection, parameter[corner_points, image]]:
constant[Given corner points of a Sudoku, warps original selection to a square image.
:param corner_points:
:type: corner_points: list
:param image:
:type image:
:return:
:rtype:
]
<ast.Tuple object at 0x7da1b0bd02e0> assign[=] call[name[np].array, parameter[name[corner_points]]]
variable[top_edge] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[top_right] - name[top_left]]]]
variable[bottom_edge] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[bottom_right] - name[bottom_left]]]]
variable[left_edge] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[top_left] - name[bottom_left]]]]
variable[right_edge] assign[=] call[name[np].linalg.norm, parameter[binary_operation[name[top_right] - name[bottom_right]]]]
variable[L] assign[=] call[name[int], parameter[call[name[np].ceil, parameter[call[name[max], parameter[list[[<ast.Name object at 0x7da1b0bd1ba0>, <ast.Name object at 0x7da1b0bd2320>, <ast.Name object at 0x7da1b0bd2ad0>, <ast.Name object at 0x7da1b0bd3a30>]]]]]]]]
variable[src] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0bd1840>, <ast.Name object at 0x7da1b0bd03a0>, <ast.Name object at 0x7da1b0bd07c0>, <ast.Name object at 0x7da1b0bd2530>]]]]
variable[dst] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da1b0bd1a20>, <ast.List object at 0x7da1b0bd2860>, <ast.List object at 0x7da1b0bd1d50>, <ast.List object at 0x7da1b0bd13f0>]]]]
variable[tr] assign[=] call[name[ProjectiveTransform], parameter[]]
call[name[tr].estimate, parameter[name[dst], name[src]]]
variable[warped_image] assign[=] call[name[warp], parameter[name[image], name[tr]]]
variable[out] assign[=] call[name[resize], parameter[name[warped_image], tuple[[<ast.Constant object at 0x7da1b0a71de0>, <ast.Constant object at 0x7da1b0a72020>]]]]
return[name[out]] | keyword[def] identifier[warp_image_by_corner_points_projection] ( identifier[corner_points] , identifier[image] ):
literal[string]
identifier[top_left] , identifier[top_right] , identifier[bottom_left] , identifier[bottom_right] = identifier[np] . identifier[array] ( identifier[corner_points] )
identifier[top_edge] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[top_right] - identifier[top_left] )
identifier[bottom_edge] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[bottom_right] - identifier[bottom_left] )
identifier[left_edge] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[top_left] - identifier[bottom_left] )
identifier[right_edge] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[top_right] - identifier[bottom_right] )
identifier[L] = identifier[int] ( identifier[np] . identifier[ceil] ( identifier[max] ([ identifier[top_edge] , identifier[bottom_edge] , identifier[left_edge] , identifier[right_edge] ])))
identifier[src] = identifier[np] . identifier[array] ([ identifier[top_left] , identifier[top_right] , identifier[bottom_left] , identifier[bottom_right] ])
identifier[dst] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] ],[ identifier[L] - literal[int] , literal[int] ],[ literal[int] , identifier[L] - literal[int] ],[ identifier[L] - literal[int] , identifier[L] - literal[int] ]])
identifier[tr] = identifier[ProjectiveTransform] ()
identifier[tr] . identifier[estimate] ( identifier[dst] , identifier[src] )
identifier[warped_image] = identifier[warp] ( identifier[image] , identifier[tr] , identifier[output_shape] =( identifier[L] , identifier[L] ))
identifier[out] = identifier[resize] ( identifier[warped_image] ,( literal[int] , literal[int] ))
keyword[return] identifier[out] | def warp_image_by_corner_points_projection(corner_points, image):
"""Given corner points of a Sudoku, warps original selection to a square image.
    :param corner_points: the four corner points (top-left, top-right,
        bottom-left, bottom-right) of the selection
    :type corner_points: list
    :param image: image to warp
    :type image: numpy.ndarray
    :return: warped, square 500x500 image of the selected region
    :rtype: numpy.ndarray
"""
    # Unpack the corner points into named variables for clarity.
(top_left, top_right, bottom_left, bottom_right) = np.array(corner_points)
top_edge = np.linalg.norm(top_right - top_left)
bottom_edge = np.linalg.norm(bottom_right - bottom_left)
left_edge = np.linalg.norm(top_left - bottom_left)
right_edge = np.linalg.norm(top_right - bottom_right)
L = int(np.ceil(max([top_edge, bottom_edge, left_edge, right_edge])))
src = np.array([top_left, top_right, bottom_left, bottom_right])
dst = np.array([[0, 0], [L - 1, 0], [0, L - 1], [L - 1, L - 1]])
tr = ProjectiveTransform()
tr.estimate(dst, src)
warped_image = warp(image, tr, output_shape=(L, L))
out = resize(warped_image, (500, 500))
return out |
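
To make the output-size choice concrete: `L` is the ceiling of the longest quadrilateral edge, so no side of the selection is downsampled before the final 500x500 resize. A runnable sketch with invented corner coordinates:

```python
import numpy as np

# Toy corners in (x, y) order: top-left, top-right, bottom-left, bottom-right.
top_left, top_right, bottom_left, bottom_right = np.array(
    [(50, 40), (350, 60), (60, 360), (340, 340)])

edges = [np.linalg.norm(top_right - top_left),
         np.linalg.norm(bottom_right - bottom_left),
         np.linalg.norm(top_left - bottom_left),
         np.linalg.norm(top_right - bottom_right)]
L = int(np.ceil(max(edges)))
print(L)  # 321 for these toy corners: ceil(sqrt(10**2 + 320**2))
```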
def return_hdr(self):
"""Return the header for further use.
Returns
-------
subj_id : str
subject identification code
start_time : datetime
start time of the dataset
s_freq : float
sampling frequency
chan_name : list of str
list of all the channels
n_samples : int
number of samples in the dataset
orig : dict
the json file
"""
with open(self.filename, 'r') as f:
orig = load(f)
start_time = datetime.strptime(orig['start_time'],
'%Y-%m-%d %H:%M:%S.%f')
self.memshape = (len(orig['chan_name']),
orig['n_samples'])
self.dtype = orig.get('dtype', 'float64')
return (orig['subj_id'], start_time, orig['s_freq'], orig['chan_name'],
orig['n_samples'], orig) | def function[return_hdr, parameter[self]]:
constant[Return the header for further use.
Returns
-------
subj_id : str
subject identification code
start_time : datetime
start time of the dataset
s_freq : float
sampling frequency
chan_name : list of str
list of all the channels
n_samples : int
number of samples in the dataset
orig : dict
the json file
]
with call[name[open], parameter[name[self].filename, constant[r]]] begin[:]
variable[orig] assign[=] call[name[load], parameter[name[f]]]
variable[start_time] assign[=] call[name[datetime].strptime, parameter[call[name[orig]][constant[start_time]], constant[%Y-%m-%d %H:%M:%S.%f]]]
name[self].memshape assign[=] tuple[[<ast.Call object at 0x7da1b0ef0850>, <ast.Subscript object at 0x7da1b0ef09d0>]]
name[self].dtype assign[=] call[name[orig].get, parameter[constant[dtype], constant[float64]]]
return[tuple[[<ast.Subscript object at 0x7da1b0ddc850>, <ast.Name object at 0x7da1b0ddcb50>, <ast.Subscript object at 0x7da1b0ddd1b0>, <ast.Subscript object at 0x7da1b0ddcf40>, <ast.Subscript object at 0x7da1b0e071f0>, <ast.Name object at 0x7da1b0e040a0>]]] | keyword[def] identifier[return_hdr] ( identifier[self] ):
literal[string]
keyword[with] identifier[open] ( identifier[self] . identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[orig] = identifier[load] ( identifier[f] )
identifier[start_time] = identifier[datetime] . identifier[strptime] ( identifier[orig] [ literal[string] ],
literal[string] )
identifier[self] . identifier[memshape] =( identifier[len] ( identifier[orig] [ literal[string] ]),
identifier[orig] [ literal[string] ])
identifier[self] . identifier[dtype] = identifier[orig] . identifier[get] ( literal[string] , literal[string] )
keyword[return] ( identifier[orig] [ literal[string] ], identifier[start_time] , identifier[orig] [ literal[string] ], identifier[orig] [ literal[string] ],
identifier[orig] [ literal[string] ], identifier[orig] ) | def return_hdr(self):
"""Return the header for further use.
Returns
-------
subj_id : str
subject identification code
start_time : datetime
start time of the dataset
s_freq : float
sampling frequency
chan_name : list of str
list of all the channels
n_samples : int
number of samples in the dataset
orig : dict
the json file
"""
with open(self.filename, 'r') as f:
orig = load(f) # depends on [control=['with'], data=['f']]
start_time = datetime.strptime(orig['start_time'], '%Y-%m-%d %H:%M:%S.%f')
self.memshape = (len(orig['chan_name']), orig['n_samples'])
self.dtype = orig.get('dtype', 'float64')
return (orig['subj_id'], start_time, orig['s_freq'], orig['chan_name'], orig['n_samples'], orig) |
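
The reader expects a JSON header containing at least the keys accessed above; `dtype` is optional and falls back to `'float64'`. An illustrative header (all field values invented) together with the same parsing steps:

```python
from datetime import datetime

hdr = {
    'subj_id': 'S001',
    'start_time': '2020-01-01 08:30:00.000000',
    's_freq': 512.0,
    'chan_name': ['Fz', 'Cz', 'Pz'],
    'n_samples': 1024,
    # 'dtype' may be omitted; return_hdr() falls back to 'float64'.
}
start = datetime.strptime(hdr['start_time'], '%Y-%m-%d %H:%M:%S.%f')
memshape = (len(hdr['chan_name']), hdr['n_samples'])   # (3, 1024)
dtype = hdr.get('dtype', 'float64')
```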
def main():
"""Entry point when module is run from command line"""
parser = argparse.ArgumentParser(description='Run the chaid algorithm on a'
' csv/sav file.')
parser.add_argument('file')
parser.add_argument('dependent_variable', nargs=1)
parser.add_argument('--dependent-variable-type', type=str)
var = parser.add_argument_group('Independent Variable Specification')
var.add_argument('nominal_variables', nargs='*', help='The names of '
'independent variables to use that have no intrinsic '
'order to them')
var.add_argument('--ordinal-variables', type=str, nargs='*',
help='The names of independent variables to use that '
'have an intrinsic order but a finite amount of states')
parser.add_argument('--weights', type=str, help='Name of weight column')
parser.add_argument('--max-depth', type=int, help='Max depth of generated '
'tree')
parser.add_argument('--min-parent-node-size', type=int, help='Minimum number of '
'samples required to split the parent node')
parser.add_argument('--min-child-node-size', type=int, help='Minimum number of '
'samples required to split the child node')
parser.add_argument('--alpha-merge', type=float, help='Alpha Merge')
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--classify', action='store_true', help='Add column to'
' input with the node id of the node that that '
'respondent has been placed into')
group.add_argument('--predict', action='store_true', help='Add column to '
'input with the value of the dependent variable that '
'the majority of respondents in that node selected')
group.add_argument('--rules', action='store_true')
group.add_argument('--export', action='store_true', help='Whether to export the chart to pdf/dot')
group.add_argument('--export-path', type=str, help='Path to store chart output')
nspace = parser.parse_args()
if nspace.file[-4:] == '.csv':
data = pd.read_csv(nspace.file)
elif nspace.file[-4:] == '.sav':
import savReaderWriter as spss
raw_data = spss.SavReader(nspace.file, returnHeader=True)
raw_data_list = list(raw_data)
data = pd.DataFrame(raw_data_list)
data = data.rename(columns=data.loc[0]).iloc[1:]
else:
print('Unknown file type')
exit(1)
config = {}
if nspace.max_depth:
config['max_depth'] = nspace.max_depth
if nspace.alpha_merge:
config['alpha_merge'] = nspace.alpha_merge
if nspace.min_parent_node_size:
config['min_parent_node_size'] = nspace.min_parent_node_size
if nspace.min_child_node_size:
config['min_child_node_size'] = nspace.min_child_node_size
if nspace.weights:
config['weight'] = nspace.weights
if nspace.dependent_variable_type:
config['dep_variable_type'] = nspace.dependent_variable_type
ordinal = nspace.ordinal_variables or []
nominal = nspace.nominal_variables or []
independent_variables = nominal + ordinal
types = dict(zip(nominal + ordinal, ['nominal'] * len(nominal) + ['ordinal'] * len(ordinal)))
if len(independent_variables) == 0:
print('Need to provide at least one independent variable')
exit(1)
tree = Tree.from_pandas_df(data, types, nspace.dependent_variable[0],
**config)
if nspace.export or nspace.export_path:
tree.render(nspace.export_path, True)
if nspace.classify:
predictions = pd.Series(tree.node_predictions())
predictions.name = 'node_id'
data = pd.concat([data, predictions], axis=1)
print(data.to_csv())
elif nspace.predict:
predictions = pd.Series(tree.model_predictions())
predictions.name = 'predicted'
data = pd.concat([data, predictions], axis=1)
print(data.to_csv())
elif nspace.rules:
print('\n'.join(str(x) for x in tree.classification_rules()))
else:
tree.print_tree()
print('Accuracy: ', tree.accuracy()) | def function[main, parameter[]]:
constant[Entry point when module is run from command line]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[file]]]
call[name[parser].add_argument, parameter[constant[dependent_variable]]]
call[name[parser].add_argument, parameter[constant[--dependent-variable-type]]]
variable[var] assign[=] call[name[parser].add_argument_group, parameter[constant[Independent Variable Specification]]]
call[name[var].add_argument, parameter[constant[nominal_variables]]]
call[name[var].add_argument, parameter[constant[--ordinal-variables]]]
call[name[parser].add_argument, parameter[constant[--weights]]]
call[name[parser].add_argument, parameter[constant[--max-depth]]]
call[name[parser].add_argument, parameter[constant[--min-parent-node-size]]]
call[name[parser].add_argument, parameter[constant[--min-child-node-size]]]
call[name[parser].add_argument, parameter[constant[--alpha-merge]]]
variable[group] assign[=] call[name[parser].add_mutually_exclusive_group, parameter[]]
call[name[group].add_argument, parameter[constant[--classify]]]
call[name[group].add_argument, parameter[constant[--predict]]]
call[name[group].add_argument, parameter[constant[--rules]]]
call[name[group].add_argument, parameter[constant[--export]]]
call[name[group].add_argument, parameter[constant[--export-path]]]
variable[nspace] assign[=] call[name[parser].parse_args, parameter[]]
if compare[call[name[nspace].file][<ast.Slice object at 0x7da18f09fb20>] equal[==] constant[.csv]] begin[:]
variable[data] assign[=] call[name[pd].read_csv, parameter[name[nspace].file]]
variable[config] assign[=] dictionary[[], []]
if name[nspace].max_depth begin[:]
call[name[config]][constant[max_depth]] assign[=] name[nspace].max_depth
if name[nspace].alpha_merge begin[:]
call[name[config]][constant[alpha_merge]] assign[=] name[nspace].alpha_merge
if name[nspace].min_parent_node_size begin[:]
call[name[config]][constant[min_parent_node_size]] assign[=] name[nspace].min_parent_node_size
if name[nspace].min_child_node_size begin[:]
call[name[config]][constant[min_child_node_size]] assign[=] name[nspace].min_child_node_size
if name[nspace].weights begin[:]
call[name[config]][constant[weight]] assign[=] name[nspace].weights
if name[nspace].dependent_variable_type begin[:]
call[name[config]][constant[dep_variable_type]] assign[=] name[nspace].dependent_variable_type
variable[ordinal] assign[=] <ast.BoolOp object at 0x7da18dc05d80>
variable[nominal] assign[=] <ast.BoolOp object at 0x7da18dc06b90>
variable[independent_variables] assign[=] binary_operation[name[nominal] + name[ordinal]]
variable[types] assign[=] call[name[dict], parameter[call[name[zip], parameter[binary_operation[name[nominal] + name[ordinal]], binary_operation[binary_operation[list[[<ast.Constant object at 0x7da18dc06d70>]] * call[name[len], parameter[name[nominal]]]] + binary_operation[list[[<ast.Constant object at 0x7da18dc07970>]] * call[name[len], parameter[name[ordinal]]]]]]]]]
if compare[call[name[len], parameter[name[independent_variables]]] equal[==] constant[0]] begin[:]
call[name[print], parameter[constant[Need to provide at least one independent variable]]]
call[name[exit], parameter[constant[1]]]
variable[tree] assign[=] call[name[Tree].from_pandas_df, parameter[name[data], name[types], call[name[nspace].dependent_variable][constant[0]]]]
if <ast.BoolOp object at 0x7da18dc067d0> begin[:]
call[name[tree].render, parameter[name[nspace].export_path, constant[True]]]
if name[nspace].classify begin[:]
variable[predictions] assign[=] call[name[pd].Series, parameter[call[name[tree].node_predictions, parameter[]]]]
name[predictions].name assign[=] constant[node_id]
variable[data] assign[=] call[name[pd].concat, parameter[list[[<ast.Name object at 0x7da18dc06110>, <ast.Name object at 0x7da18dc073d0>]]]]
call[name[print], parameter[call[name[data].to_csv, parameter[]]]] | keyword[def] identifier[main] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[int] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] )
identifier[var] = identifier[parser] . identifier[add_argument_group] ( literal[string] )
identifier[var] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[help] = literal[string]
literal[string]
literal[string] )
identifier[var] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[nargs] = literal[string] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[float] , identifier[help] = literal[string] )
identifier[group] = identifier[parser] . identifier[add_mutually_exclusive_group] ( identifier[required] = keyword[False] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string]
literal[string]
literal[string] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string]
literal[string]
literal[string] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] )
identifier[group] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] )
identifier[nspace] = identifier[parser] . identifier[parse_args] ()
keyword[if] identifier[nspace] . identifier[file] [- literal[int] :]== literal[string] :
identifier[data] = identifier[pd] . identifier[read_csv] ( identifier[nspace] . identifier[file] )
keyword[elif] identifier[nspace] . identifier[file] [- literal[int] :]== literal[string] :
keyword[import] identifier[savReaderWriter] keyword[as] identifier[spss]
identifier[raw_data] = identifier[spss] . identifier[SavReader] ( identifier[nspace] . identifier[file] , identifier[returnHeader] = keyword[True] )
identifier[raw_data_list] = identifier[list] ( identifier[raw_data] )
identifier[data] = identifier[pd] . identifier[DataFrame] ( identifier[raw_data_list] )
identifier[data] = identifier[data] . identifier[rename] ( identifier[columns] = identifier[data] . identifier[loc] [ literal[int] ]). identifier[iloc] [ literal[int] :]
keyword[else] :
identifier[print] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[config] ={}
keyword[if] identifier[nspace] . identifier[max_depth] :
identifier[config] [ literal[string] ]= identifier[nspace] . identifier[max_depth]
keyword[if] identifier[nspace] . identifier[alpha_merge] :
identifier[config] [ literal[string] ]= identifier[nspace] . identifier[alpha_merge]
keyword[if] identifier[nspace] . identifier[min_parent_node_size] :
identifier[config] [ literal[string] ]= identifier[nspace] . identifier[min_parent_node_size]
keyword[if] identifier[nspace] . identifier[min_child_node_size] :
identifier[config] [ literal[string] ]= identifier[nspace] . identifier[min_child_node_size]
keyword[if] identifier[nspace] . identifier[weights] :
identifier[config] [ literal[string] ]= identifier[nspace] . identifier[weights]
keyword[if] identifier[nspace] . identifier[dependent_variable_type] :
identifier[config] [ literal[string] ]= identifier[nspace] . identifier[dependent_variable_type]
identifier[ordinal] = identifier[nspace] . identifier[ordinal_variables] keyword[or] []
identifier[nominal] = identifier[nspace] . identifier[nominal_variables] keyword[or] []
identifier[independent_variables] = identifier[nominal] + identifier[ordinal]
identifier[types] = identifier[dict] ( identifier[zip] ( identifier[nominal] + identifier[ordinal] ,[ literal[string] ]* identifier[len] ( identifier[nominal] )+[ literal[string] ]* identifier[len] ( identifier[ordinal] )))
keyword[if] identifier[len] ( identifier[independent_variables] )== literal[int] :
identifier[print] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[tree] = identifier[Tree] . identifier[from_pandas_df] ( identifier[data] , identifier[types] , identifier[nspace] . identifier[dependent_variable] [ literal[int] ],
** identifier[config] )
keyword[if] identifier[nspace] . identifier[export] keyword[or] identifier[nspace] . identifier[export_path] :
identifier[tree] . identifier[render] ( identifier[nspace] . identifier[export_path] , keyword[True] )
keyword[if] identifier[nspace] . identifier[classify] :
identifier[predictions] = identifier[pd] . identifier[Series] ( identifier[tree] . identifier[node_predictions] ())
identifier[predictions] . identifier[name] = literal[string]
identifier[data] = identifier[pd] . identifier[concat] ([ identifier[data] , identifier[predictions] ], identifier[axis] = literal[int] )
identifier[print] ( identifier[data] . identifier[to_csv] ())
keyword[elif] identifier[nspace] . identifier[predict] :
identifier[predictions] = identifier[pd] . identifier[Series] ( identifier[tree] . identifier[model_predictions] ())
identifier[predictions] . identifier[name] = literal[string]
identifier[data] = identifier[pd] . identifier[concat] ([ identifier[data] , identifier[predictions] ], identifier[axis] = literal[int] )
identifier[print] ( identifier[data] . identifier[to_csv] ())
keyword[elif] identifier[nspace] . identifier[rules] :
identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[tree] . identifier[classification_rules] ()))
keyword[else] :
identifier[tree] . identifier[print_tree] ()
identifier[print] ( literal[string] , identifier[tree] . identifier[accuracy] ()) | def main():
"""Entry point when module is run from command line"""
parser = argparse.ArgumentParser(description='Run the chaid algorithm on a csv/sav file.')
parser.add_argument('file')
parser.add_argument('dependent_variable', nargs=1)
parser.add_argument('--dependent-variable-type', type=str)
var = parser.add_argument_group('Independent Variable Specification')
var.add_argument('nominal_variables', nargs='*', help='The names of independent variables to use that have no intrinsic order to them')
var.add_argument('--ordinal-variables', type=str, nargs='*', help='The names of independent variables to use that have an intrinsic order but a finite amount of states')
parser.add_argument('--weights', type=str, help='Name of weight column')
parser.add_argument('--max-depth', type=int, help='Max depth of generated tree')
parser.add_argument('--min-parent-node-size', type=int, help='Minimum number of samples required to split the parent node')
parser.add_argument('--min-child-node-size', type=int, help='Minimum number of samples required to split the child node')
parser.add_argument('--alpha-merge', type=float, help='Alpha Merge')
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--classify', action='store_true', help='Add column to input with the node id of the node that that respondent has been placed into')
group.add_argument('--predict', action='store_true', help='Add column to input with the value of the dependent variable that the majority of respondents in that node selected')
group.add_argument('--rules', action='store_true')
group.add_argument('--export', action='store_true', help='Whether to export the chart to pdf/dot')
group.add_argument('--export-path', type=str, help='Path to store chart output')
nspace = parser.parse_args()
if nspace.file[-4:] == '.csv':
data = pd.read_csv(nspace.file) # depends on [control=['if'], data=[]]
elif nspace.file[-4:] == '.sav':
import savReaderWriter as spss
raw_data = spss.SavReader(nspace.file, returnHeader=True)
raw_data_list = list(raw_data)
data = pd.DataFrame(raw_data_list)
data = data.rename(columns=data.loc[0]).iloc[1:] # depends on [control=['if'], data=[]]
else:
print('Unknown file type')
exit(1)
config = {}
if nspace.max_depth:
config['max_depth'] = nspace.max_depth # depends on [control=['if'], data=[]]
if nspace.alpha_merge:
config['alpha_merge'] = nspace.alpha_merge # depends on [control=['if'], data=[]]
if nspace.min_parent_node_size:
config['min_parent_node_size'] = nspace.min_parent_node_size # depends on [control=['if'], data=[]]
if nspace.min_child_node_size:
config['min_child_node_size'] = nspace.min_child_node_size # depends on [control=['if'], data=[]]
if nspace.weights:
config['weight'] = nspace.weights # depends on [control=['if'], data=[]]
if nspace.dependent_variable_type:
config['dep_variable_type'] = nspace.dependent_variable_type # depends on [control=['if'], data=[]]
ordinal = nspace.ordinal_variables or []
nominal = nspace.nominal_variables or []
independent_variables = nominal + ordinal
types = dict(zip(nominal + ordinal, ['nominal'] * len(nominal) + ['ordinal'] * len(ordinal)))
if len(independent_variables) == 0:
print('Need to provide at least one independent variable')
exit(1) # depends on [control=['if'], data=[]]
tree = Tree.from_pandas_df(data, types, nspace.dependent_variable[0], **config)
if nspace.export or nspace.export_path:
tree.render(nspace.export_path, True) # depends on [control=['if'], data=[]]
if nspace.classify:
predictions = pd.Series(tree.node_predictions())
predictions.name = 'node_id'
data = pd.concat([data, predictions], axis=1)
print(data.to_csv()) # depends on [control=['if'], data=[]]
elif nspace.predict:
predictions = pd.Series(tree.model_predictions())
predictions.name = 'predicted'
data = pd.concat([data, predictions], axis=1)
print(data.to_csv()) # depends on [control=['if'], data=[]]
elif nspace.rules:
print('\n'.join((str(x) for x in tree.classification_rules()))) # depends on [control=['if'], data=[]]
else:
tree.print_tree()
print('Accuracy: ', tree.accuracy()) |
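A hedged end-to-end sketch of the same pipeline main() drives, bypassing argparse. It assumes `Tree` is the CHAID class the module above already imports; the column names and data are invented.

import pandas as pd
from CHAID import Tree  # assumption: the same Tree the module above uses

data = pd.DataFrame({
    'gender': ['m', 'f', 'm', 'f'] * 25,
    'region': ['n', 's', 'e', 'w'] * 25,
    'bought': [0, 1, 1, 0] * 25,
})
types = {'gender': 'nominal', 'region': 'nominal'}  # independent variables
tree = Tree.from_pandas_df(data, types, 'bought', max_depth=2)
tree.print_tree()
print('Accuracy: ', tree.accuracy())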
def _default_error_handler(msg, _):
"""Default error handler callback for libopenjp2."""
msg = "OpenJPEG library error: {0}".format(msg.decode('utf-8').rstrip())
opj2.set_error_message(msg) | def function[_default_error_handler, parameter[msg, _]]:
constant[Default error handler callback for libopenjp2.]
variable[msg] assign[=] call[constant[OpenJPEG library error: {0}].format, parameter[call[call[name[msg].decode, parameter[constant[utf-8]]].rstrip, parameter[]]]]
call[name[opj2].set_error_message, parameter[name[msg]]] | keyword[def] identifier[_default_error_handler] ( identifier[msg] , identifier[_] ):
literal[string]
identifier[msg] = literal[string] . identifier[format] ( identifier[msg] . identifier[decode] ( literal[string] ). identifier[rstrip] ())
identifier[opj2] . identifier[set_error_message] ( identifier[msg] ) | def _default_error_handler(msg, _):
"""Default error handler callback for libopenjp2."""
msg = 'OpenJPEG library error: {0}'.format(msg.decode('utf-8').rstrip())
opj2.set_error_message(msg) |
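For a callback like the one above to reach a C library it is usually wrapped in a ctypes function pointer. The signature below (message string plus an opaque client-data pointer) is an assumption about libopenjp2's callback type, not something confirmed by this source.

import ctypes

# Assumed callback type: void (*)(const char *msg, void *client_data).
ERROR_CALLBACK = ctypes.CFUNCTYPE(None, ctypes.c_char_p, ctypes.c_void_p)
_handler = ERROR_CALLBACK(_default_error_handler)
# Keep `_handler` referenced for as long as the library may invoke it,
# otherwise the wrapper can be garbage-collected out from under the C code.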
def to_dict_list(df, use_ordered_dict=True):
"""Transform each row to dict, and put them into a list.
Converts a ``pandas.DataFrame`` into a list of dictionaries: the list has
one entry per row, and each dictionary corresponds to one row of the
table, i.e. one ``pandas.Series`` object.
"""
if use_ordered_dict:
dict = OrderedDict
columns = df.columns
data = list()
for tp in itertuple(df):
data.append(dict(zip(columns, tp)))
return data | def function[to_dict_list, parameter[df, use_ordered_dict]]:
constant[Transform each row to dict, and put them into a list.
Converts a ``pandas.DataFrame`` into a list of dictionaries: the list has
one entry per row, and each dictionary corresponds to one row of the
table, i.e. one ``pandas.Series`` object.
]
if name[use_ordered_dict] begin[:]
variable[dict] assign[=] name[OrderedDict]
variable[columns] assign[=] name[df].columns
variable[data] assign[=] call[name[list], parameter[]]
for taget[name[tp]] in starred[call[name[itertuple], parameter[name[df]]]] begin[:]
call[name[data].append, parameter[call[name[dict], parameter[call[name[zip], parameter[name[columns], name[tp]]]]]]]
return[name[data]] | keyword[def] identifier[to_dict_list] ( identifier[df] , identifier[use_ordered_dict] = keyword[True] ):
literal[string]
keyword[if] identifier[use_ordered_dict] :
identifier[dict] = identifier[OrderedDict]
identifier[columns] = identifier[df] . identifier[columns]
identifier[data] = identifier[list] ()
keyword[for] identifier[tp] keyword[in] identifier[itertuple] ( identifier[df] ):
identifier[data] . identifier[append] ( identifier[dict] ( identifier[zip] ( identifier[columns] , identifier[tp] )))
keyword[return] identifier[data] | def to_dict_list(df, use_ordered_dict=True):
"""Transform each row to dict, and put them into a list.
Converts a ``pandas.DataFrame`` into a list of dictionaries: the list has
one entry per row, and each dictionary corresponds to one row of the
table, i.e. one ``pandas.Series`` object.
"""
if use_ordered_dict:
dict = OrderedDict # depends on [control=['if'], data=[]]
columns = df.columns
data = list()
for tp in itertuple(df):
data.append(dict(zip(columns, tp))) # depends on [control=['for'], data=['tp']]
return data |
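A sketch of calling to_dict_list, meant to run in the same session as the function above (so its `OrderedDict` import is in scope). It assumes the module's `itertuple` helper walks rows as plain tuples, the way `itertuples(index=False)` does.

import pandas as pd

def itertuple(df):
    # assumption about the module's helper: yield each row as a tuple
    return df.itertuples(index=False, name=None)

df = pd.DataFrame({'a': [1, 2], 'b': ['x', 'y']})
print(to_dict_list(df))
# -> [OrderedDict([('a', 1), ('b', 'x')]), OrderedDict([('a', 2), ('b', 'y')])]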
async def start(self, *args, **kwargs):
"""|coro|
A shorthand coroutine for :meth:`login` + :meth:`connect`.
"""
bot = kwargs.pop('bot', True)
reconnect = kwargs.pop('reconnect', True)
await self.login(*args, bot=bot)
await self.connect(reconnect=reconnect) | <ast.AsyncFunctionDef object at 0x7da1b20407f0> | keyword[async] keyword[def] identifier[start] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[bot] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
identifier[reconnect] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
keyword[await] identifier[self] . identifier[login] (* identifier[args] , identifier[bot] = identifier[bot] )
keyword[await] identifier[self] . identifier[connect] ( identifier[reconnect] = identifier[reconnect] ) | async def start(self, *args, **kwargs):
"""|coro|
A shorthand coroutine for :meth:`login` + :meth:`connect`.
"""
bot = kwargs.pop('bot', True)
reconnect = kwargs.pop('reconnect', True)
await self.login(*args, bot=bot)
await self.connect(reconnect=reconnect) |
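A hedged driver for the coroutine above, which resembles discord.py's Client.start; the client class name and the token are placeholders, not taken from this source.

import asyncio

client = Client()  # assumption: the class that defines start() above
asyncio.run(client.start('YOUR_TOKEN', bot=True, reconnect=True))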
def copy_tree(src, dst, symlinks=False, ignore=[]):
"""Copy a full directory structure.
:param src: Source path
:param dst: Destination path
:param symlinks: Copy symlinks
:param ignore: Subdirs/filenames to ignore
"""
names = os.listdir(src)
if not os.path.exists(dst):
os.makedirs(dst)
errors = []
for name in names:
if name in ignore:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copy_tree(srcname, dstname, symlinks, ignore)
else:
copy_file(srcname, dstname)
except (IOError, os.error) as exc:
errors.append((srcname, dstname, str(exc)))
except CTError as exc:
errors.extend(exc.errors)
if errors:
raise CTError(errors) | def function[copy_tree, parameter[src, dst, symlinks, ignore]]:
constant[Copy a full directory structure.
:param src: Source path
:param dst: Destination path
:param symlinks: Copy symlinks
:param ignore: Subdirs/filenames to ignore
]
variable[names] assign[=] call[name[os].listdir, parameter[name[src]]]
if <ast.UnaryOp object at 0x7da1b1292500> begin[:]
call[name[os].makedirs, parameter[name[dst]]]
variable[errors] assign[=] list[[]]
for taget[name[name]] in starred[name[names]] begin[:]
if compare[name[name] in name[ignore]] begin[:]
continue
variable[srcname] assign[=] call[name[os].path.join, parameter[name[src], name[name]]]
variable[dstname] assign[=] call[name[os].path.join, parameter[name[dst], name[name]]]
<ast.Try object at 0x7da1b1292d70>
if name[errors] begin[:]
<ast.Raise object at 0x7da1b11a5de0> | keyword[def] identifier[copy_tree] ( identifier[src] , identifier[dst] , identifier[symlinks] = keyword[False] , identifier[ignore] =[]):
literal[string]
identifier[names] = identifier[os] . identifier[listdir] ( identifier[src] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dst] ):
identifier[os] . identifier[makedirs] ( identifier[dst] )
identifier[errors] =[]
keyword[for] identifier[name] keyword[in] identifier[names] :
keyword[if] identifier[name] keyword[in] identifier[ignore] :
keyword[continue]
identifier[srcname] = identifier[os] . identifier[path] . identifier[join] ( identifier[src] , identifier[name] )
identifier[dstname] = identifier[os] . identifier[path] . identifier[join] ( identifier[dst] , identifier[name] )
keyword[try] :
keyword[if] identifier[symlinks] keyword[and] identifier[os] . identifier[path] . identifier[islink] ( identifier[srcname] ):
identifier[linkto] = identifier[os] . identifier[readlink] ( identifier[srcname] )
identifier[os] . identifier[symlink] ( identifier[linkto] , identifier[dstname] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[srcname] ):
identifier[copy_tree] ( identifier[srcname] , identifier[dstname] , identifier[symlinks] , identifier[ignore] )
keyword[else] :
identifier[copy_file] ( identifier[srcname] , identifier[dstname] )
keyword[except] ( identifier[IOError] , identifier[os] . identifier[error] ) keyword[as] identifier[exc] :
identifier[errors] . identifier[append] (( identifier[srcname] , identifier[dstname] , identifier[str] ( identifier[exc] )))
keyword[except] identifier[CTError] keyword[as] identifier[exc] :
identifier[errors] . identifier[extend] ( identifier[exc] . identifier[errors] )
keyword[if] identifier[errors] :
keyword[raise] identifier[CTError] ( identifier[errors] ) | def copy_tree(src, dst, symlinks=False, ignore=[]):
"""Copy a full directory structure.
:param src: Source path
:param dst: Destination path
:param symlinks: Copy symlinks
:param ignore: Subdirs/filenames to ignore
"""
names = os.listdir(src)
if not os.path.exists(dst):
os.makedirs(dst) # depends on [control=['if'], data=[]]
errors = []
for name in names:
if name in ignore:
continue # depends on [control=['if'], data=[]]
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname) # depends on [control=['if'], data=[]]
elif os.path.isdir(srcname):
copy_tree(srcname, dstname, symlinks, ignore) # depends on [control=['if'], data=[]]
else:
copy_file(srcname, dstname) # depends on [control=['try'], data=[]]
except (IOError, os.error) as exc:
errors.append((srcname, dstname, str(exc))) # depends on [control=['except'], data=['exc']]
except CTError as exc:
errors.extend(exc.errors) # depends on [control=['except'], data=['exc']] # depends on [control=['for'], data=['name']]
if errors:
raise CTError(errors) # depends on [control=['if'], data=[]] |
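A usage sketch for copy_tree, assuming it runs alongside the module's copy_file helper and CTError exception; the paths are throwaway temporaries.

import os
import tempfile

src = tempfile.mkdtemp()
dst = os.path.join(tempfile.mkdtemp(), 'copy')
open(os.path.join(src, 'keep.txt'), 'w').close()
open(os.path.join(src, 'skip.log'), 'w').close()

copy_tree(src, dst, symlinks=True, ignore=['skip.log'])
print(sorted(os.listdir(dst)))  # -> ['keep.txt']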
def _parse_os_release_content(lines):
"""
Parse the lines of an os-release file.
Parameters:
* lines: Iterable through the lines in the os-release file.
Each line must be a unicode string or a UTF-8 encoded byte
string.
Returns:
A dictionary containing all information items.
"""
props = {}
lexer = shlex.shlex(lines, posix=True)
lexer.whitespace_split = True
# The shlex module defines its `wordchars` variable using literals,
# making it dependent on the encoding of the Python source file.
# In Python 2.6 and 2.7, the shlex source file is encoded in
# 'iso-8859-1', and the `wordchars` variable is defined as a byte
# string. This causes a UnicodeDecodeError to be raised when the
# parsed content is a unicode object. The following fix resolves that
# (... but it should be fixed in shlex...):
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
tokens = list(lexer)
for token in tokens:
# At this point, all shell-like parsing has been done (i.e.
# comments processed, quotes and backslash escape sequences
# processed, multi-line values assembled, trailing newlines
# stripped, etc.), so the tokens are now either:
# * variable assignments: var=value
# * commands or their arguments (not allowed in os-release)
if '=' in token:
k, v = token.split('=', 1)
if isinstance(v, bytes):
v = v.decode('utf-8')
props[k.lower()] = v
else:
# Ignore any tokens that are not variable assignments
pass
if 'version_codename' in props:
# os-release added a version_codename field. Use that in
# preference to anything else Note that some distros purposefully
# do not have code names. They should be setting
# version_codename=""
props['codename'] = props['version_codename']
elif 'ubuntu_codename' in props:
# Same as above but a non-standard field name used on older Ubuntus
props['codename'] = props['ubuntu_codename']
elif 'version' in props:
# If there is no version_codename, parse it from the version
codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version'])
if codename:
codename = codename.group()
codename = codename.strip('()')
codename = codename.strip(',')
codename = codename.strip()
# the codename appears within parentheses.
props['codename'] = codename
return props | def function[_parse_os_release_content, parameter[lines]]:
constant[
Parse the lines of an os-release file.
Parameters:
* lines: Iterable through the lines in the os-release file.
Each line must be a unicode string or a UTF-8 encoded byte
string.
Returns:
A dictionary containing all information items.
]
variable[props] assign[=] dictionary[[], []]
variable[lexer] assign[=] call[name[shlex].shlex, parameter[name[lines]]]
name[lexer].whitespace_split assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b0396b00> begin[:]
name[lexer].wordchars assign[=] call[name[lexer].wordchars.decode, parameter[constant[iso-8859-1]]]
variable[tokens] assign[=] call[name[list], parameter[name[lexer]]]
for taget[name[token]] in starred[name[tokens]] begin[:]
if compare[constant[=] in name[token]] begin[:]
<ast.Tuple object at 0x7da1b0396200> assign[=] call[name[token].split, parameter[constant[=], constant[1]]]
if call[name[isinstance], parameter[name[v], name[bytes]]] begin[:]
variable[v] assign[=] call[name[v].decode, parameter[constant[utf-8]]]
call[name[props]][call[name[k].lower, parameter[]]] assign[=] name[v]
if compare[constant[version_codename] in name[props]] begin[:]
call[name[props]][constant[codename]] assign[=] call[name[props]][constant[version_codename]]
return[name[props]] | keyword[def] identifier[_parse_os_release_content] ( identifier[lines] ):
literal[string]
identifier[props] ={}
identifier[lexer] = identifier[shlex] . identifier[shlex] ( identifier[lines] , identifier[posix] = keyword[True] )
identifier[lexer] . identifier[whitespace_split] = keyword[True]
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] keyword[and] identifier[isinstance] ( identifier[lexer] . identifier[wordchars] , identifier[bytes] ):
identifier[lexer] . identifier[wordchars] = identifier[lexer] . identifier[wordchars] . identifier[decode] ( literal[string] )
identifier[tokens] = identifier[list] ( identifier[lexer] )
keyword[for] identifier[token] keyword[in] identifier[tokens] :
keyword[if] literal[string] keyword[in] identifier[token] :
identifier[k] , identifier[v] = identifier[token] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[isinstance] ( identifier[v] , identifier[bytes] ):
identifier[v] = identifier[v] . identifier[decode] ( literal[string] )
identifier[props] [ identifier[k] . identifier[lower] ()]= identifier[v]
keyword[else] :
keyword[pass]
keyword[if] literal[string] keyword[in] identifier[props] :
identifier[props] [ literal[string] ]= identifier[props] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[props] :
identifier[props] [ literal[string] ]= identifier[props] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[props] :
identifier[codename] = identifier[re] . identifier[search] ( literal[string] , identifier[props] [ literal[string] ])
keyword[if] identifier[codename] :
identifier[codename] = identifier[codename] . identifier[group] ()
identifier[codename] = identifier[codename] . identifier[strip] ( literal[string] )
identifier[codename] = identifier[codename] . identifier[strip] ( literal[string] )
identifier[codename] = identifier[codename] . identifier[strip] ()
identifier[props] [ literal[string] ]= identifier[codename]
keyword[return] identifier[props] | def _parse_os_release_content(lines):
"""
Parse the lines of an os-release file.
Parameters:
* lines: Iterable through the lines in the os-release file.
Each line must be a unicode string or a UTF-8 encoded byte
string.
Returns:
A dictionary containing all information items.
"""
props = {}
lexer = shlex.shlex(lines, posix=True)
lexer.whitespace_split = True
# The shlex module defines its `wordchars` variable using literals,
# making it dependent on the encoding of the Python source file.
# In Python 2.6 and 2.7, the shlex source file is encoded in
# 'iso-8859-1', and the `wordchars` variable is defined as a byte
# string. This causes a UnicodeDecodeError to be raised when the
# parsed content is a unicode object. The following fix resolves that
# (... but it should be fixed in shlex...):
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
lexer.wordchars = lexer.wordchars.decode('iso-8859-1') # depends on [control=['if'], data=[]]
tokens = list(lexer)
for token in tokens:
# At this point, all shell-like parsing has been done (i.e.
# comments processed, quotes and backslash escape sequences
# processed, multi-line values assembled, trailing newlines
# stripped, etc.), so the tokens are now either:
# * variable assignments: var=value
# * commands or their arguments (not allowed in os-release)
if '=' in token:
(k, v) = token.split('=', 1)
if isinstance(v, bytes):
v = v.decode('utf-8') # depends on [control=['if'], data=[]]
props[k.lower()] = v # depends on [control=['if'], data=['token']]
else:
# Ignore any tokens that are not variable assignments
pass # depends on [control=['for'], data=['token']]
if 'version_codename' in props:
# os-release added a version_codename field. Use that in
# preference to anything else Note that some distros purposefully
# do not have code names. They should be setting
# version_codename=""
props['codename'] = props['version_codename'] # depends on [control=['if'], data=['props']]
elif 'ubuntu_codename' in props:
# Same as above but a non-standard field name used on older Ubuntus
props['codename'] = props['ubuntu_codename'] # depends on [control=['if'], data=['props']]
elif 'version' in props:
# If there is no version_codename, parse it from the version
codename = re.search('(\\(\\D+\\))|,(\\s+)?\\D+', props['version'])
if codename:
codename = codename.group()
codename = codename.strip('()')
codename = codename.strip(',')
codename = codename.strip()
# the codename appears within parentheses.
props['codename'] = codename # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['props']]
return props |
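Feeding the parser above a synthetic os-release file shows the lower-cased keys and the codename handling; the content is invented but uses the standard field names.

import io

sample = io.StringIO(
    'NAME="Ubuntu"\n'
    'VERSION="18.04.3 LTS (Bionic Beaver)"\n'
    'ID=ubuntu\n'
    'VERSION_CODENAME=bionic\n'
)
props = _parse_os_release_content(sample)
print(props['id'], props['codename'])  # -> ubuntu bionic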
def auth(username, password):
'''
Simple LDAP auth
'''
if not HAS_LDAP:
log.error('LDAP authentication requires python-ldap module')
return False
bind = None
# If bind credentials are configured, verify that we receive a valid bind
if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False):
search_bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False))
# If username & password are not None, attempt to verify they are valid
if search_bind and username and password:
bind = _bind(username, password,
anonymous=_config('auth_by_group_membership_only', mandatory=False)
and _config('anonymous', mandatory=False))
else:
bind = _bind(username, password,
anonymous=_config('auth_by_group_membership_only', mandatory=False)
and _config('anonymous', mandatory=False))
if bind:
log.debug('LDAP authentication successful')
return bind
log.error('LDAP _bind authentication FAILED')
return False | def function[auth, parameter[username, password]]:
constant[
Simple LDAP auth
]
if <ast.UnaryOp object at 0x7da1b20456c0> begin[:]
call[name[log].error, parameter[constant[LDAP authentication requires python-ldap module]]]
return[constant[False]]
variable[bind] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b2044910> begin[:]
variable[search_bind] assign[=] call[name[_bind_for_search], parameter[]]
if <ast.BoolOp object at 0x7da1b21636d0> begin[:]
variable[bind] assign[=] call[name[_bind], parameter[name[username], name[password]]]
if name[bind] begin[:]
call[name[log].debug, parameter[constant[LDAP authentication successful]]]
return[name[bind]]
call[name[log].error, parameter[constant[LDAP _bind authentication FAILED]]]
return[constant[False]] | keyword[def] identifier[auth] ( identifier[username] , identifier[password] ):
literal[string]
keyword[if] keyword[not] identifier[HAS_LDAP] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
identifier[bind] = keyword[None]
keyword[if] identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] ) keyword[and] identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] ):
identifier[search_bind] = identifier[_bind_for_search] ( identifier[anonymous] = identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] ))
keyword[if] identifier[search_bind] keyword[and] identifier[username] keyword[and] identifier[password] :
identifier[bind] = identifier[_bind] ( identifier[username] , identifier[password] ,
identifier[anonymous] = identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] )
keyword[and] identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] ))
keyword[else] :
identifier[bind] = identifier[_bind] ( identifier[username] , identifier[password] ,
identifier[anonymous] = identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] )
keyword[and] identifier[_config] ( literal[string] , identifier[mandatory] = keyword[False] ))
keyword[if] identifier[bind] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[return] identifier[bind]
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False] | def auth(username, password):
"""
Simple LDAP auth
"""
if not HAS_LDAP:
log.error('LDAP authentication requires python-ldap module')
return False # depends on [control=['if'], data=[]]
bind = None
# If bind credentials are configured, verify that we receive a valid bind
if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False):
search_bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False))
# If username & password are not None, attempt to verify they are valid
if search_bind and username and password:
bind = _bind(username, password, anonymous=_config('auth_by_group_membership_only', mandatory=False) and _config('anonymous', mandatory=False)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
bind = _bind(username, password, anonymous=_config('auth_by_group_membership_only', mandatory=False) and _config('anonymous', mandatory=False))
if bind:
log.debug('LDAP authentication successful')
return bind # depends on [control=['if'], data=[]]
log.error('LDAP _bind authentication FAILED')
return False |
def footprint(self):
"""Return product footprint."""
product_footprint = self._product_metadata.iter("Product_Footprint")
# I don't know why two "Product_Footprint" items are found.
for element in product_footprint:
global_footprint = None
for global_footprint in element.iter("Global_Footprint"):
coords = global_footprint.findtext("EXT_POS_LIST").split()
return _polygon_from_coords(coords) | def function[footprint, parameter[self]]:
constant[Return product footprint.]
variable[product_footprint] assign[=] call[name[self]._product_metadata.iter, parameter[constant[Product_Footprint]]]
for taget[name[element]] in starred[name[product_footprint]] begin[:]
variable[global_footprint] assign[=] constant[None]
for taget[name[global_footprint]] in starred[call[name[element].iter, parameter[constant[Global_Footprint]]]] begin[:]
variable[coords] assign[=] call[call[name[global_footprint].findtext, parameter[constant[EXT_POS_LIST]]].split, parameter[]]
return[call[name[_polygon_from_coords], parameter[name[coords]]]] | keyword[def] identifier[footprint] ( identifier[self] ):
literal[string]
identifier[product_footprint] = identifier[self] . identifier[_product_metadata] . identifier[iter] ( literal[string] )
keyword[for] identifier[element] keyword[in] identifier[product_footprint] :
identifier[global_footprint] = keyword[None]
keyword[for] identifier[global_footprint] keyword[in] identifier[element] . identifier[iter] ( literal[string] ):
identifier[coords] = identifier[global_footprint] . identifier[findtext] ( literal[string] ). identifier[split] ()
keyword[return] identifier[_polygon_from_coords] ( identifier[coords] ) | def footprint(self):
"""Return product footprint."""
product_footprint = self._product_metadata.iter('Product_Footprint')
# I don't know why two "Product_Footprint" items are found.
for element in product_footprint:
global_footprint = None
for global_footprint in element.iter('Global_Footprint'):
coords = global_footprint.findtext('EXT_POS_LIST').split()
return _polygon_from_coords(coords) # depends on [control=['for'], data=['global_footprint']] # depends on [control=['for'], data=['element']] |
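The property relies on a module-private `_polygon_from_coords` helper that is not shown. A plausible reading, offered purely as an assumption, is that EXT_POS_LIST is a flat coordinate sequence to be paired up into a shapely Polygon.

from shapely.geometry import Polygon

def _polygon_from_coords(coords):
    # coords: flat list of strings, e.g. ['48.1', '11.5', '48.2', '11.6', ...]
    values = [float(v) for v in coords]
    # The pairing order (lat/lon vs lon/lat) is an assumption here.
    points = list(zip(values[0::2], values[1::2]))
    return Polygon(points)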
def copystat(src, dst):
"""Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime))
if hasattr(os, 'chmod'):
os.chmod(dst, mode)
if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
try:
os.chflags(dst, st.st_flags)
except OSError as why:
if (not hasattr(errno, 'EOPNOTSUPP') or
why.errno != errno.EOPNOTSUPP):
raise | def function[copystat, parameter[src, dst]]:
constant[Copy all stat info (mode bits, atime, mtime, flags) from src to dst]
variable[st] assign[=] call[name[os].stat, parameter[name[src]]]
variable[mode] assign[=] call[name[stat].S_IMODE, parameter[name[st].st_mode]]
if call[name[hasattr], parameter[name[os], constant[utime]]] begin[:]
call[name[os].utime, parameter[name[dst], tuple[[<ast.Attribute object at 0x7da2054a7400>, <ast.Attribute object at 0x7da2054a54e0>]]]]
if call[name[hasattr], parameter[name[os], constant[chmod]]] begin[:]
call[name[os].chmod, parameter[name[dst], name[mode]]]
if <ast.BoolOp object at 0x7da2054a6920> begin[:]
<ast.Try object at 0x7da2054a72b0> | keyword[def] identifier[copystat] ( identifier[src] , identifier[dst] ):
literal[string]
identifier[st] = identifier[os] . identifier[stat] ( identifier[src] )
identifier[mode] = identifier[stat] . identifier[S_IMODE] ( identifier[st] . identifier[st_mode] )
keyword[if] identifier[hasattr] ( identifier[os] , literal[string] ):
identifier[os] . identifier[utime] ( identifier[dst] ,( identifier[st] . identifier[st_atime] , identifier[st] . identifier[st_mtime] ))
keyword[if] identifier[hasattr] ( identifier[os] , literal[string] ):
identifier[os] . identifier[chmod] ( identifier[dst] , identifier[mode] )
keyword[if] identifier[hasattr] ( identifier[os] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[st] , literal[string] ):
keyword[try] :
identifier[os] . identifier[chflags] ( identifier[dst] , identifier[st] . identifier[st_flags] )
keyword[except] identifier[OSError] keyword[as] identifier[why] :
keyword[if] ( keyword[not] identifier[hasattr] ( identifier[errno] , literal[string] ) keyword[or]
identifier[why] . identifier[errno] != identifier[errno] . identifier[EOPNOTSUPP] ):
keyword[raise] | def copystat(src, dst):
"""Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime)) # depends on [control=['if'], data=[]]
if hasattr(os, 'chmod'):
os.chmod(dst, mode) # depends on [control=['if'], data=[]]
if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
try:
os.chflags(dst, st.st_flags) # depends on [control=['try'], data=[]]
except OSError as why:
if not hasattr(errno, 'EOPNOTSUPP') or why.errno != errno.EOPNOTSUPP:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['why']] # depends on [control=['if'], data=[]] |
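A quick check of copystat, assuming the function above and its os/stat/errno imports are in scope: mode bits set on src should show up on dst.

import os
import tempfile

fd, src = tempfile.mkstemp(); os.close(fd)
fd, dst = tempfile.mkstemp(); os.close(fd)
os.chmod(src, 0o640)

copystat(src, dst)
print(oct(os.stat(dst).st_mode & 0o777))  # expected: 0o640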
def for_data_and_tracer(cls, lens_data, tracer, padded_tracer=None):
"""Fit lens data with a model tracer, automatically determining the type of fit based on the \
properties of the galaxies in the tracer.
Parameters
-----------
lens_data : lens_data.LensData or lens_data.LensDataHyper
The lens images that are fitted.
tracer : ray_tracing.TracerNonStack
The tracer, which describes the ray-tracing and strong lens configuration.
padded_tracer : ray_tracing.Tracer or None
A tracer with an identical strong lens configuration to the tracer above, but using the lens data's \
padded grid_stack such that unmasked model-images can be computed.
"""
if tracer.has_light_profile and not tracer.has_pixelization:
return LensProfileFit(lens_data=lens_data, tracer=tracer, padded_tracer=padded_tracer)
elif not tracer.has_light_profile and tracer.has_pixelization:
return LensInversionFit(lens_data=lens_data, tracer=tracer, padded_tracer=None)
elif tracer.has_light_profile and tracer.has_pixelization:
return LensProfileInversionFit(lens_data=lens_data, tracer=tracer, padded_tracer=None)
else:
raise exc.FittingException('The fit routine did not call a Fit class - check the '
'properties of the tracer') | def function[for_data_and_tracer, parameter[cls, lens_data, tracer, padded_tracer]]:
constant[Fit lens data with a model tracer, automatically determining the type of fit based on the properties of the galaxies in the tracer.
Parameters
-----------
lens_data : lens_data.LensData or lens_data.LensDataHyper
The lens images that are fitted.
tracer : ray_tracing.TracerNonStack
The tracer, which describes the ray-tracing and strong lens configuration.
padded_tracer : ray_tracing.Tracer or None
A tracer with an identical strong lens configuration to the tracer above, but using the lens data's padded grid_stack such that unmasked model-images can be computed.
]
if <ast.BoolOp object at 0x7da204622410> begin[:]
return[call[name[LensProfileFit], parameter[]]] | keyword[def] identifier[for_data_and_tracer] ( identifier[cls] , identifier[lens_data] , identifier[tracer] , identifier[padded_tracer] = keyword[None] ):
literal[string]
keyword[if] identifier[tracer] . identifier[has_light_profile] keyword[and] keyword[not] identifier[tracer] . identifier[has_pixelization] :
keyword[return] identifier[LensProfileFit] ( identifier[lens_data] = identifier[lens_data] , identifier[tracer] = identifier[tracer] , identifier[padded_tracer] = identifier[padded_tracer] )
keyword[elif] keyword[not] identifier[tracer] . identifier[has_light_profile] keyword[and] identifier[tracer] . identifier[has_pixelization] :
keyword[return] identifier[LensInversionFit] ( identifier[lens_data] = identifier[lens_data] , identifier[tracer] = identifier[tracer] , identifier[padded_tracer] = keyword[None] )
keyword[elif] identifier[tracer] . identifier[has_light_profile] keyword[and] identifier[tracer] . identifier[has_pixelization] :
keyword[return] identifier[LensProfileInversionFit] ( identifier[lens_data] = identifier[lens_data] , identifier[tracer] = identifier[tracer] , identifier[padded_tracer] = keyword[None] )
keyword[else] :
keyword[raise] identifier[exc] . identifier[FittingException] ( literal[string]
literal[string] ) | def for_data_and_tracer(cls, lens_data, tracer, padded_tracer=None):
"""Fit lens data with a model tracer, automatically determining the type of fit based on the properties of the galaxies in the tracer.
Parameters
-----------
lens_data : lens_data.LensData or lens_data.LensDataHyper
The lens images that are fitted.
tracer : ray_tracing.TracerNonStack
The tracer, which describes the ray-tracing and strong lens configuration.
padded_tracer : ray_tracing.Tracer or None
A tracer with an identical strong lens configuration to the tracer above, but using the lens data's padded grid_stack such that unmasked model-images can be computed.
"""
if tracer.has_light_profile and (not tracer.has_pixelization):
return LensProfileFit(lens_data=lens_data, tracer=tracer, padded_tracer=padded_tracer) # depends on [control=['if'], data=[]]
elif not tracer.has_light_profile and tracer.has_pixelization:
return LensInversionFit(lens_data=lens_data, tracer=tracer, padded_tracer=None) # depends on [control=['if'], data=[]]
elif tracer.has_light_profile and tracer.has_pixelization:
return LensProfileInversionFit(lens_data=lens_data, tracer=tracer, padded_tracer=None) # depends on [control=['if'], data=[]]
else:
raise exc.FittingException('The fit routine did not call a Fit class - check the properties of the tracer') |
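A toy, self-contained illustration of the property-based dispatch the factory above performs; the real tracer and fit classes live in the surrounding package and are stubbed with strings here.

class FakeTracer:
    def __init__(self, has_light_profile, has_pixelization):
        self.has_light_profile = has_light_profile
        self.has_pixelization = has_pixelization

def pick_fit(tracer):
    # Same three-way branch as for_data_and_tracer above.
    if tracer.has_light_profile and not tracer.has_pixelization:
        return 'LensProfileFit'
    if not tracer.has_light_profile and tracer.has_pixelization:
        return 'LensInversionFit'
    if tracer.has_light_profile and tracer.has_pixelization:
        return 'LensProfileInversionFit'
    raise ValueError('tracer has neither light profiles nor a pixelization')

print(pick_fit(FakeTracer(True, False)))  # -> LensProfileFit
print(pick_fit(FakeTracer(True, True)))   # -> LensProfileInversionFit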
def convert(self, txn):
"""
Convert an OFX Transaction to a posting
"""
ofxid = self.mk_ofxid(txn.id)
metadata = {}
posting_metadata = {"ofxid": ofxid}
if isinstance(txn, OfxTransaction):
posting = Posting(self.name,
Amount(txn.amount, self.currency),
metadata=posting_metadata)
return Transaction(
date=txn.date,
payee=self.format_payee(txn),
postings=[
posting,
posting.clone_inverted(
self.mk_dynamic_account(self.format_payee(txn),
exclude=self.name))])
elif isinstance(txn, InvestmentTransaction):
acct1 = self.name
acct2 = self.name
posting1 = None
posting2 = None
security = self.maybe_get_ticker(txn.security)
if isinstance(txn.type, str):
# recent versions of ofxparse
if re.match('^(buy|sell)', txn.type):
acct2 = self.unknownaccount or 'Assets:Unknown'
elif txn.type == 'transfer':
acct2 = 'Transfer'
elif txn.type == 'reinvest':
# reinvestment of income
# TODO: make this configurable
acct2 = 'Income:Interest'
elif txn.type == 'income' and txn.income_type == 'DIV':
# Fidelity lists non-reinvested dividend income as
# type: income, income_type: DIV
# TODO: determine how dividend income is listed from other institutions
# income/DIV transactions do not involve buying or selling a security
# so their postings need special handling compared to
# others
metadata['dividend_from'] = security
acct2 = 'Income:Dividends'
posting1 = Posting(acct1,
Amount(txn.total, self.currency),
metadata=posting_metadata)
posting2 = posting1.clone_inverted(acct2)
else:
# ???
pass
else:
# Old version of ofxparse
if (txn.type in [0, 1, 3, 4]):
# buymf, sellmf, buystock, sellstock
acct2 = self.unknownaccount or 'Assets:Unknown'
elif (txn.type == 2):
# reinvest
acct2 = 'Income:Interest'
else:
# ???
pass
aux_date = None
if txn.settleDate is not None and \
txn.settleDate != txn.tradeDate:
aux_date = txn.settleDate
# income/DIV already defined above;
# this block defines all other posting types
if posting1 is None and posting2 is None:
posting1 = Posting(
acct1,
Amount(
txn.units,
security,
unlimited=True),
unit_price=Amount(
txn.unit_price,
self.currency,
unlimited=True),
metadata=posting_metadata)
posting2 = Posting(
acct2,
Amount(
txn.units *
txn.unit_price,
self.currency,
reverse=True))
else:
# Previously defined if type:income income_type/DIV
pass
return Transaction(
date=txn.tradeDate,
aux_date=aux_date,
payee=self.format_payee(txn),
metadata=metadata,
postings=[posting1, posting2]
) | def function[convert, parameter[self, txn]]:
constant[
Convert an OFX Transaction to a posting
]
variable[ofxid] assign[=] call[name[self].mk_ofxid, parameter[name[txn].id]]
variable[metadata] assign[=] dictionary[[], []]
variable[posting_metadata] assign[=] dictionary[[<ast.Constant object at 0x7da20c9924d0>], [<ast.Name object at 0x7da20c990790>]]
if call[name[isinstance], parameter[name[txn], name[OfxTransaction]]] begin[:]
variable[posting] assign[=] call[name[Posting], parameter[name[self].name, call[name[Amount], parameter[name[txn].amount, name[self].currency]]]]
return[call[name[Transaction], parameter[]]] | keyword[def] identifier[convert] ( identifier[self] , identifier[txn] ):
literal[string]
identifier[ofxid] = identifier[self] . identifier[mk_ofxid] ( identifier[txn] . identifier[id] )
identifier[metadata] ={}
identifier[posting_metadata] ={ literal[string] : identifier[ofxid] }
keyword[if] identifier[isinstance] ( identifier[txn] , identifier[OfxTransaction] ):
identifier[posting] = identifier[Posting] ( identifier[self] . identifier[name] ,
identifier[Amount] ( identifier[txn] . identifier[amount] , identifier[self] . identifier[currency] ),
identifier[metadata] = identifier[posting_metadata] )
keyword[return] identifier[Transaction] (
identifier[date] = identifier[txn] . identifier[date] ,
identifier[payee] = identifier[self] . identifier[format_payee] ( identifier[txn] ),
identifier[postings] =[
identifier[posting] ,
identifier[posting] . identifier[clone_inverted] (
identifier[self] . identifier[mk_dynamic_account] ( identifier[self] . identifier[format_payee] ( identifier[txn] ),
identifier[exclude] = identifier[self] . identifier[name] ))])
keyword[elif] identifier[isinstance] ( identifier[txn] , identifier[InvestmentTransaction] ):
identifier[acct1] = identifier[self] . identifier[name]
identifier[acct2] = identifier[self] . identifier[name]
identifier[posting1] = keyword[None]
identifier[posting2] = keyword[None]
identifier[security] = identifier[self] . identifier[maybe_get_ticker] ( identifier[txn] . identifier[security] )
keyword[if] identifier[isinstance] ( identifier[txn] . identifier[type] , identifier[str] ):
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[txn] . identifier[type] ):
identifier[acct2] = identifier[self] . identifier[unknownaccount] keyword[or] literal[string]
keyword[elif] identifier[txn] . identifier[type] == literal[string] :
identifier[acct2] = literal[string]
keyword[elif] identifier[txn] . identifier[type] == literal[string] :
identifier[acct2] = literal[string]
keyword[elif] identifier[txn] . identifier[type] == literal[string] keyword[and] identifier[txn] . identifier[income_type] == literal[string] :
identifier[metadata] [ literal[string] ]= identifier[security]
identifier[acct2] = literal[string]
identifier[posting1] = identifier[Posting] ( identifier[acct1] ,
identifier[Amount] ( identifier[txn] . identifier[total] , identifier[self] . identifier[currency] ),
identifier[metadata] = identifier[posting_metadata] )
identifier[posting2] = identifier[posting1] . identifier[clone_inverted] ( identifier[acct2] )
keyword[else] :
keyword[pass]
keyword[else] :
keyword[if] ( identifier[txn] . identifier[type] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] ]):
identifier[acct2] = identifier[self] . identifier[unknownaccount] keyword[or] literal[string]
keyword[elif] ( identifier[txn] . identifier[type] == literal[int] ):
identifier[acct2] = literal[string]
keyword[else] :
keyword[pass]
identifier[aux_date] = keyword[None]
keyword[if] identifier[txn] . identifier[settleDate] keyword[is] keyword[not] keyword[None] keyword[and] identifier[txn] . identifier[settleDate] != identifier[txn] . identifier[tradeDate] :
identifier[aux_date] = identifier[txn] . identifier[settleDate]
keyword[if] identifier[posting1] keyword[is] keyword[None] keyword[and] identifier[posting2] keyword[is] keyword[None] :
identifier[posting1] = identifier[Posting] (
identifier[acct1] ,
identifier[Amount] (
identifier[txn] . identifier[units] ,
identifier[security] ,
identifier[unlimited] = keyword[True] ),
identifier[unit_price] = identifier[Amount] (
identifier[txn] . identifier[unit_price] ,
identifier[self] . identifier[currency] ,
identifier[unlimited] = keyword[True] ),
identifier[metadata] = identifier[posting_metadata] )
identifier[posting2] = identifier[Posting] (
identifier[acct2] ,
identifier[Amount] (
identifier[txn] . identifier[units] *
identifier[txn] . identifier[unit_price] ,
identifier[self] . identifier[currency] ,
identifier[reverse] = keyword[True] ))
keyword[else] :
keyword[pass]
keyword[return] identifier[Transaction] (
identifier[date] = identifier[txn] . identifier[tradeDate] ,
identifier[aux_date] = identifier[aux_date] ,
identifier[payee] = identifier[self] . identifier[format_payee] ( identifier[txn] ),
identifier[metadata] = identifier[metadata] ,
identifier[postings] =[ identifier[posting1] , identifier[posting2] ]
) | def convert(self, txn):
"""
Convert an OFX Transaction to a posting
"""
ofxid = self.mk_ofxid(txn.id)
metadata = {}
posting_metadata = {'ofxid': ofxid}
if isinstance(txn, OfxTransaction):
posting = Posting(self.name, Amount(txn.amount, self.currency), metadata=posting_metadata)
return Transaction(date=txn.date, payee=self.format_payee(txn), postings=[posting, posting.clone_inverted(self.mk_dynamic_account(self.format_payee(txn), exclude=self.name))]) # depends on [control=['if'], data=[]]
elif isinstance(txn, InvestmentTransaction):
acct1 = self.name
acct2 = self.name
posting1 = None
posting2 = None
security = self.maybe_get_ticker(txn.security)
if isinstance(txn.type, str):
# recent versions of ofxparse
if re.match('^(buy|sell)', txn.type):
acct2 = self.unknownaccount or 'Assets:Unknown' # depends on [control=['if'], data=[]]
elif txn.type == 'transfer':
acct2 = 'Transfer' # depends on [control=['if'], data=[]]
elif txn.type == 'reinvest':
# reinvestment of income
# TODO: make this configurable
acct2 = 'Income:Interest' # depends on [control=['if'], data=[]]
elif txn.type == 'income' and txn.income_type == 'DIV':
# Fidelity lists non-reinvested dividend income as
# type: income, income_type: DIV
# TODO: determine how dividend income is listed from other institutions
# income/DIV transactions do not involve buying or selling a security
# so their postings need special handling compared to
# others
metadata['dividend_from'] = security
acct2 = 'Income:Dividends'
posting1 = Posting(acct1, Amount(txn.total, self.currency), metadata=posting_metadata)
posting2 = posting1.clone_inverted(acct2) # depends on [control=['if'], data=[]]
else:
# ???
pass # depends on [control=['if'], data=[]]
# Old version of ofxparse
elif txn.type in [0, 1, 3, 4]:
# buymf, sellmf, buystock, sellstock
acct2 = self.unknownaccount or 'Assets:Unknown' # depends on [control=['if'], data=[]]
elif txn.type == 2:
# reinvest
acct2 = 'Income:Interest' # depends on [control=['if'], data=[]]
else:
# ???
pass
aux_date = None
if txn.settleDate is not None and txn.settleDate != txn.tradeDate:
aux_date = txn.settleDate # depends on [control=['if'], data=[]]
# income/DIV already defined above;
# this block defines all other posting types
if posting1 is None and posting2 is None:
posting1 = Posting(acct1, Amount(txn.units, security, unlimited=True), unit_price=Amount(txn.unit_price, self.currency, unlimited=True), metadata=posting_metadata)
posting2 = Posting(acct2, Amount(txn.units * txn.unit_price, self.currency, reverse=True)) # depends on [control=['if'], data=[]]
else:
# Previously defined if type:income income_type/DIV
pass
return Transaction(date=txn.tradeDate, aux_date=aux_date, payee=self.format_payee(txn), metadata=metadata, postings=[posting1, posting2]) # depends on [control=['if'], data=[]] |
def update_channels(self):
"""Update the GUI to reflect channels and image listing.
"""
if not self.gui_up:
return
self.logger.debug("channel configuration has changed--updating gui")
try:
channel = self.fv.get_channel(self.chname)
except KeyError:
channel = self.fv.get_channel_info()
if channel is None:
raise ValueError('No channel available')
self.chname = channel.name
w = self.w.channel_name
w.clear()
self.chnames = list(self.fv.get_channel_names())
#self.chnames.sort()
for chname in self.chnames:
w.append_text(chname)
# select the channel that is the current one
try:
i = self.chnames.index(channel.name)
except ValueError:
i = 0
self.w.channel_name.set_index(i)
# update the image listing
self.redo() | def function[update_channels, parameter[self]]:
constant[Update the GUI to reflect channels and image listing.
]
if <ast.UnaryOp object at 0x7da1b0d1b160> begin[:]
return[None]
call[name[self].logger.debug, parameter[constant[channel configuration has changed--updating gui]]]
<ast.Try object at 0x7da1b0d18a90>
if compare[name[channel] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0dc1510>
name[self].chname assign[=] name[channel].name
variable[w] assign[=] name[self].w.channel_name
call[name[w].clear, parameter[]]
name[self].chnames assign[=] call[name[list], parameter[call[name[self].fv.get_channel_names, parameter[]]]]
for taget[name[chname]] in starred[name[self].chnames] begin[:]
call[name[w].append_text, parameter[name[chname]]]
<ast.Try object at 0x7da207f99a20>
call[name[self].w.channel_name.set_index, parameter[name[i]]]
call[name[self].redo, parameter[]] | keyword[def] identifier[update_channels] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[gui_up] :
keyword[return]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[try] :
identifier[channel] = identifier[self] . identifier[fv] . identifier[get_channel] ( identifier[self] . identifier[chname] )
keyword[except] identifier[KeyError] :
identifier[channel] = identifier[self] . identifier[fv] . identifier[get_channel_info] ()
keyword[if] identifier[channel] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[chname] = identifier[channel] . identifier[name]
identifier[w] = identifier[self] . identifier[w] . identifier[channel_name]
identifier[w] . identifier[clear] ()
identifier[self] . identifier[chnames] = identifier[list] ( identifier[self] . identifier[fv] . identifier[get_channel_names] ())
keyword[for] identifier[chname] keyword[in] identifier[self] . identifier[chnames] :
identifier[w] . identifier[append_text] ( identifier[chname] )
keyword[try] :
identifier[i] = identifier[self] . identifier[chnames] . identifier[index] ( identifier[channel] . identifier[name] )
keyword[except] identifier[ValueError] :
identifier[i] = literal[int]
identifier[self] . identifier[w] . identifier[channel_name] . identifier[set_index] ( identifier[i] )
identifier[self] . identifier[redo] () | def update_channels(self):
"""Update the GUI to reflect channels and image listing.
"""
if not self.gui_up:
return # depends on [control=['if'], data=[]]
self.logger.debug('channel configuration has changed--updating gui')
try:
channel = self.fv.get_channel(self.chname) # depends on [control=['try'], data=[]]
except KeyError:
channel = self.fv.get_channel_info() # depends on [control=['except'], data=[]]
if channel is None:
raise ValueError('No channel available') # depends on [control=['if'], data=[]]
self.chname = channel.name
w = self.w.channel_name
w.clear()
self.chnames = list(self.fv.get_channel_names())
#self.chnames.sort()
for chname in self.chnames:
w.append_text(chname) # depends on [control=['for'], data=['chname']]
# select the channel that is the current one
try:
i = self.chnames.index(channel.name) # depends on [control=['try'], data=[]]
except ValueError:
i = 0 # depends on [control=['except'], data=[]]
self.w.channel_name.set_index(i)
# update the image listing
self.redo() |
def AddArguments(cls, argument_group):
"""Adds command line arguments the helper supports to an argument group.
This function takes an argument parser or an argument group object and adds
to it all the command line arguments this helper supports.
Args:
argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
argparse group.
"""
default_fields = ','.join(cls._DEFAULT_FIELDS)
argument_group.add_argument(
'--fields', dest='fields', type=str, action='store',
default=default_fields, help=(
'Defines which fields should be included in the output.'))
default_fields = ', '.join(cls._DEFAULT_FIELDS)
argument_group.add_argument(
'--additional_fields', dest='additional_fields', type=str,
action='store', default='', help=(
'Defines extra fields to be included in the output, in addition to'
' the default fields, which are {0:s}.'.format(default_fields))) | def function[AddArguments, parameter[cls, argument_group]]:
constant[Adds command line arguments the helper supports to an argument group.
This function takes an argument parser or an argument group object and adds
to it all the command line arguments this helper supports.
Args:
argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
argparse group.
]
variable[default_fields] assign[=] call[constant[,].join, parameter[name[cls]._DEFAULT_FIELDS]]
call[name[argument_group].add_argument, parameter[constant[--fields]]]
variable[default_fields] assign[=] call[constant[, ].join, parameter[name[cls]._DEFAULT_FIELDS]]
call[name[argument_group].add_argument, parameter[constant[--additional_fields]]] | keyword[def] identifier[AddArguments] ( identifier[cls] , identifier[argument_group] ):
literal[string]
identifier[default_fields] = literal[string] . identifier[join] ( identifier[cls] . identifier[_DEFAULT_FIELDS] )
identifier[argument_group] . identifier[add_argument] (
literal[string] , identifier[dest] = literal[string] , identifier[type] = identifier[str] , identifier[action] = literal[string] ,
identifier[default] = identifier[default_fields] , identifier[help] =(
literal[string] ))
identifier[default_fields] = literal[string] . identifier[join] ( identifier[cls] . identifier[_DEFAULT_FIELDS] )
identifier[argument_group] . identifier[add_argument] (
literal[string] , identifier[dest] = literal[string] , identifier[type] = identifier[str] ,
identifier[action] = literal[string] , identifier[default] = literal[string] , identifier[help] =(
literal[string]
literal[string] . identifier[format] ( identifier[default_fields] ))) | def AddArguments(cls, argument_group):
"""Adds command line arguments the helper supports to an argument group.
This function takes an argument parser or an argument group object and adds
to it all the command line arguments this helper supports.
Args:
argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
argparse group.
"""
default_fields = ','.join(cls._DEFAULT_FIELDS)
argument_group.add_argument('--fields', dest='fields', type=str, action='store', default=default_fields, help='Defines which fields should be included in the output.')
default_fields = ', '.join(cls._DEFAULT_FIELDS)
argument_group.add_argument('--additional_fields', dest='additional_fields', type=str, action='store', default='', help='Defines extra fields to be included in the output, in addition to the default fields, which are {0:s}.'.format(default_fields)) |
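
A minimal, runnable sketch of how this helper is wired into argparse; the _DEFAULT_FIELDS values below are hypothetical stand-ins, since the real list lives on the helper class:

import argparse

# Hypothetical defaults standing in for cls._DEFAULT_FIELDS.
_DEFAULT_FIELDS = ['datetime', 'message', 'source']

parser = argparse.ArgumentParser()
group = parser.add_argument_group('output')
group.add_argument('--fields', dest='fields', type=str, action='store',
                   default=','.join(_DEFAULT_FIELDS),
                   help='Defines which fields should be included in the output.')
group.add_argument('--additional_fields', dest='additional_fields', type=str,
                   action='store', default='',
                   help='Extra fields in addition to the defaults ({0:s}).'.format(
                       ', '.join(_DEFAULT_FIELDS)))

args = parser.parse_args(['--fields', 'datetime,message'])
print(args.fields)             # datetime,message
print(args.additional_fields)  # empty string by default
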
def otherwise(self, value):
"""
Evaluates a list of conditions and returns one of multiple possible result expressions.
If :func:`Column.otherwise` is not invoked, None is returned for unmatched conditions.
See :func:`pyspark.sql.functions.when` for example usage.
:param value: a literal value, or a :class:`Column` expression.
>>> from pyspark.sql import functions as F
>>> df.select(df.name, F.when(df.age > 3, 1).otherwise(0)).show()
+-----+-------------------------------------+
| name|CASE WHEN (age > 3) THEN 1 ELSE 0 END|
+-----+-------------------------------------+
|Alice| 0|
| Bob| 1|
+-----+-------------------------------------+
"""
v = value._jc if isinstance(value, Column) else value
jc = self._jc.otherwise(v)
return Column(jc) | def function[otherwise, parameter[self, value]]:
constant[
Evaluates a list of conditions and returns one of multiple possible result expressions.
If :func:`Column.otherwise` is not invoked, None is returned for unmatched conditions.
See :func:`pyspark.sql.functions.when` for example usage.
:param value: a literal value, or a :class:`Column` expression.
>>> from pyspark.sql import functions as F
>>> df.select(df.name, F.when(df.age > 3, 1).otherwise(0)).show()
+-----+-------------------------------------+
| name|CASE WHEN (age > 3) THEN 1 ELSE 0 END|
+-----+-------------------------------------+
|Alice| 0|
| Bob| 1|
+-----+-------------------------------------+
]
variable[v] assign[=] <ast.IfExp object at 0x7da20c9912a0>
variable[jc] assign[=] call[name[self]._jc.otherwise, parameter[name[v]]]
return[call[name[Column], parameter[name[jc]]]] | keyword[def] identifier[otherwise] ( identifier[self] , identifier[value] ):
literal[string]
identifier[v] = identifier[value] . identifier[_jc] keyword[if] identifier[isinstance] ( identifier[value] , identifier[Column] ) keyword[else] identifier[value]
identifier[jc] = identifier[self] . identifier[_jc] . identifier[otherwise] ( identifier[v] )
keyword[return] identifier[Column] ( identifier[jc] ) | def otherwise(self, value):
"""
Evaluates a list of conditions and returns one of multiple possible result expressions.
If :func:`Column.otherwise` is not invoked, None is returned for unmatched conditions.
See :func:`pyspark.sql.functions.when` for example usage.
:param value: a literal value, or a :class:`Column` expression.
>>> from pyspark.sql import functions as F
>>> df.select(df.name, F.when(df.age > 3, 1).otherwise(0)).show()
+-----+-------------------------------------+
| name|CASE WHEN (age > 3) THEN 1 ELSE 0 END|
+-----+-------------------------------------+
|Alice| 0|
| Bob| 1|
+-----+-------------------------------------+
"""
v = value._jc if isinstance(value, Column) else value
jc = self._jc.otherwise(v)
return Column(jc) |
def add_resize_bilinear(self, name, input_name, output_name, target_height=1, target_width=1,
mode='ALIGN_ENDPOINTS_MODE'):
"""
Add resize bilinear layer to the model. A layer that resizes the input to a given spatial size using bilinear interpolation.
Parameters
----------
name: str
The name of this layer.
input_name: str
The input blob name of this layer.
output_name: str
The output blob name of this layer.
target_height: int
Output height dimension.
target_width: int
Output width dimension.
mode: str
Following values are supported: 'STRICT_ALIGN_ENDPOINTS_MODE', 'ALIGN_ENDPOINTS_MODE', 'UPSAMPLE_MODE', 'ROI_ALIGN_MODE'.
This parameter determines the sampling grid used for bilinear interpolation. Kindly refer to NeuralNetwork.proto for details.
See Also
--------
add_upsample
"""
spec = self.spec
nn_spec = self.nn_spec
# Add a new inner-product layer
spec_layer = nn_spec.layers.add()
spec_layer.name = name
spec_layer.input.append(input_name)
spec_layer.output.append(output_name)
spec_layer_params = spec_layer.resizeBilinear
spec_layer_params.targetSize.append(target_height)
spec_layer_params.targetSize.append(target_width)
if mode == 'ALIGN_ENDPOINTS_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('ALIGN_ENDPOINTS_MODE')
elif mode == 'STRICT_ALIGN_ENDPOINTS_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('STRICT_ALIGN_ENDPOINTS_MODE')
elif mode == 'UPSAMPLE_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('UPSAMPLE_MODE')
elif mode == 'ROI_ALIGN_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('ROI_ALIGN_MODE')
else:
raise ValueError("Unspported resize bilinear mode %s" % mode) | def function[add_resize_bilinear, parameter[self, name, input_name, output_name, target_height, target_width, mode]]:
constant[
Add resize bilinear layer to the model. A layer that resizes the input to a given spatial size using bilinear interpolation.
Parameters
----------
name: str
The name of this layer.
input_name: str
The input blob name of this layer.
output_name: str
The output blob name of this layer.
target_height: int
Output height dimension.
target_width: int
Output width dimension.
mode: str
Following values are supported: 'STRICT_ALIGN_ENDPOINTS_MODE', 'ALIGN_ENDPOINTS_MODE', 'UPSAMPLE_MODE', 'ROI_ALIGN_MODE'.
This parameter determines the sampling grid used for bilinear interpolation. Kindly refer to NeuralNetwork.proto for details.
See Also
--------
add_upsample
]
variable[spec] assign[=] name[self].spec
variable[nn_spec] assign[=] name[self].nn_spec
variable[spec_layer] assign[=] call[name[nn_spec].layers.add, parameter[]]
name[spec_layer].name assign[=] name[name]
call[name[spec_layer].input.append, parameter[name[input_name]]]
call[name[spec_layer].output.append, parameter[name[output_name]]]
variable[spec_layer_params] assign[=] name[spec_layer].resizeBilinear
call[name[spec_layer_params].targetSize.append, parameter[name[target_height]]]
call[name[spec_layer_params].targetSize.append, parameter[name[target_width]]]
if compare[name[mode] equal[==] constant[ALIGN_ENDPOINTS_MODE]] begin[:]
name[spec_layer_params].mode.samplingMethod assign[=] call[name[_NeuralNetwork_pb2].SamplingMode.Method.Value, parameter[constant[ALIGN_ENDPOINTS_MODE]]] | keyword[def] identifier[add_resize_bilinear] ( identifier[self] , identifier[name] , identifier[input_name] , identifier[output_name] , identifier[target_height] = literal[int] , identifier[target_width] = literal[int] ,
identifier[mode] = literal[string] ):
literal[string]
identifier[spec] = identifier[self] . identifier[spec]
identifier[nn_spec] = identifier[self] . identifier[nn_spec]
identifier[spec_layer] = identifier[nn_spec] . identifier[layers] . identifier[add] ()
identifier[spec_layer] . identifier[name] = identifier[name]
identifier[spec_layer] . identifier[input] . identifier[append] ( identifier[input_name] )
identifier[spec_layer] . identifier[output] . identifier[append] ( identifier[output_name] )
identifier[spec_layer_params] = identifier[spec_layer] . identifier[resizeBilinear]
identifier[spec_layer_params] . identifier[targetSize] . identifier[append] ( identifier[target_height] )
identifier[spec_layer_params] . identifier[targetSize] . identifier[append] ( identifier[target_width] )
keyword[if] identifier[mode] == literal[string] :
identifier[spec_layer_params] . identifier[mode] . identifier[samplingMethod] = identifier[_NeuralNetwork_pb2] . identifier[SamplingMode] . identifier[Method] . identifier[Value] ( literal[string] )
keyword[elif] identifier[mode] == literal[string] :
identifier[spec_layer_params] . identifier[mode] . identifier[samplingMethod] = identifier[_NeuralNetwork_pb2] . identifier[SamplingMode] . identifier[Method] . identifier[Value] ( literal[string] )
keyword[elif] identifier[mode] == literal[string] :
identifier[spec_layer_params] . identifier[mode] . identifier[samplingMethod] = identifier[_NeuralNetwork_pb2] . identifier[SamplingMode] . identifier[Method] . identifier[Value] ( literal[string] )
keyword[elif] identifier[mode] == literal[string] :
identifier[spec_layer_params] . identifier[mode] . identifier[samplingMethod] = identifier[_NeuralNetwork_pb2] . identifier[SamplingMode] . identifier[Method] . identifier[Value] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[mode] ) | def add_resize_bilinear(self, name, input_name, output_name, target_height=1, target_width=1, mode='ALIGN_ENDPOINTS_MODE'):
"""
Add resize bilinear layer to the model. A layer that resizes the input to a given spatial size using bilinear interpolation.
Parameters
----------
name: str
The name of this layer.
input_name: str
The input blob name of this layer.
output_name: str
The output blob name of this layer.
target_height: int
Output height dimension.
target_width: int
Output width dimension.
mode: str
Following values are supported: 'STRICT_ALIGN_ENDPOINTS_MODE', 'ALIGN_ENDPOINTS_MODE', 'UPSAMPLE_MODE', 'ROI_ALIGN_MODE'.
This parameter determines the sampling grid used for bilinear interpolation. Kindly refer to NeuralNetwork.proto for details.
See Also
--------
add_upsample
"""
spec = self.spec
nn_spec = self.nn_spec
# Add a new inner-product layer
spec_layer = nn_spec.layers.add()
spec_layer.name = name
spec_layer.input.append(input_name)
spec_layer.output.append(output_name)
spec_layer_params = spec_layer.resizeBilinear
spec_layer_params.targetSize.append(target_height)
spec_layer_params.targetSize.append(target_width)
if mode == 'ALIGN_ENDPOINTS_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('ALIGN_ENDPOINTS_MODE') # depends on [control=['if'], data=[]]
elif mode == 'STRICT_ALIGN_ENDPOINTS_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('STRICT_ALIGN_ENDPOINTS_MODE') # depends on [control=['if'], data=[]]
elif mode == 'UPSAMPLE_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('UPSAMPLE_MODE') # depends on [control=['if'], data=[]]
elif mode == 'ROI_ALIGN_MODE':
spec_layer_params.mode.samplingMethod = _NeuralNetwork_pb2.SamplingMode.Method.Value('ROI_ALIGN_MODE') # depends on [control=['if'], data=[]]
else:
raise ValueError('Unsupported resize bilinear mode %s' % mode)
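
The if/elif chain above amounts to validating mode against a fixed set of sampling methods before looking up the protobuf enum. A standalone sketch of that validation pattern, with no protobuf dependency (the enum lookup itself is specific to NeuralNetwork.proto):

# Supported sampling modes, mirroring the branches above.
_SAMPLING_MODES = frozenset([
    'STRICT_ALIGN_ENDPOINTS_MODE', 'ALIGN_ENDPOINTS_MODE',
    'UPSAMPLE_MODE', 'ROI_ALIGN_MODE',
])

def validate_sampling_mode(mode):
    # Reject anything outside the supported set, as the method does.
    if mode not in _SAMPLING_MODES:
        raise ValueError('Unsupported resize bilinear mode %s' % mode)
    return mode

print(validate_sampling_mode('UPSAMPLE_MODE'))  # UPSAMPLE_MODE
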
def set_password(self, raw_password):
"""Calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` for the user.
If password is ``None``, calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_unusable_password`.
"""
if raw_password is None:
self.set_unusable_password()
else:
xmpp_backend.set_password(self.node, self.domain, raw_password) | def function[set_password, parameter[self, raw_password]]:
constant[Calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` for the user.
If password is ``None``, calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_unusable_password`.
]
if compare[name[raw_password] is constant[None]] begin[:]
call[name[self].set_unusable_password, parameter[]] | keyword[def] identifier[set_password] ( identifier[self] , identifier[raw_password] ):
literal[string]
keyword[if] identifier[raw_password] keyword[is] keyword[None] :
identifier[self] . identifier[set_unusable_password] ()
keyword[else] :
identifier[xmpp_backend] . identifier[set_password] ( identifier[self] . identifier[node] , identifier[self] . identifier[domain] , identifier[raw_password] ) | def set_password(self, raw_password):
"""Calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` for the user.
If password is ``None``, calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_unusable_password`.
"""
if raw_password is None:
self.set_unusable_password() # depends on [control=['if'], data=[]]
else:
xmpp_backend.set_password(self.node, self.domain, raw_password) |
def plot_station_mappings(mapping_results): # pragma: no cover
""" Plot a list of mapping results on a map.
Requires matplotlib and cartopy.
Parameters
----------
mapping_results : list of MappingResult objects
Mapping results to plot
"""
try:
import matplotlib.pyplot as plt
except ImportError:
raise ImportError("Plotting requires matplotlib.")
try:
import cartopy.crs as ccrs
import cartopy.feature as cfeature
except ImportError:
raise ImportError("Plotting requires cartopy.")
lats = []
lngs = []
t_lats = []
t_lngs = []
n_discards = 0
for mapping_result in mapping_results:
if not mapping_result.is_empty():
lat, lng = mapping_result.isd_station.coords
t_lat, t_lng = map(float, mapping_result.target_coords)
lats.append(lat)
lngs.append(lng)
t_lats.append(t_lat)
t_lngs.append(t_lng)
else:
n_discards += 1
print("Discarded {} empty mappings".format(n_discards))
# figure
fig = plt.figure(figsize=(60, 60))
# axes
ax = plt.subplot(1, 1, 1, projection=ccrs.Mercator())
# offsets for labels
all_lngs = lngs + t_lngs
all_lats = lats + t_lats
x_max = max(all_lngs) # lists
x_min = min(all_lngs)
x_diff = x_max - x_min
y_max = max(all_lats)
y_min = min(all_lats)
y_diff = y_max - y_min
# minimum
x_pad = 0.1 * x_diff
y_pad = 0.1 * y_diff
left = x_min - x_pad
right = x_max + x_pad
bottom = y_min - y_pad
top = y_max + y_pad
width_ratio = 2.
height_ratio = 1.
if (right - left) / (top - bottom) > height_ratio / width_ratio:
# too short
goal = (right - left) * height_ratio / width_ratio
diff = goal - (top - bottom)
bottom = bottom - diff / 2.
top = top + diff / 2.
else:
# too skinny
goal = (top - bottom) * width_ratio / height_ratio
diff = goal - (right - left)
left = left - diff / 2.
right = right + diff / 2.
left = max(left, -179.9)
right = min(right, 179.9)
bottom = max([bottom, -89.9])
top = min([top, 89.9])
ax.set_extent([left, right, bottom, top])
# OCEAN
ax.add_feature(
cfeature.NaturalEarthFeature(
"physical",
"ocean",
"50m",
edgecolor="face",
facecolor=cfeature.COLORS["water"],
)
)
# LAND
ax.add_feature(
cfeature.NaturalEarthFeature(
"physical",
"land",
"50m",
edgecolor="face",
facecolor=cfeature.COLORS["land"],
)
)
# BORDERS
ax.add_feature(
cfeature.NaturalEarthFeature(
"cultural",
"admin_0_boundary_lines_land",
"50m",
edgecolor="black",
facecolor="none",
)
)
# LAKES
ax.add_feature(
cfeature.NaturalEarthFeature(
"physical",
"lakes",
"50m",
edgecolor="face",
facecolor=cfeature.COLORS["water"],
)
)
# COASTLINE
ax.add_feature(
cfeature.NaturalEarthFeature(
"physical", "coastline", "50m", edgecolor="black", facecolor="none"
)
)
# lines between
# for lat, t_lat, lng, t_lng in zip(lats, t_lats, lngs, t_lngs):
ax.plot(
[lngs, t_lngs],
[lats, t_lats],
color="k",
linestyle="-",
transform=ccrs.Geodetic(),
linewidth=0.3,
)
# stations
ax.plot(lngs, lats, "bo", markersize=1, transform=ccrs.Geodetic())
plt.title("Location to weather station mapping")
plt.show() | def function[plot_station_mappings, parameter[mapping_results]]:
constant[ Plot a list of mapping results on a map.
Requires matplotlib and cartopy.
Parameters
----------
mapping_results : list of MappingResult objects
Mapping results to plot
]
<ast.Try object at 0x7da1b26ad330>
<ast.Try object at 0x7da1b26aceb0>
variable[lats] assign[=] list[[]]
variable[lngs] assign[=] list[[]]
variable[t_lats] assign[=] list[[]]
variable[t_lngs] assign[=] list[[]]
variable[n_discards] assign[=] constant[0]
for taget[name[mapping_result]] in starred[name[mapping_results]] begin[:]
if <ast.UnaryOp object at 0x7da1b26ac040> begin[:]
<ast.Tuple object at 0x7da1b26ae0b0> assign[=] name[mapping_result].isd_station.coords
<ast.Tuple object at 0x7da1b26accd0> assign[=] call[name[map], parameter[name[float], name[mapping_result].target_coords]]
call[name[lats].append, parameter[name[lat]]]
call[name[lngs].append, parameter[name[lng]]]
call[name[t_lats].append, parameter[name[t_lat]]]
call[name[t_lngs].append, parameter[name[t_lng]]]
call[name[print], parameter[call[constant[Discarded {} empty mappings].format, parameter[name[n_discards]]]]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[plt].subplot, parameter[constant[1], constant[1], constant[1]]]
variable[all_lngs] assign[=] binary_operation[name[lngs] + name[t_lngs]]
variable[all_lats] assign[=] binary_operation[name[lats] + name[t_lats]]
variable[x_max] assign[=] call[name[max], parameter[name[all_lngs]]]
variable[x_min] assign[=] call[name[min], parameter[name[all_lngs]]]
variable[x_diff] assign[=] binary_operation[name[x_max] - name[x_min]]
variable[y_max] assign[=] call[name[max], parameter[name[all_lats]]]
variable[y_min] assign[=] call[name[min], parameter[name[all_lats]]]
variable[y_diff] assign[=] binary_operation[name[y_max] - name[y_min]]
variable[x_pad] assign[=] binary_operation[constant[0.1] * name[x_diff]]
variable[y_pad] assign[=] binary_operation[constant[0.1] * name[y_diff]]
variable[left] assign[=] binary_operation[name[x_min] - name[x_pad]]
variable[right] assign[=] binary_operation[name[x_max] + name[x_pad]]
variable[bottom] assign[=] binary_operation[name[y_min] - name[y_pad]]
variable[top] assign[=] binary_operation[name[y_max] + name[y_pad]]
variable[width_ratio] assign[=] constant[2.0]
variable[height_ratio] assign[=] constant[1.0]
if compare[binary_operation[binary_operation[name[right] - name[left]] / binary_operation[name[top] - name[bottom]]] greater[>] binary_operation[name[height_ratio] / name[width_ratio]]] begin[:]
variable[goal] assign[=] binary_operation[binary_operation[binary_operation[name[right] - name[left]] * name[height_ratio]] / name[width_ratio]]
variable[diff] assign[=] binary_operation[name[goal] - binary_operation[name[top] - name[bottom]]]
variable[bottom] assign[=] binary_operation[name[bottom] - binary_operation[name[diff] / constant[2.0]]]
variable[top] assign[=] binary_operation[name[top] + binary_operation[name[diff] / constant[2.0]]]
variable[left] assign[=] call[name[max], parameter[name[left], <ast.UnaryOp object at 0x7da204620250>]]
variable[right] assign[=] call[name[min], parameter[name[right], constant[179.9]]]
variable[bottom] assign[=] call[name[max], parameter[list[[<ast.Name object at 0x7da204621300>, <ast.UnaryOp object at 0x7da204623fd0>]]]]
variable[top] assign[=] call[name[min], parameter[list[[<ast.Name object at 0x7da204623b20>, <ast.Constant object at 0x7da204623730>]]]]
call[name[ax].set_extent, parameter[list[[<ast.Name object at 0x7da204621ea0>, <ast.Name object at 0x7da204620520>, <ast.Name object at 0x7da204622c80>, <ast.Name object at 0x7da2046209a0>]]]]
call[name[ax].add_feature, parameter[call[name[cfeature].NaturalEarthFeature, parameter[constant[physical], constant[ocean], constant[50m]]]]]
call[name[ax].add_feature, parameter[call[name[cfeature].NaturalEarthFeature, parameter[constant[physical], constant[land], constant[50m]]]]]
call[name[ax].add_feature, parameter[call[name[cfeature].NaturalEarthFeature, parameter[constant[cultural], constant[admin_0_boundary_lines_land], constant[50m]]]]]
call[name[ax].add_feature, parameter[call[name[cfeature].NaturalEarthFeature, parameter[constant[physical], constant[lakes], constant[50m]]]]]
call[name[ax].add_feature, parameter[call[name[cfeature].NaturalEarthFeature, parameter[constant[physical], constant[coastline], constant[50m]]]]]
call[name[ax].plot, parameter[list[[<ast.Name object at 0x7da207f9be50>, <ast.Name object at 0x7da207f9b1f0>]], list[[<ast.Name object at 0x7da207f981c0>, <ast.Name object at 0x7da207f9abc0>]]]]
call[name[ax].plot, parameter[name[lngs], name[lats], constant[bo]]]
call[name[plt].title, parameter[constant[Location to weather station mapping]]]
call[name[plt].show, parameter[]] | keyword[def] identifier[plot_station_mappings] ( identifier[mapping_results] ):
literal[string]
keyword[try] :
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string] )
keyword[try] :
keyword[import] identifier[cartopy] . identifier[crs] keyword[as] identifier[ccrs]
keyword[import] identifier[cartopy] . identifier[feature] keyword[as] identifier[cfeature]
keyword[except] identifier[ImportError] :
keyword[raise] identifier[ImportError] ( literal[string] )
identifier[lats] =[]
identifier[lngs] =[]
identifier[t_lats] =[]
identifier[t_lngs] =[]
identifier[n_discards] = literal[int]
keyword[for] identifier[mapping_result] keyword[in] identifier[mapping_results] :
keyword[if] keyword[not] identifier[mapping_result] . identifier[is_empty] ():
identifier[lat] , identifier[lng] = identifier[mapping_result] . identifier[isd_station] . identifier[coords]
identifier[t_lat] , identifier[t_lng] = identifier[map] ( identifier[float] , identifier[mapping_result] . identifier[target_coords] )
identifier[lats] . identifier[append] ( identifier[lat] )
identifier[lngs] . identifier[append] ( identifier[lng] )
identifier[t_lats] . identifier[append] ( identifier[t_lat] )
identifier[t_lngs] . identifier[append] ( identifier[t_lng] )
keyword[else] :
identifier[n_discards] += literal[int]
identifier[print] ( literal[string] . identifier[format] ( identifier[n_discards] ))
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ))
identifier[ax] = identifier[plt] . identifier[subplot] ( literal[int] , literal[int] , literal[int] , identifier[projection] = identifier[ccrs] . identifier[Mercator] ())
identifier[all_lngs] = identifier[lngs] + identifier[t_lngs]
identifier[all_lats] = identifier[lats] + identifier[t_lats]
identifier[x_max] = identifier[max] ( identifier[all_lngs] )
identifier[x_min] = identifier[min] ( identifier[all_lngs] )
identifier[x_diff] = identifier[x_max] - identifier[x_min]
identifier[y_max] = identifier[max] ( identifier[all_lats] )
identifier[y_min] = identifier[min] ( identifier[all_lats] )
identifier[y_diff] = identifier[y_max] - identifier[y_min]
identifier[x_pad] = literal[int] * identifier[x_diff]
identifier[y_pad] = literal[int] * identifier[y_diff]
identifier[left] = identifier[x_min] - identifier[x_pad]
identifier[right] = identifier[x_max] + identifier[x_pad]
identifier[bottom] = identifier[y_min] - identifier[y_pad]
identifier[top] = identifier[y_max] + identifier[y_pad]
identifier[width_ratio] = literal[int]
identifier[height_ratio] = literal[int]
keyword[if] ( identifier[right] - identifier[left] )/( identifier[top] - identifier[bottom] )> identifier[height_ratio] / identifier[width_ratio] :
identifier[goal] =( identifier[right] - identifier[left] )* identifier[height_ratio] / identifier[width_ratio]
identifier[diff] = identifier[goal] -( identifier[top] - identifier[bottom] )
identifier[bottom] = identifier[bottom] - identifier[diff] / literal[int]
identifier[top] = identifier[top] + identifier[diff] / literal[int]
keyword[else] :
identifier[goal] =( identifier[top] - identifier[bottom] )* identifier[width_ratio] / identifier[height_ratio]
identifier[diff] = identifier[goal] -( identifier[right] - identifier[left] )
identifier[left] = identifier[left] - identifier[diff] / literal[int]
identifier[right] = identifier[right] + identifier[diff] / literal[int]
identifier[left] = identifier[max] ( identifier[left] ,- literal[int] )
identifier[right] = identifier[min] ( identifier[right] , literal[int] )
identifier[bottom] = identifier[max] ([ identifier[bottom] ,- literal[int] ])
identifier[top] = identifier[min] ([ identifier[top] , literal[int] ])
identifier[ax] . identifier[set_extent] ([ identifier[left] , identifier[right] , identifier[bottom] , identifier[top] ])
identifier[ax] . identifier[add_feature] (
identifier[cfeature] . identifier[NaturalEarthFeature] (
literal[string] ,
literal[string] ,
literal[string] ,
identifier[edgecolor] = literal[string] ,
identifier[facecolor] = identifier[cfeature] . identifier[COLORS] [ literal[string] ],
)
)
identifier[ax] . identifier[add_feature] (
identifier[cfeature] . identifier[NaturalEarthFeature] (
literal[string] ,
literal[string] ,
literal[string] ,
identifier[edgecolor] = literal[string] ,
identifier[facecolor] = identifier[cfeature] . identifier[COLORS] [ literal[string] ],
)
)
identifier[ax] . identifier[add_feature] (
identifier[cfeature] . identifier[NaturalEarthFeature] (
literal[string] ,
literal[string] ,
literal[string] ,
identifier[edgecolor] = literal[string] ,
identifier[facecolor] = literal[string] ,
)
)
identifier[ax] . identifier[add_feature] (
identifier[cfeature] . identifier[NaturalEarthFeature] (
literal[string] ,
literal[string] ,
literal[string] ,
identifier[edgecolor] = literal[string] ,
identifier[facecolor] = identifier[cfeature] . identifier[COLORS] [ literal[string] ],
)
)
identifier[ax] . identifier[add_feature] (
identifier[cfeature] . identifier[NaturalEarthFeature] (
literal[string] , literal[string] , literal[string] , identifier[edgecolor] = literal[string] , identifier[facecolor] = literal[string]
)
)
identifier[ax] . identifier[plot] (
[ identifier[lngs] , identifier[t_lngs] ],
[ identifier[lats] , identifier[t_lats] ],
identifier[color] = literal[string] ,
identifier[linestyle] = literal[string] ,
identifier[transform] = identifier[ccrs] . identifier[Geodetic] (),
identifier[linewidth] = literal[int] ,
)
identifier[ax] . identifier[plot] ( identifier[lngs] , identifier[lats] , literal[string] , identifier[markersize] = literal[int] , identifier[transform] = identifier[ccrs] . identifier[Geodetic] ())
identifier[plt] . identifier[title] ( literal[string] )
identifier[plt] . identifier[show] () | def plot_station_mappings(mapping_results): # pragma: no cover
' Plot a list of mapping results on a map.\n\n Requires matplotlib and cartopy.\n\n Parameters\n ----------\n mapping_results : list of MappingResult objects\n Mapping results to plot\n '
try:
import matplotlib.pyplot as plt # depends on [control=['try'], data=[]]
except ImportError:
raise ImportError('Plotting requires matplotlib.') # depends on [control=['except'], data=[]]
try:
import cartopy.crs as ccrs
import cartopy.feature as cfeature # depends on [control=['try'], data=[]]
except ImportError:
raise ImportError('Plotting requires cartopy.') # depends on [control=['except'], data=[]]
lats = []
lngs = []
t_lats = []
t_lngs = []
n_discards = 0
for mapping_result in mapping_results:
if not mapping_result.is_empty():
(lat, lng) = mapping_result.isd_station.coords
(t_lat, t_lng) = map(float, mapping_result.target_coords)
lats.append(lat)
lngs.append(lng)
t_lats.append(t_lat)
t_lngs.append(t_lng) # depends on [control=['if'], data=[]]
else:
n_discards += 1 # depends on [control=['for'], data=['mapping_result']]
print('Discarded {} empty mappings'.format(n_discards))
# figure
fig = plt.figure(figsize=(60, 60))
# axes
ax = plt.subplot(1, 1, 1, projection=ccrs.Mercator())
# offsets for labels
all_lngs = lngs + t_lngs
all_lats = lats + t_lats
x_max = max(all_lngs) # lists
x_min = min(all_lngs)
x_diff = x_max - x_min
y_max = max(all_lats)
y_min = min(all_lats)
y_diff = y_max - y_min
# minimum
x_pad = 0.1 * x_diff
y_pad = 0.1 * y_diff
left = x_min - x_pad
right = x_max + x_pad
bottom = y_min - y_pad
top = y_max + y_pad
width_ratio = 2.0
height_ratio = 1.0
if (right - left) / (top - bottom) > height_ratio / width_ratio:
# too short
goal = (right - left) * height_ratio / width_ratio
diff = goal - (top - bottom)
bottom = bottom - diff / 2.0
top = top + diff / 2.0 # depends on [control=['if'], data=[]]
else:
# too skinny
goal = (top - bottom) * width_ratio / height_ratio
diff = goal - (right - left)
left = left - diff / 2.0
right = right + diff / 2.0
left = max(left, -179.9)
right = min(right, 179.9)
bottom = max([bottom, -89.9])
top = min([top, 89.9])
ax.set_extent([left, right, bottom, top])
# OCEAN
ax.add_feature(cfeature.NaturalEarthFeature('physical', 'ocean', '50m', edgecolor='face', facecolor=cfeature.COLORS['water']))
# LAND
ax.add_feature(cfeature.NaturalEarthFeature('physical', 'land', '50m', edgecolor='face', facecolor=cfeature.COLORS['land']))
# BORDERS
ax.add_feature(cfeature.NaturalEarthFeature('cultural', 'admin_0_boundary_lines_land', '50m', edgecolor='black', facecolor='none'))
# LAKES
ax.add_feature(cfeature.NaturalEarthFeature('physical', 'lakes', '50m', edgecolor='face', facecolor=cfeature.COLORS['water']))
# COASTLINE
ax.add_feature(cfeature.NaturalEarthFeature('physical', 'coastline', '50m', edgecolor='black', facecolor='none'))
# lines between
# for lat, t_lat, lng, t_lng in zip(lats, t_lats, lngs, t_lngs):
ax.plot([lngs, t_lngs], [lats, t_lats], color='k', linestyle='-', transform=ccrs.Geodetic(), linewidth=0.3)
# stations
ax.plot(lngs, lats, 'bo', markersize=1, transform=ccrs.Geodetic())
plt.title('Location to weather station mapping')
plt.show() |
def writeArray(fp, a, delim = " ", nl = 1):
"""
Writes a sequence ``a`` of floats to the file pointed to by the file pointer ``fp``.
"""
for i in a:
fp.write("%f%s" % (i, delim))
if nl:
fp.write("\n") | def function[writeArray, parameter[fp, a, delim, nl]]:
constant[
Writes a sequence ``a`` of floats to the file pointed to by the file pointer ``fp``.
]
for taget[name[i]] in starred[name[a]] begin[:]
call[name[fp].write, parameter[binary_operation[constant[%f%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0472320>, <ast.Name object at 0x7da1b0470670>]]]]]
if name[nl] begin[:]
call[name[fp].write, parameter[constant[
]]] | keyword[def] identifier[writeArray] ( identifier[fp] , identifier[a] , identifier[delim] = literal[string] , identifier[nl] = literal[int] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[a] :
identifier[fp] . identifier[write] ( literal[string] %( identifier[i] , identifier[delim] ))
keyword[if] identifier[nl] :
identifier[fp] . identifier[write] ( literal[string] ) | def writeArray(fp, a, delim=' ', nl=1):
"""
Writes a sequence ``a`` of floats to the file pointed to by the file pointer ``fp``.
"""
for i in a:
fp.write('%f%s' % (i, delim)) # depends on [control=['for'], data=['i']]
if nl:
fp.write('\n') # depends on [control=['if'], data=[]] |
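
A quick usage sketch for writeArray, writing into an in-memory text buffer (assumes the function above is in scope):

import io

buf = io.StringIO()
writeArray(buf, [1.0, 2.5, 3.25])
print(repr(buf.getvalue()))  # '1.000000 2.500000 3.250000 \n'
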
def _dictionary(self):
# type: () -> Dict[str, Any]
"""A dictionary representing the loaded configuration.
"""
# NOTE: Dictionaries are not populated if not loaded. So, conditionals
# are not needed here.
retval = {}
for variant in self._override_order:
retval.update(self._config[variant])
return retval | def function[_dictionary, parameter[self]]:
constant[A dictionary representing the loaded configuration.
]
variable[retval] assign[=] dictionary[[], []]
for taget[name[variant]] in starred[name[self]._override_order] begin[:]
call[name[retval].update, parameter[call[name[self]._config][name[variant]]]]
return[name[retval]] | keyword[def] identifier[_dictionary] ( identifier[self] ):
literal[string]
identifier[retval] ={}
keyword[for] identifier[variant] keyword[in] identifier[self] . identifier[_override_order] :
identifier[retval] . identifier[update] ( identifier[self] . identifier[_config] [ identifier[variant] ])
keyword[return] identifier[retval] | def _dictionary(self):
# type: () -> Dict[str, Any]
'A dictionary representing the loaded configuration.\n '
# NOTE: Dictionaries are not populated if not loaded. So, conditionals
# are not needed here.
retval = {}
for variant in self._override_order:
retval.update(self._config[variant]) # depends on [control=['for'], data=['variant']]
return retval |
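
A standalone illustration of the precedence rule implemented above: later variants in the override order win on key collisions. The variant names and settings here are hypothetical:

_override_order = ['global', 'user', 'site']
_config = {
    'global': {'timeout': 15, 'retries': 2},
    'user': {'timeout': 60},
    'site': {'index-url': 'https://example.invalid/simple'},
}

retval = {}
for variant in _override_order:
    retval.update(_config[variant])

print(retval['timeout'])  # 60 -- the 'user' variant overrides 'global'
print(retval['retries'])  # 2  -- untouched keys survive from earlier variants
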
def _log_end_transaction(self, start_time, response):
"""Log response from an API request."""
if not self._is_logging: return
elapsed_time = int((time.time() - start_time) * 1000)
msg = "<<< HTTP %d %s (%d ms)\n" % (response.status_code, response.reason, elapsed_time)
if "Content-Type" in response.headers:
msg += " Content-Type: %s\n" % response.headers["Content-Type"]
msg += response.text
self._log_message(msg + "\n\n") | def function[_log_end_transaction, parameter[self, start_time, response]]:
constant[Log response from an API request.]
if <ast.UnaryOp object at 0x7da207f03be0> begin[:]
return[None]
variable[elapsed_time] assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[time].time, parameter[]] - name[start_time]] * constant[1000]]]]
variable[msg] assign[=] binary_operation[constant[<<< HTTP %d %s (%d ms)
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da207f03dc0>, <ast.Attribute object at 0x7da207f004c0>, <ast.Name object at 0x7da207f024d0>]]]
if compare[constant[Content-Type] in name[response].headers] begin[:]
<ast.AugAssign object at 0x7da207f010c0>
<ast.AugAssign object at 0x7da207f03130>
call[name[self]._log_message, parameter[binary_operation[name[msg] + constant[
]]]] | keyword[def] identifier[_log_end_transaction] ( identifier[self] , identifier[start_time] , identifier[response] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_is_logging] : keyword[return]
identifier[elapsed_time] = identifier[int] (( identifier[time] . identifier[time] ()- identifier[start_time] )* literal[int] )
identifier[msg] = literal[string] %( identifier[response] . identifier[status_code] , identifier[response] . identifier[reason] , identifier[elapsed_time] )
keyword[if] literal[string] keyword[in] identifier[response] . identifier[headers] :
identifier[msg] += literal[string] % identifier[response] . identifier[headers] [ literal[string] ]
identifier[msg] += identifier[response] . identifier[text]
identifier[self] . identifier[_log_message] ( identifier[msg] + literal[string] ) | def _log_end_transaction(self, start_time, response):
"""Log response from an API request."""
if not self._is_logging:
return # depends on [control=['if'], data=[]]
elapsed_time = int((time.time() - start_time) * 1000)
msg = '<<< HTTP %d %s (%d ms)\n' % (response.status_code, response.reason, elapsed_time)
if 'Content-Type' in response.headers:
msg += ' Content-Type: %s\n' % response.headers['Content-Type'] # depends on [control=['if'], data=[]]
msg += response.text
self._log_message(msg + '\n\n') |
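
A self-contained sketch of the timing and message assembly above, using a minimal stand-in for a requests-style response object:

import time

class FakeResponse(object):
    # Stand-in exposing only the attributes the method reads.
    status_code = 200
    reason = 'OK'
    headers = {'Content-Type': 'application/json'}
    text = '{"ok": true}'

start_time = time.time() - 0.042   # pretend the request took ~42 ms
response = FakeResponse()
elapsed_time = int((time.time() - start_time) * 1000)
msg = '<<< HTTP %d %s (%d ms)\n' % (response.status_code, response.reason, elapsed_time)
if 'Content-Type' in response.headers:
    msg += ' Content-Type: %s\n' % response.headers['Content-Type']
msg += response.text
print(msg)
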
def tuple_as_vec(xyzw):
"""
Generates a Vector4 from a tuple or list.
"""
vec = Vector4()
vec[0] = xyzw[0]
vec[1] = xyzw[1]
vec[2] = xyzw[2]
vec[3] = xyzw[3]
return vec | def function[tuple_as_vec, parameter[xyzw]]:
constant[
Generates a Vector4 from a tuple or list.
]
variable[vec] assign[=] call[name[Vector4], parameter[]]
call[name[vec]][constant[0]] assign[=] call[name[xyzw]][constant[0]]
call[name[vec]][constant[1]] assign[=] call[name[xyzw]][constant[1]]
call[name[vec]][constant[2]] assign[=] call[name[xyzw]][constant[2]]
call[name[vec]][constant[3]] assign[=] call[name[xyzw]][constant[3]]
return[name[vec]] | keyword[def] identifier[tuple_as_vec] ( identifier[xyzw] ):
literal[string]
identifier[vec] = identifier[Vector4] ()
identifier[vec] [ literal[int] ]= identifier[xyzw] [ literal[int] ]
identifier[vec] [ literal[int] ]= identifier[xyzw] [ literal[int] ]
identifier[vec] [ literal[int] ]= identifier[xyzw] [ literal[int] ]
identifier[vec] [ literal[int] ]= identifier[xyzw] [ literal[int] ]
keyword[return] identifier[vec] | def tuple_as_vec(xyzw):
"""
Generates a Vector4 from a tuple or list.
"""
vec = Vector4()
vec[0] = xyzw[0]
vec[1] = xyzw[1]
vec[2] = xyzw[2]
vec[3] = xyzw[3]
return vec |
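
A usage sketch; Vector4 is whatever the surrounding math library provides, so a minimal stand-in is defined here purely so the snippet runs (it must live in the same namespace as tuple_as_vec):

class Vector4(list):
    # Illustration-only stand-in supporting index assignment.
    def __init__(self):
        super(Vector4, self).__init__([0.0, 0.0, 0.0, 0.0])

vec = tuple_as_vec((1.0, 2.0, 3.0, 1.0))
print(vec)  # [1.0, 2.0, 3.0, 1.0]
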
def render_startmenu(self, ctx, data):
"""
For authenticated users, add the start-menu style navigation to the
given tag. For unauthenticated users, remove the given tag from the
output.
@see L{xmantissa.webnav.startMenu}
"""
if self.username is None:
return ''
translator = self._getViewerPrivateApplication()
pageComponents = translator.getPageComponents()
return startMenu(translator, pageComponents.navigation, ctx.tag) | def function[render_startmenu, parameter[self, ctx, data]]:
constant[
For authenticated users, add the start-menu style navigation to the
given tag. For unauthenticated users, remove the given tag from the
output.
@see L{xmantissa.webnav.startMenu}
]
if compare[name[self].username is constant[None]] begin[:]
return[constant[]]
variable[translator] assign[=] call[name[self]._getViewerPrivateApplication, parameter[]]
variable[pageComponents] assign[=] call[name[translator].getPageComponents, parameter[]]
return[call[name[startMenu], parameter[name[translator], name[pageComponents].navigation, name[ctx].tag]]] | keyword[def] identifier[render_startmenu] ( identifier[self] , identifier[ctx] , identifier[data] ):
literal[string]
keyword[if] identifier[self] . identifier[username] keyword[is] keyword[None] :
keyword[return] literal[string]
identifier[translator] = identifier[self] . identifier[_getViewerPrivateApplication] ()
identifier[pageComponents] = identifier[translator] . identifier[getPageComponents] ()
keyword[return] identifier[startMenu] ( identifier[translator] , identifier[pageComponents] . identifier[navigation] , identifier[ctx] . identifier[tag] ) | def render_startmenu(self, ctx, data):
"""
For authenticated users, add the start-menu style navigation to the
given tag. For unauthenticated users, remove the given tag from the
output.
@see L{xmantissa.webnav.startMenu}
"""
if self.username is None:
return '' # depends on [control=['if'], data=[]]
translator = self._getViewerPrivateApplication()
pageComponents = translator.getPageComponents()
return startMenu(translator, pageComponents.navigation, ctx.tag) |
def tenants_list(**kwargs):
'''
.. versionadded:: 2019.2.0
List all tenants for your account.
CLI Example:
.. code-block:: bash
salt-call azurearm_resource.tenants_list
'''
result = {}
subconn = __utils__['azurearm.get_client']('subscription', **kwargs)
try:
tenants = __utils__['azurearm.paged_object_to_list'](subconn.tenants.list())
for tenant in tenants:
result[tenant['tenant_id']] = tenant
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('resource', str(exc), **kwargs)
result = {'error': str(exc)}
return result | def function[tenants_list, parameter[]]:
constant[
.. versionadded:: 2019.2.0
List all tenants for your account.
CLI Example:
.. code-block:: bash
salt-call azurearm_resource.tenants_list
]
variable[result] assign[=] dictionary[[], []]
variable[subconn] assign[=] call[call[name[__utils__]][constant[azurearm.get_client]], parameter[constant[subscription]]]
<ast.Try object at 0x7da1b1ff2380>
return[name[result]] | keyword[def] identifier[tenants_list] (** identifier[kwargs] ):
literal[string]
identifier[result] ={}
identifier[subconn] = identifier[__utils__] [ literal[string] ]( literal[string] ,** identifier[kwargs] )
keyword[try] :
identifier[tenants] = identifier[__utils__] [ literal[string] ]( identifier[subconn] . identifier[tenants] . identifier[list] ())
keyword[for] identifier[tenant] keyword[in] identifier[tenants] :
identifier[result] [ identifier[tenant] [ literal[string] ]]= identifier[tenant]
keyword[except] identifier[CloudError] keyword[as] identifier[exc] :
identifier[__utils__] [ literal[string] ]( literal[string] , identifier[str] ( identifier[exc] ),** identifier[kwargs] )
identifier[result] ={ literal[string] : identifier[str] ( identifier[exc] )}
keyword[return] identifier[result] | def tenants_list(**kwargs):
"""
.. versionadded:: 2019.2.0
List all tenants for your account.
CLI Example:
.. code-block:: bash
salt-call azurearm_resource.tenants_list
"""
result = {}
subconn = __utils__['azurearm.get_client']('subscription', **kwargs)
try:
tenants = __utils__['azurearm.paged_object_to_list'](subconn.tenants.list())
for tenant in tenants:
result[tenant['tenant_id']] = tenant # depends on [control=['for'], data=['tenant']] # depends on [control=['try'], data=[]]
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('resource', str(exc), **kwargs)
result = {'error': str(exc)} # depends on [control=['except'], data=['exc']]
return result |
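
A standalone illustration of the shape of the return value: the paged tenant records are re-keyed by tenant_id. The records below are fake, and any field besides tenant_id is hypothetical; a real call needs Azure credentials supplied via kwargs:

tenants = [
    {'tenant_id': '11111111-1111-1111-1111-111111111111', 'display_name': 'Contoso'},
    {'tenant_id': '22222222-2222-2222-2222-222222222222', 'display_name': 'Fabrikam'},
]
result = {tenant['tenant_id']: tenant for tenant in tenants}
print(sorted(result))  # the two tenant ids, sorted
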
def write_single_coil(slave_id, address, value):
""" Return ADU for Modbus function code 05: Write Single Coil.
:param slave_id: Number of slave.
:param address: Address of the coil to write.
:param value: Value to write to the coil.
:return: Byte array with ADU.
"""
function = WriteSingleCoil()
function.address = address
function.value = value
return _create_request_adu(slave_id, function.request_pdu) | def function[write_single_coil, parameter[slave_id, address, value]]:
constant[ Return ADU for Modbus function code 05: Write Single Coil.
:param slave_id: Number of slave.
:param address: Address of the coil to write.
:param value: Value to write to the coil.
:return: Byte array with ADU.
]
variable[function] assign[=] call[name[WriteSingleCoil], parameter[]]
name[function].address assign[=] name[address]
name[function].value assign[=] name[value]
return[call[name[_create_request_adu], parameter[name[slave_id], name[function].request_pdu]]] | keyword[def] identifier[write_single_coil] ( identifier[slave_id] , identifier[address] , identifier[value] ):
literal[string]
identifier[function] = identifier[WriteSingleCoil] ()
identifier[function] . identifier[address] = identifier[address]
identifier[function] . identifier[value] = identifier[value]
keyword[return] identifier[_create_request_adu] ( identifier[slave_id] , identifier[function] . identifier[request_pdu] ) | def write_single_coil(slave_id, address, value):
""" Return ADU for Modbus function code 05: Write Single Coil.
:param slave_id: Number of slave.
:param address: Address of the coil to write.
:param value: Value to write to the coil.
:return: Byte array with ADU.
"""
function = WriteSingleCoil()
function.address = address
function.value = value
return _create_request_adu(slave_id, function.request_pdu) |
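
This matches the shape of the uModbus client helpers; a hedged call sketch assuming that library and a reachable Modbus/TCP endpoint (the host, port, slave id, and coil address are placeholders):

import socket

from umodbus.client import tcp   # assumption: uModbus is the library in use

with socket.create_connection(('localhost', 502)) as sock:
    adu = tcp.write_single_coil(slave_id=1, address=9, value=1)
    response = tcp.send_message(adu, sock)
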
def replace(self, **kwargs):
"""
Return: a new :class:`AssetLocation` with specific ``kwargs`` replacing
their corresponding values.
Using AssetLocator's replace function results in a mismatch of __init__ args and kwargs.
Replace tries to instantiate an AssetLocation object with AssetLocator's args and kwargs.
"""
# NOTE: Deprecation value is hard coded as True in __init__ and therefore does not need to be passed through.
return AssetLocation(
kwargs.pop('org', self.org),
kwargs.pop('course', self.course),
kwargs.pop('run', self.run),
kwargs.pop('category', self.block_type),
kwargs.pop('name', self.block_id),
revision=kwargs.pop('revision', self.branch),
**kwargs
) | def function[replace, parameter[self]]:
constant[
Return: a new :class:`AssetLocation` with specific ``kwargs`` replacing
their corresponding values.
Using AssetLocator's replace function results in a mismatch of __init__ args and kwargs.
Replace tries to instantiate an AssetLocation object with AssetLocator's args and kwargs.
]
return[call[name[AssetLocation], parameter[call[name[kwargs].pop, parameter[constant[org], name[self].org]], call[name[kwargs].pop, parameter[constant[course], name[self].course]], call[name[kwargs].pop, parameter[constant[run], name[self].run]], call[name[kwargs].pop, parameter[constant[category], name[self].block_type]], call[name[kwargs].pop, parameter[constant[name], name[self].block_id]]]]] | keyword[def] identifier[replace] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[AssetLocation] (
identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[org] ),
identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[course] ),
identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[run] ),
identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[block_type] ),
identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[block_id] ),
identifier[revision] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[branch] ),
** identifier[kwargs]
) | def replace(self, **kwargs):
"""
Return: a new :class:`AssetLocation` with specific ``kwargs`` replacing
their corresponding values.
Using AssetLocator's replace function results in a mismatch of __init__ args and kwargs.
Replace tries to instantiate an AssetLocation object with AssetLocator's args and kwargs.
"""
# NOTE: Deprecation value is hard coded as True in __init__ and therefore does not need to be passed through.
return AssetLocation(kwargs.pop('org', self.org), kwargs.pop('course', self.course), kwargs.pop('run', self.run), kwargs.pop('category', self.block_type), kwargs.pop('name', self.block_id), revision=kwargs.pop('revision', self.branch), **kwargs) |
def from_parse_args(cls, args):
"""Constructor from command line args.
:param args: parsed command line arguments
:type args: argparse.Namespace
"""
return cls(args.migration_file,
args.database,
db_user=args.db_user,
db_password=args.db_password,
db_port=args.db_port,
db_host=args.db_host,
mode=args.mode,
allow_serie=args.allow_serie,
force_version=args.force_version,
web_host=args.web_host,
web_port=args.web_port,
web_custom_html=args.web_custom_html,
) | def function[from_parse_args, parameter[cls, args]]:
constant[Constructor from command line args.
:param args: parsed command line arguments
:type args: argparse.Namespace
]
return[call[name[cls], parameter[name[args].migration_file, name[args].database]]] | keyword[def] identifier[from_parse_args] ( identifier[cls] , identifier[args] ):
literal[string]
keyword[return] identifier[cls] ( identifier[args] . identifier[migration_file] ,
identifier[args] . identifier[database] ,
identifier[db_user] = identifier[args] . identifier[db_user] ,
identifier[db_password] = identifier[args] . identifier[db_password] ,
identifier[db_port] = identifier[args] . identifier[db_port] ,
identifier[db_host] = identifier[args] . identifier[db_host] ,
identifier[mode] = identifier[args] . identifier[mode] ,
identifier[allow_serie] = identifier[args] . identifier[allow_serie] ,
identifier[force_version] = identifier[args] . identifier[force_version] ,
identifier[web_host] = identifier[args] . identifier[web_host] ,
identifier[web_port] = identifier[args] . identifier[web_port] ,
identifier[web_custom_html] = identifier[args] . identifier[web_custom_html] ,
) | def from_parse_args(cls, args):
"""Constructor from command line args.
:param args: parsed command line arguments
:type args: argparse.Namespace
"""
return cls(args.migration_file, args.database, db_user=args.db_user, db_password=args.db_password, db_port=args.db_port, db_host=args.db_host, mode=args.mode, allow_serie=args.allow_serie, force_version=args.force_version, web_host=args.web_host, web_port=args.web_port, web_custom_html=args.web_custom_html) |
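
A hedged sketch of the argparse wiring this constructor expects: only the attribute names on the parsed namespace matter, so the flag spellings and defaults below are illustrative, not the project's actual CLI:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('migration_file')
parser.add_argument('database')
parser.add_argument('--db-user', dest='db_user', default=None)
parser.add_argument('--db-password', dest='db_password', default=None)
parser.add_argument('--db-port', dest='db_port', type=int, default=5432)
parser.add_argument('--db-host', dest='db_host', default='localhost')
parser.add_argument('--mode', dest='mode', default=None)
parser.add_argument('--allow-serie', dest='allow_serie', action='store_true')
parser.add_argument('--force-version', dest='force_version', default=None)
parser.add_argument('--web-host', dest='web_host', default='localhost')
parser.add_argument('--web-port', dest='web_port', type=int, default=8080)
parser.add_argument('--web-custom-html', dest='web_custom_html', default=None)

args = parser.parse_args(['migration.yml', 'mydb'])
# runner = SomeRunner.from_parse_args(args)   # hypothetical owner class
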
def create(cls, schema, name):
"""
Create an object based on the root tag name.
@param schema: A schema object.
@type schema: L{schema.Schema}
@param name: The name.
@type name: str
@return: The created object.
@rtype: L{XBuiltin}
"""
fn = cls.tags.get(name, XBuiltin)
return fn(schema, name) | def function[create, parameter[cls, schema, name]]:
constant[
Create an object based on the root tag name.
@param schema: A schema object.
@type schema: L{schema.Schema}
@param name: The name.
@type name: str
@return: The created object.
@rtype: L{XBuiltin}
]
variable[fn] assign[=] call[name[cls].tags.get, parameter[name[name], name[XBuiltin]]]
return[call[name[fn], parameter[name[schema], name[name]]]] | keyword[def] identifier[create] ( identifier[cls] , identifier[schema] , identifier[name] ):
literal[string]
identifier[fn] = identifier[cls] . identifier[tags] . identifier[get] ( identifier[name] , identifier[XBuiltin] )
keyword[return] identifier[fn] ( identifier[schema] , identifier[name] ) | def create(cls, schema, name):
"""
Create an object based on the root tag name.
@param schema: A schema object.
@type schema: L{schema.Schema}
@param name: The name.
@type name: str
@return: The created object.
@rtype: L{XBuiltin}
"""
fn = cls.tags.get(name, XBuiltin)
return fn(schema, name) |
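
A standalone illustration of the dispatch-table factory pattern used above: unknown tag names fall back to the base class. The classes here are minimal stand-ins:

class XBuiltin(object):
    def __init__(self, schema, name):
        self.schema = schema
        self.name = name

class XString(XBuiltin):
    pass

class Factory(object):
    tags = {'string': XString}   # per-tag constructors; default is XBuiltin

    @classmethod
    def create(cls, schema, name):
        fn = cls.tags.get(name, XBuiltin)
        return fn(schema, name)

print(type(Factory.create(None, 'string')).__name__)   # XString
print(type(Factory.create(None, 'unknown')).__name__)  # XBuiltin
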
def delete(self, force=False, volumes=False, **kwargs):
"""
Remove this container; kwargs indicate that some container runtimes
might accept more parameters.
:param force: bool, force the removal if the container engine supports it
:param volumes: bool, also remove the associated volumes
:return: None
"""
self.d.remove_container(self.get_id(), v=volumes, force=force) | def function[delete, parameter[self, force, volumes]]:
constant[
Remove this container; kwargs indicate that some container runtimes
might accept more parameters.
:param force: bool, force the removal if the container engine supports it
:param volumes: bool, also remove the associated volumes
:return: None
]
call[name[self].d.remove_container, parameter[call[name[self].get_id, parameter[]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[force] = keyword[False] , identifier[volumes] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[d] . identifier[remove_container] ( identifier[self] . identifier[get_id] (), identifier[v] = identifier[volumes] , identifier[force] = identifier[force] ) | def delete(self, force=False, volumes=False, **kwargs):
"""
Remove this container; kwargs indicate that some container runtimes
might accept more parameters.
:param force: bool, force the removal if the container engine supports it
:param volumes: bool, also remove the associated volumes
:return: None
"""
self.d.remove_container(self.get_id(), v=volumes, force=force) |
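
The self.d attribute above has the shape of a docker-py low-level APIClient; a hedged sketch under that assumption (the container id is a placeholder, and a Docker daemon must be reachable):

import docker   # assumption: docker-py provides the client

d = docker.APIClient()
container_id = 'deadbeefcafe'   # hypothetical container id
d.remove_container(container_id, v=True, force=True)
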
def _add_factor(self, factor):
"""
Add a factor to the workflow
:param factor: The factor object
:type factor: Factor | MultiOutputFactor | NodeCreationFactor
:return: None
"""
self.factors.append(factor)
logging.info("Added factor with tool {} ".format(factor.tool)) | def function[_add_factor, parameter[self, factor]]:
constant[
Add a factor to the workflow
:param factor: The factor object
:type factor: Factor | MultiOutputFactor | NodeCreationFactor
:return: None
]
call[name[self].factors.append, parameter[name[factor]]]
call[name[logging].info, parameter[call[constant[Added factor with tool {} ].format, parameter[name[factor].tool]]]] | keyword[def] identifier[_add_factor] ( identifier[self] , identifier[factor] ):
literal[string]
identifier[self] . identifier[factors] . identifier[append] ( identifier[factor] )
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[factor] . identifier[tool] )) | def _add_factor(self, factor):
"""
Add a factor to the workflow
:param factor: The factor object
:type factor: Factor | MultiOutputFactor | NodeCreationFactor
:return: None
"""
self.factors.append(factor)
logging.info('Added factor with tool {} '.format(factor.tool)) |
def _set_roi_mask(self, roi_mask):
"""Sets a new ROI mask."""
if isinstance(roi_mask,
np.ndarray): # not (roi_mask is None or roi_mask=='auto'):
self._verify_shape_compatibility(roi_mask, 'ROI set')
self.roi_mask = roi_mask
self.roi_list = np.unique(roi_mask.flatten())
            self.roi_list = np.setdiff1d(self.roi_list, cfg.background_value)
else:
self.roi_mask = np.ones(self.carpet.shape[:-1]) # last dim is self.fixed_dim already
self.roi_list = [1, ] | def function[_set_roi_mask, parameter[self, roi_mask]]:
constant[Sets a new ROI mask.]
if call[name[isinstance], parameter[name[roi_mask], name[np].ndarray]] begin[:]
call[name[self]._verify_shape_compatibility, parameter[name[roi_mask], constant[ROI set]]]
name[self].roi_mask assign[=] name[roi_mask]
name[self].roi_list assign[=] call[name[np].unique, parameter[call[name[roi_mask].flatten, parameter[]]]]
        name[self].roi_list assign[=] call[name[np].setdiff1d, parameter[name[self].roi_list, name[cfg].background_value]]
literal[string]
keyword[if] identifier[isinstance] ( identifier[roi_mask] ,
identifier[np] . identifier[ndarray] ):
identifier[self] . identifier[_verify_shape_compatibility] ( identifier[roi_mask] , literal[string] )
identifier[self] . identifier[roi_mask] = identifier[roi_mask]
identifier[self] . identifier[roi_list] = identifier[np] . identifier[unique] ( identifier[roi_mask] . identifier[flatten] ())
        identifier[self] . identifier[roi_list] = identifier[np] . identifier[setdiff1d] ( identifier[self] . identifier[roi_list] , identifier[cfg] . identifier[background_value] )
keyword[else] :
identifier[self] . identifier[roi_mask] = identifier[np] . identifier[ones] ( identifier[self] . identifier[carpet] . identifier[shape] [:- literal[int] ])
identifier[self] . identifier[roi_list] =[ literal[int] ,] | def _set_roi_mask(self, roi_mask):
"""Sets a new ROI mask."""
if isinstance(roi_mask, np.ndarray): # not (roi_mask is None or roi_mask=='auto'):
self._verify_shape_compatibility(roi_mask, 'ROI set')
self.roi_mask = roi_mask
self.roi_list = np.unique(roi_mask.flatten())
            self.roi_list = np.setdiff1d(self.roi_list, cfg.background_value) # depends on [control=['if'], data=[]]
else:
self.roi_mask = np.ones(self.carpet.shape[:-1]) # last dim is self.fixed_dim already
self.roi_list = [1] |
def Zabransky_quasi_polynomial_integral_over_T(T, Tc, a1, a2, a3, a4, a5, a6):
r'''Calculates the integral of liquid heat capacity over T using the
quasi-polynomial model developed in [1]_.
Parameters
----------
T : float
Temperature [K]
a1-a6 : float
Coefficients
Returns
-------
S : float
Difference in entropy from 0 K, [J/mol/K]
Notes
-----
The analytical integral was derived with Sympy. It requires the
Polylog(2,x) function, which is unimplemented in SciPy. A very accurate
numerical approximation was implemented as :obj:`thermo.utils.polylog2`.
Relatively slow due to the use of that special function.
Examples
--------
>>> S2 = Zabransky_quasi_polynomial_integral_over_T(300, 591.79, -3.12743,
... 0.0857315, 13.7282, 1.28971, 6.42297, 4.10989)
>>> S1 = Zabransky_quasi_polynomial_integral_over_T(200, 591.79, -3.12743,
... 0.0857315, 13.7282, 1.28971, 6.42297, 4.10989)
>>> S2 - S1
59.16997291893654
References
----------
.. [1] Zabransky, M., V. Ruzicka Jr, V. Majer, and Eugene S. Domalski.
Heat Capacity of Liquids: Critical Review and Recommended Values.
2 Volume Set. Washington, D.C.: Amer Inst of Physics, 1996.
'''
term = T - Tc
logT = log(T)
Tc2 = Tc*Tc
Tc3 = Tc2*Tc
return R*(a3*logT -a1*polylog2(T/Tc) - a2*(-logT + 0.5*log(term*term))
+ T*(T*(T*a6/(3.*Tc3) + a5/(2.*Tc2)) + a4/Tc)) | def function[Zabransky_quasi_polynomial_integral_over_T, parameter[T, Tc, a1, a2, a3, a4, a5, a6]]:
constant[Calculates the integral of liquid heat capacity over T using the
quasi-polynomial model developed in [1]_.
Parameters
----------
T : float
Temperature [K]
a1-a6 : float
Coefficients
Returns
-------
S : float
Difference in entropy from 0 K, [J/mol/K]
Notes
-----
The analytical integral was derived with Sympy. It requires the
Polylog(2,x) function, which is unimplemented in SciPy. A very accurate
numerical approximation was implemented as :obj:`thermo.utils.polylog2`.
Relatively slow due to the use of that special function.
Examples
--------
>>> S2 = Zabransky_quasi_polynomial_integral_over_T(300, 591.79, -3.12743,
... 0.0857315, 13.7282, 1.28971, 6.42297, 4.10989)
>>> S1 = Zabransky_quasi_polynomial_integral_over_T(200, 591.79, -3.12743,
... 0.0857315, 13.7282, 1.28971, 6.42297, 4.10989)
>>> S2 - S1
59.16997291893654
References
----------
.. [1] Zabransky, M., V. Ruzicka Jr, V. Majer, and Eugene S. Domalski.
Heat Capacity of Liquids: Critical Review and Recommended Values.
2 Volume Set. Washington, D.C.: Amer Inst of Physics, 1996.
]
variable[term] assign[=] binary_operation[name[T] - name[Tc]]
variable[logT] assign[=] call[name[log], parameter[name[T]]]
variable[Tc2] assign[=] binary_operation[name[Tc] * name[Tc]]
variable[Tc3] assign[=] binary_operation[name[Tc2] * name[Tc]]
return[binary_operation[name[R] * binary_operation[binary_operation[binary_operation[binary_operation[name[a3] * name[logT]] - binary_operation[name[a1] * call[name[polylog2], parameter[binary_operation[name[T] / name[Tc]]]]]] - binary_operation[name[a2] * binary_operation[<ast.UnaryOp object at 0x7da1b021c100> + binary_operation[constant[0.5] * call[name[log], parameter[binary_operation[name[term] * name[term]]]]]]]] + binary_operation[name[T] * binary_operation[binary_operation[name[T] * binary_operation[binary_operation[binary_operation[name[T] * name[a6]] / binary_operation[constant[3.0] * name[Tc3]]] + binary_operation[name[a5] / binary_operation[constant[2.0] * name[Tc2]]]]] + binary_operation[name[a4] / name[Tc]]]]]]] | keyword[def] identifier[Zabransky_quasi_polynomial_integral_over_T] ( identifier[T] , identifier[Tc] , identifier[a1] , identifier[a2] , identifier[a3] , identifier[a4] , identifier[a5] , identifier[a6] ):
literal[string]
identifier[term] = identifier[T] - identifier[Tc]
identifier[logT] = identifier[log] ( identifier[T] )
identifier[Tc2] = identifier[Tc] * identifier[Tc]
identifier[Tc3] = identifier[Tc2] * identifier[Tc]
keyword[return] identifier[R] *( identifier[a3] * identifier[logT] - identifier[a1] * identifier[polylog2] ( identifier[T] / identifier[Tc] )- identifier[a2] *(- identifier[logT] + literal[int] * identifier[log] ( identifier[term] * identifier[term] ))
+ identifier[T] *( identifier[T] *( identifier[T] * identifier[a6] /( literal[int] * identifier[Tc3] )+ identifier[a5] /( literal[int] * identifier[Tc2] ))+ identifier[a4] / identifier[Tc] )) | def Zabransky_quasi_polynomial_integral_over_T(T, Tc, a1, a2, a3, a4, a5, a6):
"""Calculates the integral of liquid heat capacity over T using the
quasi-polynomial model developed in [1]_.
Parameters
----------
T : float
Temperature [K]
a1-a6 : float
Coefficients
Returns
-------
S : float
Difference in entropy from 0 K, [J/mol/K]
Notes
-----
The analytical integral was derived with Sympy. It requires the
Polylog(2,x) function, which is unimplemented in SciPy. A very accurate
numerical approximation was implemented as :obj:`thermo.utils.polylog2`.
Relatively slow due to the use of that special function.
Examples
--------
>>> S2 = Zabransky_quasi_polynomial_integral_over_T(300, 591.79, -3.12743,
... 0.0857315, 13.7282, 1.28971, 6.42297, 4.10989)
>>> S1 = Zabransky_quasi_polynomial_integral_over_T(200, 591.79, -3.12743,
... 0.0857315, 13.7282, 1.28971, 6.42297, 4.10989)
>>> S2 - S1
59.16997291893654
References
----------
.. [1] Zabransky, M., V. Ruzicka Jr, V. Majer, and Eugene S. Domalski.
Heat Capacity of Liquids: Critical Review and Recommended Values.
2 Volume Set. Washington, D.C.: Amer Inst of Physics, 1996.
"""
term = T - Tc
logT = log(T)
Tc2 = Tc * Tc
Tc3 = Tc2 * Tc
return R * (a3 * logT - a1 * polylog2(T / Tc) - a2 * (-logT + 0.5 * log(term * term)) + T * (T * (T * a6 / (3.0 * Tc3) + a5 / (2.0 * Tc2)) + a4 / Tc)) |
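Transcribing the return expression into closed form (assuming T_c is the critical temperature and Li_2 the dilogarithm approximated by thermo.utils.polylog2):

\int \frac{C_p}{T}\,dT = R\left[a_3\ln T - a_1\,\mathrm{Li}_2\!\left(\frac{T}{T_c}\right) - a_2\left(\tfrac{1}{2}\ln\,(T-T_c)^2 - \ln T\right) + \frac{a_4 T}{T_c} + \frac{a_5 T^2}{2T_c^2} + \frac{a_6 T^3}{3T_c^3}\right]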
def write_feedback(self, feedback, cr=True):
""" Store feedback. Keep specified feedback as previous output
:param feedback: data to store
:param cr: whether to write carriage return to the end or not
:return: None
"""
self.__previous_data += feedback
if cr is True:
self.__previous_data += '\n' | def function[write_feedback, parameter[self, feedback, cr]]:
constant[ Store feedback by appending it to the previous output.
        :param feedback: data to store
        :param cr: whether to append a newline at the end or not
:return: None
]
<ast.AugAssign object at 0x7da1b2585b70>
if compare[name[cr] is constant[True]] begin[:]
<ast.AugAssign object at 0x7da1b2586290> | keyword[def] identifier[write_feedback] ( identifier[self] , identifier[feedback] , identifier[cr] = keyword[True] ):
literal[string]
identifier[self] . identifier[__previous_data] += identifier[feedback]
keyword[if] identifier[cr] keyword[is] keyword[True] :
identifier[self] . identifier[__previous_data] += literal[string] | def write_feedback(self, feedback, cr=True):
""" Store feedback. Keep specified feedback as previous output
:param feedback: data to store
:param cr: whether to write carriage return to the end or not
:return: None
"""
self.__previous_data += feedback
if cr is True:
self.__previous_data += '\n' # depends on [control=['if'], data=[]] |
def _create_s(y, h):
"""Estimate secants"""
s = np.zeros_like(y)
s[:-1] = (y[1:] - y[:-1]) / h[:-1]
s[-1] = 0.0
return s | def function[_create_s, parameter[y, h]]:
constant[Estimate secants]
variable[s] assign[=] call[name[np].zeros_like, parameter[name[y]]]
call[name[s]][<ast.Slice object at 0x7da20c6c6950>] assign[=] binary_operation[binary_operation[call[name[y]][<ast.Slice object at 0x7da20c6c5ae0>] - call[name[y]][<ast.Slice object at 0x7da20c6c4c10>]] / call[name[h]][<ast.Slice object at 0x7da20c6c79d0>]]
call[name[s]][<ast.UnaryOp object at 0x7da20c6c6d10>] assign[=] constant[0.0]
return[name[s]] | keyword[def] identifier[_create_s] ( identifier[y] , identifier[h] ):
literal[string]
identifier[s] = identifier[np] . identifier[zeros_like] ( identifier[y] )
identifier[s] [:- literal[int] ]=( identifier[y] [ literal[int] :]- identifier[y] [:- literal[int] ])/ identifier[h] [:- literal[int] ]
identifier[s] [- literal[int] ]= literal[int]
keyword[return] identifier[s] | def _create_s(y, h):
"""Estimate secants"""
s = np.zeros_like(y)
s[:-1] = (y[1:] - y[:-1]) / h[:-1]
s[-1] = 0.0
return s |
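In formula form, with h the array of interval widths, the secants computed above are

s_i = \frac{y_{i+1} - y_i}{h_i}, \quad i = 0, \dots, n-2, \qquad s_{n-1} = 0 .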
def to_dict(self):
"""
Prepare a JSON serializable dict for read-only purposes.
Includes storages and IP-addresses.
Use prepare_post_body for POST and .save() for PUT.
"""
fields = dict(vars(self).items())
if self.populated:
fields['ip_addresses'] = []
fields['storage_devices'] = []
for ip in self.ip_addresses:
fields['ip_addresses'].append({
'address': ip.address,
'access': ip.access,
'family': ip.family
})
for storage in self.storage_devices:
fields['storage_devices'].append({
'address': storage.address,
'storage': storage.uuid,
'storage_size': storage.size,
'storage_title': storage.title,
'type': storage.type,
})
del fields['populated']
del fields['cloud_manager']
return fields | def function[to_dict, parameter[self]]:
constant[
Prepare a JSON serializable dict for read-only purposes.
Includes storages and IP-addresses.
Use prepare_post_body for POST and .save() for PUT.
]
variable[fields] assign[=] call[name[dict], parameter[call[call[name[vars], parameter[name[self]]].items, parameter[]]]]
if name[self].populated begin[:]
call[name[fields]][constant[ip_addresses]] assign[=] list[[]]
call[name[fields]][constant[storage_devices]] assign[=] list[[]]
for taget[name[ip]] in starred[name[self].ip_addresses] begin[:]
call[call[name[fields]][constant[ip_addresses]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0eed4b0>, <ast.Constant object at 0x7da1b0eec460>, <ast.Constant object at 0x7da1b0eeeef0>], [<ast.Attribute object at 0x7da1b0eeec50>, <ast.Attribute object at 0x7da1b0eec940>, <ast.Attribute object at 0x7da1b0eeca30>]]]]
for taget[name[storage]] in starred[name[self].storage_devices] begin[:]
call[call[name[fields]][constant[storage_devices]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0eec6a0>, <ast.Constant object at 0x7da1b0ea1690>, <ast.Constant object at 0x7da1b0ea2830>, <ast.Constant object at 0x7da1b0ea15a0>, <ast.Constant object at 0x7da1b0ea0f70>], [<ast.Attribute object at 0x7da1b0ea1c30>, <ast.Attribute object at 0x7da1b0ea12a0>, <ast.Attribute object at 0x7da1b0ea18d0>, <ast.Attribute object at 0x7da1b0ea24d0>, <ast.Attribute object at 0x7da1b0ea0910>]]]]
<ast.Delete object at 0x7da1b0ea1840>
<ast.Delete object at 0x7da1b0ea2590>
return[name[fields]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[fields] = identifier[dict] ( identifier[vars] ( identifier[self] ). identifier[items] ())
keyword[if] identifier[self] . identifier[populated] :
identifier[fields] [ literal[string] ]=[]
identifier[fields] [ literal[string] ]=[]
keyword[for] identifier[ip] keyword[in] identifier[self] . identifier[ip_addresses] :
identifier[fields] [ literal[string] ]. identifier[append] ({
literal[string] : identifier[ip] . identifier[address] ,
literal[string] : identifier[ip] . identifier[access] ,
literal[string] : identifier[ip] . identifier[family]
})
keyword[for] identifier[storage] keyword[in] identifier[self] . identifier[storage_devices] :
identifier[fields] [ literal[string] ]. identifier[append] ({
literal[string] : identifier[storage] . identifier[address] ,
literal[string] : identifier[storage] . identifier[uuid] ,
literal[string] : identifier[storage] . identifier[size] ,
literal[string] : identifier[storage] . identifier[title] ,
literal[string] : identifier[storage] . identifier[type] ,
})
keyword[del] identifier[fields] [ literal[string] ]
keyword[del] identifier[fields] [ literal[string] ]
keyword[return] identifier[fields] | def to_dict(self):
"""
Prepare a JSON serializable dict for read-only purposes.
Includes storages and IP-addresses.
Use prepare_post_body for POST and .save() for PUT.
"""
fields = dict(vars(self).items())
if self.populated:
fields['ip_addresses'] = []
fields['storage_devices'] = []
for ip in self.ip_addresses:
fields['ip_addresses'].append({'address': ip.address, 'access': ip.access, 'family': ip.family}) # depends on [control=['for'], data=['ip']]
for storage in self.storage_devices:
fields['storage_devices'].append({'address': storage.address, 'storage': storage.uuid, 'storage_size': storage.size, 'storage_title': storage.title, 'type': storage.type}) # depends on [control=['for'], data=['storage']] # depends on [control=['if'], data=[]]
del fields['populated']
del fields['cloud_manager']
return fields |
def build(self, link_type, path):
        """
        :param link_type: Link type
        :param path: Link target
        """
        super(HeadLink, self).build()
self.target = path
self.link_type = link_type
self.autoclosing = True | def function[build, parameter[self, link_type, path]]:
    constant[
        :param link_type: Link type
        :param path: Link target
        ]
    call[call[name[super], parameter[name[HeadLink], name[self]]].build, parameter[]]
name[self].target assign[=] name[path]
name[self].link_type assign[=] name[link_type]
name[self].autoclosing assign[=] constant[True] | keyword[def] identifier[build] ( identifier[self] , identifier[link_type] , identifier[path] ):
    literal[string]
    identifier[super] ( identifier[HeadLink] , identifier[self] ). identifier[build] ()
identifier[self] . identifier[target] = identifier[path]
identifier[self] . identifier[link_type] = identifier[link_type]
identifier[self] . identifier[autoclosing] = keyword[True] | def build(self, link_type, path):
        """
        :param link_type: Link type
        :param path: Link target
        """
        super(HeadLink, self).build()
self.target = path
self.link_type = link_type
self.autoclosing = True |
def initWithComplexQuery(query):
"""
create a query using a complex article query
"""
q = QueryArticles()
# provided an instance of ComplexArticleQuery
if isinstance(query, ComplexArticleQuery):
q._setVal("query", json.dumps(query.getQuery()))
# provided query as a string containing the json object
elif isinstance(query, six.string_types):
foo = json.loads(query)
q._setVal("query", query)
# provided query as a python dict
elif isinstance(query, dict):
q._setVal("query", json.dumps(query))
else:
assert False, "The instance of query parameter was not a ComplexArticleQuery, a string or a python dict"
return q | def function[initWithComplexQuery, parameter[query]]:
constant[
create a query using a complex article query
]
variable[q] assign[=] call[name[QueryArticles], parameter[]]
if call[name[isinstance], parameter[name[query], name[ComplexArticleQuery]]] begin[:]
call[name[q]._setVal, parameter[constant[query], call[name[json].dumps, parameter[call[name[query].getQuery, parameter[]]]]]]
return[name[q]] | keyword[def] identifier[initWithComplexQuery] ( identifier[query] ):
literal[string]
identifier[q] = identifier[QueryArticles] ()
keyword[if] identifier[isinstance] ( identifier[query] , identifier[ComplexArticleQuery] ):
identifier[q] . identifier[_setVal] ( literal[string] , identifier[json] . identifier[dumps] ( identifier[query] . identifier[getQuery] ()))
keyword[elif] identifier[isinstance] ( identifier[query] , identifier[six] . identifier[string_types] ):
identifier[foo] = identifier[json] . identifier[loads] ( identifier[query] )
identifier[q] . identifier[_setVal] ( literal[string] , identifier[query] )
keyword[elif] identifier[isinstance] ( identifier[query] , identifier[dict] ):
identifier[q] . identifier[_setVal] ( literal[string] , identifier[json] . identifier[dumps] ( identifier[query] ))
keyword[else] :
keyword[assert] keyword[False] , literal[string]
keyword[return] identifier[q] | def initWithComplexQuery(query):
"""
create a query using a complex article query
"""
q = QueryArticles()
# provided an instance of ComplexArticleQuery
if isinstance(query, ComplexArticleQuery):
q._setVal('query', json.dumps(query.getQuery())) # depends on [control=['if'], data=[]]
# provided query as a string containing the json object
elif isinstance(query, six.string_types):
foo = json.loads(query)
q._setVal('query', query) # depends on [control=['if'], data=[]]
# provided query as a python dict
elif isinstance(query, dict):
q._setVal('query', json.dumps(query)) # depends on [control=['if'], data=[]]
else:
assert False, 'The instance of query parameter was not a ComplexArticleQuery, a string or a python dict'
return q |
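A usage sketch of the three accepted input forms, assuming initWithComplexQuery is exposed as a static method of QueryArticles (the decorator appears stripped in this listing) and using a hypothetical query body; note that for string input the json.loads call above only validates the JSON and discards the parsed result:

import json

payload = {'$query': {'keyword': 'climate'}}  # hypothetical complex-query body
q1 = QueryArticles.initWithComplexQuery(payload)              # python dict
q2 = QueryArticles.initWithComplexQuery(json.dumps(payload))  # JSON string
# q3 = QueryArticles.initWithComplexQuery(some_complex_article_query)  # builder object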
def execute(self):
"""
Executes the command.
"""
from vsgen.util.logger import VSGLogger
VSGLogger.info(self._logname, self._message)
        start = time.perf_counter()
        VSGWriter.write(self._writables, self._parallel)
        end = time.perf_counter()
VSGLogger.info(self._logname, "Wrote %s files in %s seconds:", len(self._writables), end - start) | def function[execute, parameter[self]]:
constant[
Executes the command.
]
from relative_module[vsgen.util.logger] import module[VSGLogger]
call[name[VSGLogger].info, parameter[name[self]._logname, name[self]._message]]
    variable[start] assign[=] call[name[time].perf_counter, parameter[]]
    call[name[VSGWriter].write, parameter[name[self]._writables, name[self]._parallel]]
    variable[end] assign[=] call[name[time].perf_counter, parameter[]]
call[name[VSGLogger].info, parameter[name[self]._logname, constant[Wrote %s files in %s seconds:], call[name[len], parameter[name[self]._writables]], binary_operation[name[end] - name[start]]]] | keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
keyword[from] identifier[vsgen] . identifier[util] . identifier[logger] keyword[import] identifier[VSGLogger]
identifier[VSGLogger] . identifier[info] ( identifier[self] . identifier[_logname] , identifier[self] . identifier[_message] )
    identifier[start] = identifier[time] . identifier[perf_counter] ()
    identifier[VSGWriter] . identifier[write] ( identifier[self] . identifier[_writables] , identifier[self] . identifier[_parallel] )
    identifier[end] = identifier[time] . identifier[perf_counter] ()
identifier[VSGLogger] . identifier[info] ( identifier[self] . identifier[_logname] , literal[string] , identifier[len] ( identifier[self] . identifier[_writables] ), identifier[end] - identifier[start] ) | def execute(self):
"""
Executes the command.
"""
from vsgen.util.logger import VSGLogger
VSGLogger.info(self._logname, self._message)
        start = time.perf_counter()
        VSGWriter.write(self._writables, self._parallel)
        end = time.perf_counter()
VSGLogger.info(self._logname, 'Wrote %s files in %s seconds:', len(self._writables), end - start) |
def _prune_beam(states: List[State],
beam_size: int,
sort_states: bool = False) -> List[State]:
"""
This method can be used to prune the set of unfinished states on a beam or finished states
    at the end of search. In the former case, the states need not be sorted because they all come
from the same decoding step, which does the sorting. However, if the states are finished and
this method is called at the end of the search, they need to be sorted because they come
from different decoding steps.
"""
states_by_batch_index: Dict[int, List[State]] = defaultdict(list)
for state in states:
assert len(state.batch_indices) == 1
batch_index = state.batch_indices[0]
states_by_batch_index[batch_index].append(state)
pruned_states = []
for _, instance_states in states_by_batch_index.items():
if sort_states:
scores = torch.cat([state.score[0].view(-1) for state in instance_states])
_, sorted_indices = scores.sort(-1, descending=True)
sorted_states = [instance_states[i] for i in sorted_indices.detach().cpu().numpy()]
instance_states = sorted_states
for state in instance_states[:beam_size]:
pruned_states.append(state)
return pruned_states | def function[_prune_beam, parameter[states, beam_size, sort_states]]:
constant[
This method can be used to prune the set of unfinished states on a beam or finished states
    at the end of search. In the former case, the states need not be sorted because they all come
from the same decoding step, which does the sorting. However, if the states are finished and
this method is called at the end of the search, they need to be sorted because they come
from different decoding steps.
]
<ast.AnnAssign object at 0x7da2044c3100>
for taget[name[state]] in starred[name[states]] begin[:]
assert[compare[call[name[len], parameter[name[state].batch_indices]] equal[==] constant[1]]]
variable[batch_index] assign[=] call[name[state].batch_indices][constant[0]]
call[call[name[states_by_batch_index]][name[batch_index]].append, parameter[name[state]]]
variable[pruned_states] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1e17550>, <ast.Name object at 0x7da1b1e15b70>]]] in starred[call[name[states_by_batch_index].items, parameter[]]] begin[:]
if name[sort_states] begin[:]
variable[scores] assign[=] call[name[torch].cat, parameter[<ast.ListComp object at 0x7da1b1e14a00>]]
<ast.Tuple object at 0x7da204960070> assign[=] call[name[scores].sort, parameter[<ast.UnaryOp object at 0x7da204960e50>]]
variable[sorted_states] assign[=] <ast.ListComp object at 0x7da204960610>
variable[instance_states] assign[=] name[sorted_states]
for taget[name[state]] in starred[call[name[instance_states]][<ast.Slice object at 0x7da1b1f94430>]] begin[:]
call[name[pruned_states].append, parameter[name[state]]]
return[name[pruned_states]] | keyword[def] identifier[_prune_beam] ( identifier[states] : identifier[List] [ identifier[State] ],
identifier[beam_size] : identifier[int] ,
identifier[sort_states] : identifier[bool] = keyword[False] )-> identifier[List] [ identifier[State] ]:
literal[string]
identifier[states_by_batch_index] : identifier[Dict] [ identifier[int] , identifier[List] [ identifier[State] ]]= identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[state] keyword[in] identifier[states] :
keyword[assert] identifier[len] ( identifier[state] . identifier[batch_indices] )== literal[int]
identifier[batch_index] = identifier[state] . identifier[batch_indices] [ literal[int] ]
identifier[states_by_batch_index] [ identifier[batch_index] ]. identifier[append] ( identifier[state] )
identifier[pruned_states] =[]
keyword[for] identifier[_] , identifier[instance_states] keyword[in] identifier[states_by_batch_index] . identifier[items] ():
keyword[if] identifier[sort_states] :
identifier[scores] = identifier[torch] . identifier[cat] ([ identifier[state] . identifier[score] [ literal[int] ]. identifier[view] (- literal[int] ) keyword[for] identifier[state] keyword[in] identifier[instance_states] ])
identifier[_] , identifier[sorted_indices] = identifier[scores] . identifier[sort] (- literal[int] , identifier[descending] = keyword[True] )
identifier[sorted_states] =[ identifier[instance_states] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[sorted_indices] . identifier[detach] (). identifier[cpu] (). identifier[numpy] ()]
identifier[instance_states] = identifier[sorted_states]
keyword[for] identifier[state] keyword[in] identifier[instance_states] [: identifier[beam_size] ]:
identifier[pruned_states] . identifier[append] ( identifier[state] )
keyword[return] identifier[pruned_states] | def _prune_beam(states: List[State], beam_size: int, sort_states: bool=False) -> List[State]:
"""
This method can be used to prune the set of unfinished states on a beam or finished states
    at the end of search. In the former case, the states need not be sorted because they all come
from the same decoding step, which does the sorting. However, if the states are finished and
this method is called at the end of the search, they need to be sorted because they come
from different decoding steps.
"""
states_by_batch_index: Dict[int, List[State]] = defaultdict(list)
for state in states:
assert len(state.batch_indices) == 1
batch_index = state.batch_indices[0]
states_by_batch_index[batch_index].append(state) # depends on [control=['for'], data=['state']]
pruned_states = []
for (_, instance_states) in states_by_batch_index.items():
if sort_states:
scores = torch.cat([state.score[0].view(-1) for state in instance_states])
(_, sorted_indices) = scores.sort(-1, descending=True)
sorted_states = [instance_states[i] for i in sorted_indices.detach().cpu().numpy()]
instance_states = sorted_states # depends on [control=['if'], data=[]]
for state in instance_states[:beam_size]:
pruned_states.append(state) # depends on [control=['for'], data=['state']] # depends on [control=['for'], data=[]]
return pruned_states |
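Stripped of torch, the core idea is group-by-batch followed by truncation to the beam width; a minimal sketch assuming each state carries scalar batch_index and score attributes:

from collections import defaultdict

def prune(states, beam_size, sort_states=False):
    by_batch = defaultdict(list)
    for s in states:
        by_batch[s.batch_index].append(s)
    pruned = []
    for group in by_batch.values():
        if sort_states:  # needed when states come from different decoding steps
            group = sorted(group, key=lambda s: s.score, reverse=True)
        pruned.extend(group[:beam_size])  # keep the top beam_size per batch
    return pruned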
def import_name_or_class(name):
" Import an obect as either a fully qualified, dotted name, "
if isinstance(name, str):
# for "a.b.c.d" -> [ 'a.b.c', 'd' ]
module_name, object_name = name.rsplit('.',1)
# __import__ loads the multi-level of module, but returns
# the top level, which we have to descend into
mod = __import__(module_name)
components = name.split('.')
for comp in components[1:]: # Already got the top level, so start at 1
mod = getattr(mod, comp)
return mod
else:
return name | def function[import_name_or_class, parameter[name]]:
constant[ Import an object given as a fully qualified, dotted name; non-strings are returned unchanged. ]
if call[name[isinstance], parameter[name[name], name[str]]] begin[:]
<ast.Tuple object at 0x7da20c76e9e0> assign[=] call[name[name].rsplit, parameter[constant[.], constant[1]]]
variable[mod] assign[=] call[name[__import__], parameter[name[module_name]]]
variable[components] assign[=] call[name[name].split, parameter[constant[.]]]
for taget[name[comp]] in starred[call[name[components]][<ast.Slice object at 0x7da18f09c880>]] begin[:]
variable[mod] assign[=] call[name[getattr], parameter[name[mod], name[comp]]]
return[name[mod]] | keyword[def] identifier[import_name_or_class] ( identifier[name] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[name] , identifier[str] ):
identifier[module_name] , identifier[object_name] = identifier[name] . identifier[rsplit] ( literal[string] , literal[int] )
identifier[mod] = identifier[__import__] ( identifier[module_name] )
identifier[components] = identifier[name] . identifier[split] ( literal[string] )
keyword[for] identifier[comp] keyword[in] identifier[components] [ literal[int] :]:
identifier[mod] = identifier[getattr] ( identifier[mod] , identifier[comp] )
keyword[return] identifier[mod]
keyword[else] :
keyword[return] identifier[name] | def import_name_or_class(name):
""" Import an obect as either a fully qualified, dotted name, """
if isinstance(name, str):
# for "a.b.c.d" -> [ 'a.b.c', 'd' ]
(module_name, object_name) = name.rsplit('.', 1)
# __import__ loads the multi-level of module, but returns
# the top level, which we have to descend into
mod = __import__(module_name)
components = name.split('.')
for comp in components[1:]: # Already got the top level, so start at 1
mod = getattr(mod, comp) # depends on [control=['for'], data=['comp']]
return mod # depends on [control=['if'], data=[]]
else:
return name |
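For example, resolving a stdlib attribute by dotted path (non-string inputs pass through untouched):

import os

join = import_name_or_class('os.path.join')
assert join('a', 'b') == os.path.join('a', 'b')
assert import_name_or_class(dict) is dict  # already a class: returned as-is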
def rewriteLoadCommands(self, changefunc):
"""
Rewrite the load commands based upon a change dictionary
"""
data = changefunc(self.parent.filename)
changed = False
if data is not None:
if self.rewriteInstallNameCommand(
data.encode(sys.getfilesystemencoding())):
changed = True
for idx, name, filename in self.walkRelocatables():
data = changefunc(filename)
if data is not None:
if self.rewriteDataForCommand(idx, data.encode(
sys.getfilesystemencoding())):
changed = True
return changed | def function[rewriteLoadCommands, parameter[self, changefunc]]:
constant[
Rewrite the load commands based upon a change dictionary
]
variable[data] assign[=] call[name[changefunc], parameter[name[self].parent.filename]]
variable[changed] assign[=] constant[False]
if compare[name[data] is_not constant[None]] begin[:]
if call[name[self].rewriteInstallNameCommand, parameter[call[name[data].encode, parameter[call[name[sys].getfilesystemencoding, parameter[]]]]]] begin[:]
variable[changed] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da18f720520>, <ast.Name object at 0x7da18f720850>, <ast.Name object at 0x7da18f723b50>]]] in starred[call[name[self].walkRelocatables, parameter[]]] begin[:]
variable[data] assign[=] call[name[changefunc], parameter[name[filename]]]
if compare[name[data] is_not constant[None]] begin[:]
if call[name[self].rewriteDataForCommand, parameter[name[idx], call[name[data].encode, parameter[call[name[sys].getfilesystemencoding, parameter[]]]]]] begin[:]
variable[changed] assign[=] constant[True]
return[name[changed]] | keyword[def] identifier[rewriteLoadCommands] ( identifier[self] , identifier[changefunc] ):
literal[string]
identifier[data] = identifier[changefunc] ( identifier[self] . identifier[parent] . identifier[filename] )
identifier[changed] = keyword[False]
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[rewriteInstallNameCommand] (
identifier[data] . identifier[encode] ( identifier[sys] . identifier[getfilesystemencoding] ())):
identifier[changed] = keyword[True]
keyword[for] identifier[idx] , identifier[name] , identifier[filename] keyword[in] identifier[self] . identifier[walkRelocatables] ():
identifier[data] = identifier[changefunc] ( identifier[filename] )
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[rewriteDataForCommand] ( identifier[idx] , identifier[data] . identifier[encode] (
identifier[sys] . identifier[getfilesystemencoding] ())):
identifier[changed] = keyword[True]
keyword[return] identifier[changed] | def rewriteLoadCommands(self, changefunc):
"""
Rewrite the load commands based upon a change dictionary
"""
data = changefunc(self.parent.filename)
changed = False
if data is not None:
if self.rewriteInstallNameCommand(data.encode(sys.getfilesystemencoding())):
changed = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['data']]
for (idx, name, filename) in self.walkRelocatables():
data = changefunc(filename)
if data is not None:
if self.rewriteDataForCommand(idx, data.encode(sys.getfilesystemencoding())):
changed = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['data']] # depends on [control=['for'], data=[]]
return changed |
def rebinmask(mask, binx, biny, enlarge=False):
"""Re-bin (shrink or enlarge) a mask matrix.
Inputs
------
mask: np.ndarray
mask matrix.
binx: integer
binning along the 0th axis
biny: integer
binning along the 1st axis
enlarge: bool, optional
direction of binning. If True, the matrix will be enlarged, otherwise
        shrunk (this is the default)
Output
------
the binned mask matrix, of shape ``M/binx`` times ``N/biny`` or ``M*binx``
times ``N*biny``, depending on the value of ``enlarge`` (if ``mask`` is
``M`` times ``N`` pixels).
Notes
-----
one is nonmasked, zero is masked.
"""
if not enlarge and ((mask.shape[0] % binx) or (mask.shape[1] % biny)):
raise ValueError(
'The number of pixels of the mask matrix should be divisible by the binning in each direction!')
if enlarge:
return mask.repeat(binx, axis=0).repeat(biny, axis=1)
else:
return mask[::binx, ::biny] | def function[rebinmask, parameter[mask, binx, biny, enlarge]]:
constant[Re-bin (shrink or enlarge) a mask matrix.
Inputs
------
mask: np.ndarray
mask matrix.
binx: integer
binning along the 0th axis
biny: integer
binning along the 1st axis
enlarge: bool, optional
direction of binning. If True, the matrix will be enlarged, otherwise
        shrunk (this is the default)
Output
------
the binned mask matrix, of shape ``M/binx`` times ``N/biny`` or ``M*binx``
times ``N*biny``, depending on the value of ``enlarge`` (if ``mask`` is
``M`` times ``N`` pixels).
Notes
-----
one is nonmasked, zero is masked.
]
if <ast.BoolOp object at 0x7da1b10d76a0> begin[:]
<ast.Raise object at 0x7da1b10d5150>
if name[enlarge] begin[:]
return[call[call[name[mask].repeat, parameter[name[binx]]].repeat, parameter[name[biny]]]] | keyword[def] identifier[rebinmask] ( identifier[mask] , identifier[binx] , identifier[biny] , identifier[enlarge] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[enlarge] keyword[and] (( identifier[mask] . identifier[shape] [ literal[int] ]% identifier[binx] ) keyword[or] ( identifier[mask] . identifier[shape] [ literal[int] ]% identifier[biny] )):
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[if] identifier[enlarge] :
keyword[return] identifier[mask] . identifier[repeat] ( identifier[binx] , identifier[axis] = literal[int] ). identifier[repeat] ( identifier[biny] , identifier[axis] = literal[int] )
keyword[else] :
keyword[return] identifier[mask] [:: identifier[binx] ,:: identifier[biny] ] | def rebinmask(mask, binx, biny, enlarge=False):
"""Re-bin (shrink or enlarge) a mask matrix.
Inputs
------
mask: np.ndarray
mask matrix.
binx: integer
binning along the 0th axis
biny: integer
binning along the 1st axis
enlarge: bool, optional
direction of binning. If True, the matrix will be enlarged, otherwise
        shrunk (this is the default)
Output
------
the binned mask matrix, of shape ``M/binx`` times ``N/biny`` or ``M*binx``
times ``N*biny``, depending on the value of ``enlarge`` (if ``mask`` is
``M`` times ``N`` pixels).
Notes
-----
one is nonmasked, zero is masked.
"""
if not enlarge and (mask.shape[0] % binx or mask.shape[1] % biny):
raise ValueError('The number of pixels of the mask matrix should be divisible by the binning in each direction!') # depends on [control=['if'], data=[]]
if enlarge:
return mask.repeat(binx, axis=0).repeat(biny, axis=1) # depends on [control=['if'], data=[]]
else:
return mask[::binx, ::biny] |
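A quick numpy check of both directions (shapes only; the one-is-unmasked convention is untouched):

import numpy as np

mask = np.ones((4, 6))
small = rebinmask(mask, 2, 3)              # keeps every 2nd/3rd pixel -> (2, 2)
big = rebinmask(mask, 2, 3, enlarge=True)  # repeats each pixel -> (8, 18)
assert small.shape == (2, 2) and big.shape == (8, 18)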
def ensure_started(self):
"""Idempotent channel start"""
if self.active:
return self
self._observer = self._observer_class(**self._observer_params)
self.start()
self._active = True
return self | def function[ensure_started, parameter[self]]:
constant[Idempotent channel start]
if name[self].active begin[:]
return[name[self]]
name[self]._observer assign[=] call[name[self]._observer_class, parameter[]]
call[name[self].start, parameter[]]
name[self]._active assign[=] constant[True]
return[name[self]] | keyword[def] identifier[ensure_started] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[active] :
keyword[return] identifier[self]
identifier[self] . identifier[_observer] = identifier[self] . identifier[_observer_class] (** identifier[self] . identifier[_observer_params] )
identifier[self] . identifier[start] ()
identifier[self] . identifier[_active] = keyword[True]
keyword[return] identifier[self] | def ensure_started(self):
"""Idempotent channel start"""
if self.active:
return self # depends on [control=['if'], data=[]]
self._observer = self._observer_class(**self._observer_params)
self.start()
self._active = True
return self |
def _set_cfm_config(self, v, load=False):
"""
Setter method for cfm_config, mapped from YANG variable /cfm_config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_cfm_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cfm_config() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=cfm_config.cfm_config, is_container='container', presence=False, yang_name="cfm-config", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'152'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """cfm_config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=cfm_config.cfm_config, is_container='container', presence=False, yang_name="cfm-config", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'152'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='container', is_config=True)""",
})
self.__cfm_config = t
if hasattr(self, '_set'):
self._set() | def function[_set_cfm_config, parameter[self, v, load]]:
constant[
Setter method for cfm_config, mapped from YANG variable /cfm_config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_cfm_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cfm_config() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f8130a0>
name[self].__cfm_config assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_cfm_config] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[cfm_config] . identifier[cfm_config] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__cfm_config] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_cfm_config(self, v, load=False):
"""
Setter method for cfm_config, mapped from YANG variable /cfm_config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_cfm_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cfm_config() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=cfm_config.cfm_config, is_container='container', presence=False, yang_name='cfm-config', rest_name='', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'152'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'cfm_config must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=cfm_config.cfm_config, is_container=\'container\', presence=False, yang_name="cfm-config", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-drop-node-name\': None, u\'sort-priority\': u\'152\'}}, namespace=\'urn:brocade.com:mgmt:brocade-dot1ag\', defining_module=\'brocade-dot1ag\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__cfm_config = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
C = self.COEFFS[imt]
imean = (self._compute_magnitude_scaling(C, rup.mag) +
self._compute_distance_scaling(C, dists.rrup, rup.mag))
# Original GMPE returns log10 acceleration in cm/s/s
# Converts to natural logarithm of g
mean = np.log((10.0 ** (imean - 2.0)) / g)
mean = self._compute_site_scaling(sites.vs30, mean)
istddevs = self._compute_stddevs(
C, dists.rrup.shape, stddev_types
)
# Convert from common logarithm to natural logarithm
stddevs = np.log(10 ** np.array(istddevs))
return mean, stddevs | def function[get_mean_and_stddevs, parameter[self, sites, rup, dists, imt, stddev_types]]:
constant[
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da18f00df60>]]]
variable[C] assign[=] call[name[self].COEFFS][name[imt]]
variable[imean] assign[=] binary_operation[call[name[self]._compute_magnitude_scaling, parameter[name[C], name[rup].mag]] + call[name[self]._compute_distance_scaling, parameter[name[C], name[dists].rrup, name[rup].mag]]]
variable[mean] assign[=] call[name[np].log, parameter[binary_operation[binary_operation[constant[10.0] ** binary_operation[name[imean] - constant[2.0]]] / name[g]]]]
variable[mean] assign[=] call[name[self]._compute_site_scaling, parameter[name[sites].vs30, name[mean]]]
variable[istddevs] assign[=] call[name[self]._compute_stddevs, parameter[name[C], name[dists].rrup.shape, name[stddev_types]]]
variable[stddevs] assign[=] call[name[np].log, parameter[binary_operation[constant[10] ** call[name[np].array, parameter[name[istddevs]]]]]]
return[tuple[[<ast.Name object at 0x7da18f00f310>, <ast.Name object at 0x7da18f00d8d0>]]] | keyword[def] identifier[get_mean_and_stddevs] ( identifier[self] , identifier[sites] , identifier[rup] , identifier[dists] , identifier[imt] , identifier[stddev_types] ):
literal[string]
keyword[assert] identifier[all] ( identifier[stddev_type] keyword[in] identifier[self] . identifier[DEFINED_FOR_STANDARD_DEVIATION_TYPES]
keyword[for] identifier[stddev_type] keyword[in] identifier[stddev_types] )
identifier[C] = identifier[self] . identifier[COEFFS] [ identifier[imt] ]
identifier[imean] =( identifier[self] . identifier[_compute_magnitude_scaling] ( identifier[C] , identifier[rup] . identifier[mag] )+
identifier[self] . identifier[_compute_distance_scaling] ( identifier[C] , identifier[dists] . identifier[rrup] , identifier[rup] . identifier[mag] ))
identifier[mean] = identifier[np] . identifier[log] (( literal[int] **( identifier[imean] - literal[int] ))/ identifier[g] )
identifier[mean] = identifier[self] . identifier[_compute_site_scaling] ( identifier[sites] . identifier[vs30] , identifier[mean] )
identifier[istddevs] = identifier[self] . identifier[_compute_stddevs] (
identifier[C] , identifier[dists] . identifier[rrup] . identifier[shape] , identifier[stddev_types]
)
identifier[stddevs] = identifier[np] . identifier[log] ( literal[int] ** identifier[np] . identifier[array] ( identifier[istddevs] ))
keyword[return] identifier[mean] , identifier[stddevs] | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
assert all((stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types))
C = self.COEFFS[imt]
imean = self._compute_magnitude_scaling(C, rup.mag) + self._compute_distance_scaling(C, dists.rrup, rup.mag)
# Original GMPE returns log10 acceleration in cm/s/s
# Converts to natural logarithm of g
mean = np.log(10.0 ** (imean - 2.0) / g)
mean = self._compute_site_scaling(sites.vs30, mean)
istddevs = self._compute_stddevs(C, dists.rrup.shape, stddev_types)
# Convert from common logarithm to natural logarithm
stddevs = np.log(10 ** np.array(istddevs))
return (mean, stddevs) |
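Writing out the unit conversion applied above: the model's base-10 prediction in cm/s^2 becomes a natural log in units of g (the 10^{-2} factor converts cm to m, and g = 9.81 m/s^2) via

\mu_{\ln g} = \ln\!\left(\frac{10^{\,\mu_{\log_{10}} - 2}}{g}\right), \qquad \sigma_{\ln} = \ln 10^{\,\sigma_{\log_{10}}} = \sigma_{\log_{10}} \ln 10 .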
def set_training(model, mode):
"""
A context manager to temporarily set the training mode of 'model'
to 'mode', resetting it when we exit the with-block. A no-op if
mode is None.
"""
if mode is None:
yield
return
old_mode = model.training
if old_mode != mode:
model.train(mode)
try:
yield
finally:
if old_mode != mode:
model.train(old_mode) | def function[set_training, parameter[model, mode]]:
constant[
A context manager to temporarily set the training mode of 'model'
to 'mode', resetting it when we exit the with-block. A no-op if
mode is None.
]
if compare[name[mode] is constant[None]] begin[:]
<ast.Yield object at 0x7da1b01e24a0>
return[None]
variable[old_mode] assign[=] name[model].training
if compare[name[old_mode] not_equal[!=] name[mode]] begin[:]
call[name[model].train, parameter[name[mode]]]
<ast.Try object at 0x7da1b0192a10> | keyword[def] identifier[set_training] ( identifier[model] , identifier[mode] ):
literal[string]
keyword[if] identifier[mode] keyword[is] keyword[None] :
keyword[yield]
keyword[return]
identifier[old_mode] = identifier[model] . identifier[training]
keyword[if] identifier[old_mode] != identifier[mode] :
identifier[model] . identifier[train] ( identifier[mode] )
keyword[try] :
keyword[yield]
keyword[finally] :
keyword[if] identifier[old_mode] != identifier[mode] :
identifier[model] . identifier[train] ( identifier[old_mode] ) | def set_training(model, mode):
"""
A context manager to temporarily set the training mode of 'model'
to 'mode', resetting it when we exit the with-block. A no-op if
mode is None.
"""
if mode is None:
yield
return # depends on [control=['if'], data=[]]
old_mode = model.training
if old_mode != mode:
model.train(mode) # depends on [control=['if'], data=['mode']]
try:
yield # depends on [control=['try'], data=[]]
finally:
if old_mode != mode:
model.train(old_mode) # depends on [control=['if'], data=['old_mode']] |
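The yield-based body implies the original is wrapped with contextlib.contextmanager (the decorator appears stripped in this listing); restoring it gives a runnable sketch with a dummy model:

import contextlib

set_training_ctx = contextlib.contextmanager(set_training)

class DummyModel:  # stand-in for torch.nn.Module
    def __init__(self):
        self.training = False
    def train(self, mode=True):
        self.training = mode

model = DummyModel()
with set_training_ctx(model, True):
    assert model.training is True   # temporarily in training mode
assert model.training is False      # restored on exit, even after an exception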
def set_docstring(self, loc, tokens):
"""Set the docstring."""
internal_assert(len(tokens) == 2, "invalid docstring tokens", tokens)
self.docstring = self.reformat(tokens[0]) + "\n\n"
return tokens[1] | def function[set_docstring, parameter[self, loc, tokens]]:
constant[Set the docstring.]
call[name[internal_assert], parameter[compare[call[name[len], parameter[name[tokens]]] equal[==] constant[2]], constant[invalid docstring tokens], name[tokens]]]
name[self].docstring assign[=] binary_operation[call[name[self].reformat, parameter[call[name[tokens]][constant[0]]]] + constant[
]]
return[call[name[tokens]][constant[1]]] | keyword[def] identifier[set_docstring] ( identifier[self] , identifier[loc] , identifier[tokens] ):
literal[string]
identifier[internal_assert] ( identifier[len] ( identifier[tokens] )== literal[int] , literal[string] , identifier[tokens] )
identifier[self] . identifier[docstring] = identifier[self] . identifier[reformat] ( identifier[tokens] [ literal[int] ])+ literal[string]
keyword[return] identifier[tokens] [ literal[int] ] | def set_docstring(self, loc, tokens):
"""Set the docstring."""
internal_assert(len(tokens) == 2, 'invalid docstring tokens', tokens)
self.docstring = self.reformat(tokens[0]) + '\n\n'
return tokens[1] |
def _get_algorithm_info(self, algorithm_info):
'''Get algorithm info'''
if algorithm_info['algorithm'] not in self.ALGORITHMS:
raise Exception('Algorithm not supported: %s'
% algorithm_info['algorithm'])
algorithm = self.ALGORITHMS[algorithm_info['algorithm']]
algorithm_info.update(algorithm)
return algorithm_info | def function[_get_algorithm_info, parameter[self, algorithm_info]]:
constant[Get algorithm info]
if compare[call[name[algorithm_info]][constant[algorithm]] <ast.NotIn object at 0x7da2590d7190> name[self].ALGORITHMS] begin[:]
<ast.Raise object at 0x7da2054a65f0>
variable[algorithm] assign[=] call[name[self].ALGORITHMS][call[name[algorithm_info]][constant[algorithm]]]
call[name[algorithm_info].update, parameter[name[algorithm]]]
return[name[algorithm_info]] | keyword[def] identifier[_get_algorithm_info] ( identifier[self] , identifier[algorithm_info] ):
literal[string]
keyword[if] identifier[algorithm_info] [ literal[string] ] keyword[not] keyword[in] identifier[self] . identifier[ALGORITHMS] :
keyword[raise] identifier[Exception] ( literal[string]
% identifier[algorithm_info] [ literal[string] ])
identifier[algorithm] = identifier[self] . identifier[ALGORITHMS] [ identifier[algorithm_info] [ literal[string] ]]
identifier[algorithm_info] . identifier[update] ( identifier[algorithm] )
keyword[return] identifier[algorithm_info] | def _get_algorithm_info(self, algorithm_info):
"""Get algorithm info"""
if algorithm_info['algorithm'] not in self.ALGORITHMS:
raise Exception('Algorithm not supported: %s' % algorithm_info['algorithm']) # depends on [control=['if'], data=[]]
algorithm = self.ALGORITHMS[algorithm_info['algorithm']]
algorithm_info.update(algorithm)
return algorithm_info |
def has_access_api(f):
"""
Use this decorator to enable granular security permissions to your API methods.
Permissions will be associated to a role, and roles are associated to users.
    By default the permission's name is the method's name.
    This will return a message and HTTP 401 in case of unauthorized access.
"""
if hasattr(f, '_permission_name'):
permission_str = f._permission_name
else:
permission_str = f.__name__
def wraps(self, *args, **kwargs):
permission_str = PERMISSION_PREFIX + f._permission_name
if self.appbuilder.sm.has_access(
permission_str,
self.__class__.__name__
):
return f(self, *args, **kwargs)
else:
log.warning(
LOGMSG_ERR_SEC_ACCESS_DENIED.format(
permission_str,
self.__class__.__name__
)
)
response = make_response(
jsonify(
{
'message': str(FLAMSG_ERR_SEC_ACCESS_DENIED),
'severity': 'danger'
}
),
401
)
response.headers['Content-Type'] = "application/json"
return response
f._permission_name = permission_str
return functools.update_wrapper(wraps, f) | def function[has_access_api, parameter[f]]:
constant[
Use this decorator to enable granular security permissions to your API methods.
Permissions will be associated to a role, and roles are associated to users.
    By default the permission's name is the method's name.
    This will return a message and HTTP 401 in case of unauthorized access.
]
if call[name[hasattr], parameter[name[f], constant[_permission_name]]] begin[:]
variable[permission_str] assign[=] name[f]._permission_name
def function[wraps, parameter[self]]:
variable[permission_str] assign[=] binary_operation[name[PERMISSION_PREFIX] + name[f]._permission_name]
if call[name[self].appbuilder.sm.has_access, parameter[name[permission_str], name[self].__class__.__name__]] begin[:]
return[call[name[f], parameter[name[self], <ast.Starred object at 0x7da207f021d0>]]]
name[f]._permission_name assign[=] name[permission_str]
return[call[name[functools].update_wrapper, parameter[name[wraps], name[f]]]] | keyword[def] identifier[has_access_api] ( identifier[f] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[permission_str] = identifier[f] . identifier[_permission_name]
keyword[else] :
identifier[permission_str] = identifier[f] . identifier[__name__]
keyword[def] identifier[wraps] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[permission_str] = identifier[PERMISSION_PREFIX] + identifier[f] . identifier[_permission_name]
keyword[if] identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[has_access] (
identifier[permission_str] ,
identifier[self] . identifier[__class__] . identifier[__name__]
):
keyword[return] identifier[f] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[log] . identifier[warning] (
identifier[LOGMSG_ERR_SEC_ACCESS_DENIED] . identifier[format] (
identifier[permission_str] ,
identifier[self] . identifier[__class__] . identifier[__name__]
)
)
identifier[response] = identifier[make_response] (
identifier[jsonify] (
{
literal[string] : identifier[str] ( identifier[FLAMSG_ERR_SEC_ACCESS_DENIED] ),
literal[string] : literal[string]
}
),
literal[int]
)
identifier[response] . identifier[headers] [ literal[string] ]= literal[string]
keyword[return] identifier[response]
identifier[f] . identifier[_permission_name] = identifier[permission_str]
keyword[return] identifier[functools] . identifier[update_wrapper] ( identifier[wraps] , identifier[f] ) | def has_access_api(f):
"""
Use this decorator to enable granular security permissions to your API methods.
Permissions will be associated to a role, and roles are associated to users.
    By default the permission's name is the method's name.
    This will return a message and HTTP 401 in case of unauthorized access.
"""
if hasattr(f, '_permission_name'):
permission_str = f._permission_name # depends on [control=['if'], data=[]]
else:
permission_str = f.__name__
def wraps(self, *args, **kwargs):
permission_str = PERMISSION_PREFIX + f._permission_name
if self.appbuilder.sm.has_access(permission_str, self.__class__.__name__):
return f(self, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
log.warning(LOGMSG_ERR_SEC_ACCESS_DENIED.format(permission_str, self.__class__.__name__))
response = make_response(jsonify({'message': str(FLAMSG_ERR_SEC_ACCESS_DENIED), 'severity': 'danger'}), 401)
response.headers['Content-Type'] = 'application/json'
return response
f._permission_name = permission_str
return functools.update_wrapper(wraps, f) |
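A decorator-usage sketch on a hypothetical Flask-AppBuilder view (BaseView and expose are the usual flask_appbuilder imports; list_items is a made-up endpoint). The guarded permission becomes PERMISSION_PREFIX plus the method name, typically 'can_list_items':

from flask_appbuilder import BaseView, expose  # assumed available

class MyView(BaseView):
    @expose('/items/')
    @has_access_api
    def list_items(self):
        # reaching here means the current user's roles grant the permission
        return self.render_template('items.html')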
def get_url(self):
"""Gets the URL associated with this content for web-based retrieval.
return: (string) - the url for this data
raise: IllegalState - ``has_url()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
# construct the URL from runtime's FILESYSTEM location param
# plus what we know about the location of repository / assetContents
# have to get repositoryId from the asset?
# return self._payload.get_url()
url = '/repository/repositories/{0}/assets/{1}/contents/{2}/stream'.format(self._my_map['assignedRepositoryIds'][0],
str(self.get_asset_id()),
str(self.get_id()))
if 'url_hostname' in self._config_map:
url_hostname = self._config_map['url_hostname']
return '{0}{1}'.format(url_hostname, url)
return url | def function[get_url, parameter[self]]:
constant[Gets the URL associated with this content for web-based retrieval.
return: (string) - the url for this data
raise: IllegalState - ``has_url()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
]
variable[url] assign[=] call[constant[/repository/repositories/{0}/assets/{1}/contents/{2}/stream].format, parameter[call[call[name[self]._my_map][constant[assignedRepositoryIds]]][constant[0]], call[name[str], parameter[call[name[self].get_asset_id, parameter[]]]], call[name[str], parameter[call[name[self].get_id, parameter[]]]]]]
if compare[constant[url_hostname] in name[self]._config_map] begin[:]
variable[url_hostname] assign[=] call[name[self]._config_map][constant[url_hostname]]
return[call[constant[{0}{1}].format, parameter[name[url_hostname], name[url]]]]
return[name[url]] | keyword[def] identifier[get_url] ( identifier[self] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[_my_map] [ literal[string] ][ literal[int] ],
identifier[str] ( identifier[self] . identifier[get_asset_id] ()),
identifier[str] ( identifier[self] . identifier[get_id] ()))
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_config_map] :
identifier[url_hostname] = identifier[self] . identifier[_config_map] [ literal[string] ]
keyword[return] literal[string] . identifier[format] ( identifier[url_hostname] , identifier[url] )
keyword[return] identifier[url] | def get_url(self):
"""Gets the URL associated with this content for web-based retrieval.
return: (string) - the url for this data
raise: IllegalState - ``has_url()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
# construct the URL from runtime's FILESYSTEM location param
# plus what we know about the location of repository / assetContents
# have to get repositoryId from the asset?
# return self._payload.get_url()
url = '/repository/repositories/{0}/assets/{1}/contents/{2}/stream'.format(self._my_map['assignedRepositoryIds'][0], str(self.get_asset_id()), str(self.get_id()))
if 'url_hostname' in self._config_map:
url_hostname = self._config_map['url_hostname']
return '{0}{1}'.format(url_hostname, url) # depends on [control=['if'], data=[]]
return url |
def add_event_handler(self, callback, event=None):
"""
Registers the given callback to be called on the specified event.
Args:
callback (`callable`):
The callable function accepting one parameter to be used.
Note that if you have used `telethon.events.register` in
the callback, ``event`` will be ignored, and instead the
events you previously registered will be used.
event (`_EventBuilder` | `type`, optional):
The event builder class or instance to be used,
for instance ``events.NewMessage``.
If left unspecified, `telethon.events.raw.Raw` (the
:tl:`Update` objects with no further processing) will
be passed instead.
"""
builders = events._get_handlers(callback)
if builders is not None:
for event in builders:
self._event_builders.append((event, callback))
return
if isinstance(event, type):
event = event()
elif not event:
event = events.Raw()
self._event_builders.append((event, callback)) | def function[add_event_handler, parameter[self, callback, event]]:
constant[
Registers the given callback to be called on the specified event.
Args:
callback (`callable`):
The callable function accepting one parameter to be used.
Note that if you have used `telethon.events.register` in
the callback, ``event`` will be ignored, and instead the
events you previously registered will be used.
event (`_EventBuilder` | `type`, optional):
The event builder class or instance to be used,
for instance ``events.NewMessage``.
If left unspecified, `telethon.events.raw.Raw` (the
:tl:`Update` objects with no further processing) will
be passed instead.
]
variable[builders] assign[=] call[name[events]._get_handlers, parameter[name[callback]]]
if compare[name[builders] is_not constant[None]] begin[:]
for taget[name[event]] in starred[name[builders]] begin[:]
call[name[self]._event_builders.append, parameter[tuple[[<ast.Name object at 0x7da1b21259f0>, <ast.Name object at 0x7da1b2125840>]]]]
return[None]
if call[name[isinstance], parameter[name[event], name[type]]] begin[:]
variable[event] assign[=] call[name[event], parameter[]]
call[name[self]._event_builders.append, parameter[tuple[[<ast.Name object at 0x7da1b2124c70>, <ast.Name object at 0x7da1b2124eb0>]]]] | keyword[def] identifier[add_event_handler] ( identifier[self] , identifier[callback] , identifier[event] = keyword[None] ):
literal[string]
identifier[builders] = identifier[events] . identifier[_get_handlers] ( identifier[callback] )
keyword[if] identifier[builders] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[event] keyword[in] identifier[builders] :
identifier[self] . identifier[_event_builders] . identifier[append] (( identifier[event] , identifier[callback] ))
keyword[return]
keyword[if] identifier[isinstance] ( identifier[event] , identifier[type] ):
identifier[event] = identifier[event] ()
keyword[elif] keyword[not] identifier[event] :
identifier[event] = identifier[events] . identifier[Raw] ()
identifier[self] . identifier[_event_builders] . identifier[append] (( identifier[event] , identifier[callback] )) | def add_event_handler(self, callback, event=None):
"""
Registers the given callback to be called on the specified event.
Args:
callback (`callable`):
The callable function accepting one parameter to be used.
Note that if you have used `telethon.events.register` in
the callback, ``event`` will be ignored, and instead the
events you previously registered will be used.
event (`_EventBuilder` | `type`, optional):
The event builder class or instance to be used,
for instance ``events.NewMessage``.
If left unspecified, `telethon.events.raw.Raw` (the
:tl:`Update` objects with no further processing) will
be passed instead.
"""
builders = events._get_handlers(callback)
if builders is not None:
for event in builders:
self._event_builders.append((event, callback)) # depends on [control=['for'], data=['event']]
return # depends on [control=['if'], data=['builders']]
if isinstance(event, type):
event = event() # depends on [control=['if'], data=[]]
elif not event:
event = events.Raw() # depends on [control=['if'], data=[]]
self._event_builders.append((event, callback)) |
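
A usage sketch for the add_event_handler row above (Telethon); the API credentials are placeholders:

import asyncio
from telethon import TelegramClient, events

client = TelegramClient("demo-session", api_id=12345,
                        api_hash="0123456789abcdef0123456789abcdef")  # placeholders

async def on_new_message(event):
    print("got:", event.raw_text)

# A builder instance; a bare class (events.NewMessage) or nothing at all
# (falling back to events.Raw) would also be accepted by add_event_handler.
client.add_event_handler(on_new_message, events.NewMessage(incoming=True))

async def main():
    await client.start()
    await client.run_until_disconnected()

# asyncio.run(main())  # uncomment with real credentials
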
def recent(self, message_id=None, limit=None):
""" Recent messages.
Kwargs:
message_id (int): If specified, return messages since the specified message ID
limit (int): If specified, limit the number of messages
Returns:
array. Messages
"""
parameters = {}
if message_id:
parameters["since_message_id"] = message_id
if limit:
parameters["limit"] = limit
messages = self._connection.get("room/%s/recent" % self.id, key="messages", parameters=parameters)
if messages:
messages = [Message(self._campfire, message) for message in messages]
return messages | def function[recent, parameter[self, message_id, limit]]:
constant[ Recent messages.
Kwargs:
message_id (int): If specified, return messages since the specified message ID
limit (int): If specified, limit the number of messages
Returns:
array. Messages
]
variable[parameters] assign[=] dictionary[[], []]
if name[message_id] begin[:]
call[name[parameters]][constant[since_message_id]] assign[=] name[message_id]
if name[limit] begin[:]
call[name[parameters]][constant[limit]] assign[=] name[limit]
variable[messages] assign[=] call[name[self]._connection.get, parameter[binary_operation[constant[room/%s/recent] <ast.Mod object at 0x7da2590d6920> name[self].id]]]
if name[messages] begin[:]
variable[messages] assign[=] <ast.ListComp object at 0x7da1b004dab0>
return[name[messages]] | keyword[def] identifier[recent] ( identifier[self] , identifier[message_id] = keyword[None] , identifier[limit] = keyword[None] ):
literal[string]
identifier[parameters] ={}
keyword[if] identifier[message_id] :
identifier[parameters] [ literal[string] ]= identifier[message_id]
keyword[if] identifier[limit] :
identifier[parameters] [ literal[string] ]= identifier[limit]
identifier[messages] = identifier[self] . identifier[_connection] . identifier[get] ( literal[string] % identifier[self] . identifier[id] , identifier[key] = literal[string] , identifier[parameters] = identifier[parameters] )
keyword[if] identifier[messages] :
identifier[messages] =[ identifier[Message] ( identifier[self] . identifier[_campfire] , identifier[message] ) keyword[for] identifier[message] keyword[in] identifier[messages] ]
keyword[return] identifier[messages] | def recent(self, message_id=None, limit=None):
""" Recent messages.
Kwargs:
message_id (int): If specified, return messages since the specified message ID
limit (int): If specified, limit the number of messages
Returns:
array. Messages
"""
parameters = {}
if message_id:
parameters['since_message_id'] = message_id # depends on [control=['if'], data=[]]
if limit:
parameters['limit'] = limit # depends on [control=['if'], data=[]]
messages = self._connection.get('room/%s/recent' % self.id, key='messages', parameters=parameters)
if messages:
messages = [Message(self._campfire, message) for message in messages] # depends on [control=['if'], data=[]]
return messages |
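
An illustrative call for the recent() row above; 'room' is assumed to be an instance of the enclosing Campfire Room-style class:

# Fetch up to 50 messages posted after message ID 1024.
messages = room.recent(message_id=1024, limit=50)
if messages:                       # None/empty when nothing matched
    print("fetched", len(messages), "messages")
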
def add_subscription(self, channel, callback_function):
"""
Add a channel to subscribe to and a callback function to
run when the channel receives an update.
If channel already exists, create a new "subscription"
and append another callback function.
Args:
            channel (str): The channel to add a subscription to.
callback_function (func): The function to run on an
update to the passed in channel.
"""
if channel not in CHANNELS:
CHANNELS.append(channel)
SUBSCRIPTIONS[channel] = [callback_function]
else:
SUBSCRIPTIONS[channel].append(callback_function)
# If a channel gets added after subscription has already been called
# call subscribe on the individual channel, here.
if self._subscribed:
_LOGGER.info("New channel added after main subscribe call.")
self._pubnub.subscribe().channels(channel).execute() | def function[add_subscription, parameter[self, channel, callback_function]]:
constant[
Add a channel to subscribe to and a callback function to
run when the channel receives an update.
If channel already exists, create a new "subscription"
and append another callback function.
Args:
            channel (str): The channel to add a subscription to.
callback_function (func): The function to run on an
update to the passed in channel.
]
if compare[name[channel] <ast.NotIn object at 0x7da2590d7190> name[CHANNELS]] begin[:]
call[name[CHANNELS].append, parameter[name[channel]]]
call[name[SUBSCRIPTIONS]][name[channel]] assign[=] list[[<ast.Name object at 0x7da20c76e260>]]
if name[self]._subscribed begin[:]
call[name[_LOGGER].info, parameter[constant[New channel added after main subscribe call.]]]
call[call[call[name[self]._pubnub.subscribe, parameter[]].channels, parameter[name[channel]]].execute, parameter[]] | keyword[def] identifier[add_subscription] ( identifier[self] , identifier[channel] , identifier[callback_function] ):
literal[string]
keyword[if] identifier[channel] keyword[not] keyword[in] identifier[CHANNELS] :
identifier[CHANNELS] . identifier[append] ( identifier[channel] )
identifier[SUBSCRIPTIONS] [ identifier[channel] ]=[ identifier[callback_function] ]
keyword[else] :
identifier[SUBSCRIPTIONS] [ identifier[channel] ]. identifier[append] ( identifier[callback_function] )
keyword[if] identifier[self] . identifier[_subscribed] :
identifier[_LOGGER] . identifier[info] ( literal[string] )
identifier[self] . identifier[_pubnub] . identifier[subscribe] (). identifier[channels] ( identifier[channel] ). identifier[execute] () | def add_subscription(self, channel, callback_function):
"""
Add a channel to subscribe to and a callback function to
run when the channel receives an update.
If channel already exists, create a new "subscription"
and append another callback function.
Args:
            channel (str): The channel to add a subscription to.
callback_function (func): The function to run on an
update to the passed in channel.
"""
if channel not in CHANNELS:
CHANNELS.append(channel)
SUBSCRIPTIONS[channel] = [callback_function] # depends on [control=['if'], data=['channel', 'CHANNELS']]
else:
SUBSCRIPTIONS[channel].append(callback_function)
# If a channel gets added after subscription has already been called
# call subscribe on the individual channel, here.
if self._subscribed:
_LOGGER.info('New channel added after main subscribe call.')
self._pubnub.subscribe().channels(channel).execute() # depends on [control=['if'], data=[]] |
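
A sketch of stacking two callbacks on one channel via the add_subscription row above; 'handler' is assumed to be an instance of the enclosing subscription class, and the channel name is illustrative:

latest_state = {}

def log_update(message):
    print("update:", message)

def cache_update(message):
    latest_state["last"] = message

handler.add_subscription("device.123", log_update)
handler.add_subscription("device.123", cache_update)  # appended to the same channel, not re-subscribed
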
def on_channel_open(self, channel):
"""Called by pika when the channel has been opened.
The channel object is passed in so we can make use of it. Since the
channel is now open, we'll start consuming.
:param pika.channel.Channel channel: The channel object
"""
logger.debug('Channel opened')
self._channel = channel
self.add_on_channel_close_callback()
self.setup_qos() | def function[on_channel_open, parameter[self, channel]]:
constant[Called by pika when the channel has been opened.
The channel object is passed in so we can make use of it. Since the
channel is now open, we'll start consuming.
:param pika.channel.Channel channel: The channel object
]
call[name[logger].debug, parameter[constant[Channel opened]]]
name[self]._channel assign[=] name[channel]
call[name[self].add_on_channel_close_callback, parameter[]]
call[name[self].setup_qos, parameter[]] | keyword[def] identifier[on_channel_open] ( identifier[self] , identifier[channel] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_channel] = identifier[channel]
identifier[self] . identifier[add_on_channel_close_callback] ()
identifier[self] . identifier[setup_qos] () | def on_channel_open(self, channel):
"""Called by pika when the channel has been opened.
The channel object is passed in so we can make use of it. Since the
channel is now open, we'll start consuming.
:param pika.channel.Channel channel: The channel object
"""
logger.debug('Channel opened')
self._channel = channel
self.add_on_channel_close_callback()
self.setup_qos() |
def validate_usage(validation_context, cert, key_usage, extended_key_usage, extended_optional):
"""
Validates the end-entity certificate from a
certvalidator.path.ValidationPath object to ensure that the certificate
is valid for the key usage and extended key usage purposes specified.
THE CERTIFICATE PATH MUST BE VALIDATED SEPARATELY VIA validate_path()!
:param validation_context:
A certvalidator.context.ValidationContext object to use for
configuring validation behavior
:param cert:
An asn1crypto.x509.Certificate object returned from validate_path()
:param key_usage:
A set of unicode strings of the required key usage purposes
:param extended_key_usage:
A set of unicode strings of the required extended key usage purposes
:param extended_optional:
A bool - if the extended_key_usage extension may be omitted and still
considered valid
:raises:
certvalidator.errors.InvalidCertificateError - when the certificate is not valid for the usages specified
"""
if not isinstance(validation_context, ValidationContext):
raise TypeError(pretty_message(
'''
validation_context must be an instance of
certvalidator.context.ValidationContext, not %s
''',
type_name(validation_context)
))
if validation_context.is_whitelisted(cert):
return
if key_usage is None:
key_usage = set()
if extended_key_usage is None:
extended_key_usage = set()
if not isinstance(key_usage, set):
raise TypeError(pretty_message(
'''
key_usage must be a set of unicode strings, not %s
''',
type_name(key_usage)
))
if not isinstance(extended_key_usage, set):
raise TypeError(pretty_message(
'''
extended_key_usage must be a set of unicode strings, not %s
''',
type_name(extended_key_usage)
))
if not isinstance(extended_optional, bool):
raise TypeError(pretty_message(
'''
extended_optional must be a boolean, not %s
''',
type_name(extended_optional)
))
missing_key_usage = key_usage
if cert.key_usage_value:
missing_key_usage = key_usage - cert.key_usage_value.native
missing_extended_key_usage = set()
if extended_optional is False and not cert.extended_key_usage_value:
missing_extended_key_usage = extended_key_usage
elif cert.extended_key_usage_value is not None:
missing_extended_key_usage = extended_key_usage - set(cert.extended_key_usage_value.native)
if missing_key_usage or missing_extended_key_usage:
plural = 's' if len(missing_key_usage | missing_extended_key_usage) > 1 else ''
friendly_purposes = []
for purpose in sorted(missing_key_usage | missing_extended_key_usage):
friendly_purposes.append(purpose.replace('_', ' '))
raise InvalidCertificateError(pretty_message(
'''
The X.509 certificate provided is not valid for the purpose%s of %s
''',
plural,
', '.join(friendly_purposes)
)) | def function[validate_usage, parameter[validation_context, cert, key_usage, extended_key_usage, extended_optional]]:
constant[
Validates the end-entity certificate from a
certvalidator.path.ValidationPath object to ensure that the certificate
is valid for the key usage and extended key usage purposes specified.
THE CERTIFICATE PATH MUST BE VALIDATED SEPARATELY VIA validate_path()!
:param validation_context:
A certvalidator.context.ValidationContext object to use for
configuring validation behavior
:param cert:
An asn1crypto.x509.Certificate object returned from validate_path()
:param key_usage:
A set of unicode strings of the required key usage purposes
:param extended_key_usage:
A set of unicode strings of the required extended key usage purposes
:param extended_optional:
A bool - if the extended_key_usage extension may be omitted and still
considered valid
:raises:
certvalidator.errors.InvalidCertificateError - when the certificate is not valid for the usages specified
]
if <ast.UnaryOp object at 0x7da1b0db84f0> begin[:]
<ast.Raise object at 0x7da1b0db8e20>
if call[name[validation_context].is_whitelisted, parameter[name[cert]]] begin[:]
return[None]
if compare[name[key_usage] is constant[None]] begin[:]
variable[key_usage] assign[=] call[name[set], parameter[]]
if compare[name[extended_key_usage] is constant[None]] begin[:]
variable[extended_key_usage] assign[=] call[name[set], parameter[]]
if <ast.UnaryOp object at 0x7da1b0dbb0d0> begin[:]
<ast.Raise object at 0x7da1b0dbb340>
if <ast.UnaryOp object at 0x7da1b0dbb2e0> begin[:]
<ast.Raise object at 0x7da1b0dbb490>
if <ast.UnaryOp object at 0x7da1b0dbbe20> begin[:]
<ast.Raise object at 0x7da1b0dbbd30>
variable[missing_key_usage] assign[=] name[key_usage]
if name[cert].key_usage_value begin[:]
variable[missing_key_usage] assign[=] binary_operation[name[key_usage] - name[cert].key_usage_value.native]
variable[missing_extended_key_usage] assign[=] call[name[set], parameter[]]
if <ast.BoolOp object at 0x7da1b0dbb820> begin[:]
variable[missing_extended_key_usage] assign[=] name[extended_key_usage]
if <ast.BoolOp object at 0x7da1b0dba530> begin[:]
variable[plural] assign[=] <ast.IfExp object at 0x7da1b0dba620>
variable[friendly_purposes] assign[=] list[[]]
for taget[name[purpose]] in starred[call[name[sorted], parameter[binary_operation[name[missing_key_usage] <ast.BitOr object at 0x7da2590d6aa0> name[missing_extended_key_usage]]]]] begin[:]
call[name[friendly_purposes].append, parameter[call[name[purpose].replace, parameter[constant[_], constant[ ]]]]]
<ast.Raise object at 0x7da1b0dc2d70> | keyword[def] identifier[validate_usage] ( identifier[validation_context] , identifier[cert] , identifier[key_usage] , identifier[extended_key_usage] , identifier[extended_optional] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[validation_context] , identifier[ValidationContext] ):
keyword[raise] identifier[TypeError] ( identifier[pretty_message] (
literal[string] ,
identifier[type_name] ( identifier[validation_context] )
))
keyword[if] identifier[validation_context] . identifier[is_whitelisted] ( identifier[cert] ):
keyword[return]
keyword[if] identifier[key_usage] keyword[is] keyword[None] :
identifier[key_usage] = identifier[set] ()
keyword[if] identifier[extended_key_usage] keyword[is] keyword[None] :
identifier[extended_key_usage] = identifier[set] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[key_usage] , identifier[set] ):
keyword[raise] identifier[TypeError] ( identifier[pretty_message] (
literal[string] ,
identifier[type_name] ( identifier[key_usage] )
))
keyword[if] keyword[not] identifier[isinstance] ( identifier[extended_key_usage] , identifier[set] ):
keyword[raise] identifier[TypeError] ( identifier[pretty_message] (
literal[string] ,
identifier[type_name] ( identifier[extended_key_usage] )
))
keyword[if] keyword[not] identifier[isinstance] ( identifier[extended_optional] , identifier[bool] ):
keyword[raise] identifier[TypeError] ( identifier[pretty_message] (
literal[string] ,
identifier[type_name] ( identifier[extended_optional] )
))
identifier[missing_key_usage] = identifier[key_usage]
keyword[if] identifier[cert] . identifier[key_usage_value] :
identifier[missing_key_usage] = identifier[key_usage] - identifier[cert] . identifier[key_usage_value] . identifier[native]
identifier[missing_extended_key_usage] = identifier[set] ()
keyword[if] identifier[extended_optional] keyword[is] keyword[False] keyword[and] keyword[not] identifier[cert] . identifier[extended_key_usage_value] :
identifier[missing_extended_key_usage] = identifier[extended_key_usage]
keyword[elif] identifier[cert] . identifier[extended_key_usage_value] keyword[is] keyword[not] keyword[None] :
identifier[missing_extended_key_usage] = identifier[extended_key_usage] - identifier[set] ( identifier[cert] . identifier[extended_key_usage_value] . identifier[native] )
keyword[if] identifier[missing_key_usage] keyword[or] identifier[missing_extended_key_usage] :
identifier[plural] = literal[string] keyword[if] identifier[len] ( identifier[missing_key_usage] | identifier[missing_extended_key_usage] )> literal[int] keyword[else] literal[string]
identifier[friendly_purposes] =[]
keyword[for] identifier[purpose] keyword[in] identifier[sorted] ( identifier[missing_key_usage] | identifier[missing_extended_key_usage] ):
identifier[friendly_purposes] . identifier[append] ( identifier[purpose] . identifier[replace] ( literal[string] , literal[string] ))
keyword[raise] identifier[InvalidCertificateError] ( identifier[pretty_message] (
literal[string] ,
identifier[plural] ,
literal[string] . identifier[join] ( identifier[friendly_purposes] )
)) | def validate_usage(validation_context, cert, key_usage, extended_key_usage, extended_optional):
"""
Validates the end-entity certificate from a
certvalidator.path.ValidationPath object to ensure that the certificate
is valid for the key usage and extended key usage purposes specified.
THE CERTIFICATE PATH MUST BE VALIDATED SEPARATELY VIA validate_path()!
:param validation_context:
A certvalidator.context.ValidationContext object to use for
configuring validation behavior
:param cert:
An asn1crypto.x509.Certificate object returned from validate_path()
:param key_usage:
A set of unicode strings of the required key usage purposes
:param extended_key_usage:
A set of unicode strings of the required extended key usage purposes
:param extended_optional:
A bool - if the extended_key_usage extension may be omitted and still
considered valid
:raises:
certvalidator.errors.InvalidCertificateError - when the certificate is not valid for the usages specified
"""
if not isinstance(validation_context, ValidationContext):
raise TypeError(pretty_message('\n validation_context must be an instance of\n certvalidator.context.ValidationContext, not %s\n ', type_name(validation_context))) # depends on [control=['if'], data=[]]
if validation_context.is_whitelisted(cert):
return # depends on [control=['if'], data=[]]
if key_usage is None:
key_usage = set() # depends on [control=['if'], data=['key_usage']]
if extended_key_usage is None:
extended_key_usage = set() # depends on [control=['if'], data=['extended_key_usage']]
if not isinstance(key_usage, set):
raise TypeError(pretty_message('\n key_usage must be a set of unicode strings, not %s\n ', type_name(key_usage))) # depends on [control=['if'], data=[]]
if not isinstance(extended_key_usage, set):
raise TypeError(pretty_message('\n extended_key_usage must be a set of unicode strings, not %s\n ', type_name(extended_key_usage))) # depends on [control=['if'], data=[]]
if not isinstance(extended_optional, bool):
raise TypeError(pretty_message('\n extended_optional must be a boolean, not %s\n ', type_name(extended_optional))) # depends on [control=['if'], data=[]]
missing_key_usage = key_usage
if cert.key_usage_value:
missing_key_usage = key_usage - cert.key_usage_value.native # depends on [control=['if'], data=[]]
missing_extended_key_usage = set()
if extended_optional is False and (not cert.extended_key_usage_value):
missing_extended_key_usage = extended_key_usage # depends on [control=['if'], data=[]]
elif cert.extended_key_usage_value is not None:
missing_extended_key_usage = extended_key_usage - set(cert.extended_key_usage_value.native) # depends on [control=['if'], data=[]]
if missing_key_usage or missing_extended_key_usage:
plural = 's' if len(missing_key_usage | missing_extended_key_usage) > 1 else ''
friendly_purposes = []
for purpose in sorted(missing_key_usage | missing_extended_key_usage):
friendly_purposes.append(purpose.replace('_', ' ')) # depends on [control=['for'], data=['purpose']]
raise InvalidCertificateError(pretty_message('\n The X.509 certificate provided is not valid for the purpose%s of %s\n ', plural, ', '.join(friendly_purposes))) # depends on [control=['if'], data=[]] |
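
A hedged sketch of calling validate_usage above for a TLS-server certificate; 'end_entity_cert' is assumed to be an asn1crypto.x509.Certificate whose chain was already checked with validate_path():

from certvalidator.context import ValidationContext

context = ValidationContext()
validate_usage(
    context,
    end_entity_cert,                                  # assumed in scope
    key_usage={'digital_signature', 'key_encipherment'},
    extended_key_usage={'server_auth'},
    extended_optional=True,                           # tolerate a missing EKU extension
)
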
async def authenticate_with_device(atv):
"""Perform device authentication and print credentials."""
credentials = await atv.airplay.generate_credentials()
await atv.airplay.load_credentials(credentials)
try:
await atv.airplay.start_authentication()
pin = input('PIN Code: ')
await atv.airplay.finish_authentication(pin)
print('Credentials: {0}'.format(credentials))
except exceptions.DeviceAuthenticationError:
print('Failed to authenticate', file=sys.stderr) | <ast.AsyncFunctionDef object at 0x7da18fe918a0> | keyword[async] keyword[def] identifier[authenticate_with_device] ( identifier[atv] ):
literal[string]
identifier[credentials] = keyword[await] identifier[atv] . identifier[airplay] . identifier[generate_credentials] ()
keyword[await] identifier[atv] . identifier[airplay] . identifier[load_credentials] ( identifier[credentials] )
keyword[try] :
keyword[await] identifier[atv] . identifier[airplay] . identifier[start_authentication] ()
identifier[pin] = identifier[input] ( literal[string] )
keyword[await] identifier[atv] . identifier[airplay] . identifier[finish_authentication] ( identifier[pin] )
identifier[print] ( literal[string] . identifier[format] ( identifier[credentials] ))
keyword[except] identifier[exceptions] . identifier[DeviceAuthenticationError] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) | async def authenticate_with_device(atv):
"""Perform device authentication and print credentials."""
credentials = await atv.airplay.generate_credentials()
await atv.airplay.load_credentials(credentials)
try:
await atv.airplay.start_authentication()
pin = input('PIN Code: ')
await atv.airplay.finish_authentication(pin)
print('Credentials: {0}'.format(credentials)) # depends on [control=['try'], data=[]]
except exceptions.DeviceAuthenticationError:
print('Failed to authenticate', file=sys.stderr) # depends on [control=['except'], data=[]] |
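
A minimal driver sketch for the authenticate_with_device coroutine above; obtaining 'atv' is left to pyatv's own scan/connect helpers, which vary by version:

import asyncio

async def main(atv):
    # Prompts for the on-screen PIN and prints credentials, or reports failure.
    await authenticate_with_device(atv)

# asyncio.get_event_loop().run_until_complete(main(atv))  # 'atv' from pyatv scan/connect (not shown)
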
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results | def function[query_transactions, parameter[self, initial_date, final_date, page, max_results]]:
    constant[ query transactions by date range ]
variable[last_page] assign[=] constant[False]
variable[results] assign[=] list[[]]
while compare[name[last_page] is constant[False]] begin[:]
variable[search_result] assign[=] call[name[self]._consume_query_transactions, parameter[name[initial_date], name[final_date], name[page], name[max_results]]]
call[name[results].extend, parameter[name[search_result].transactions]]
if <ast.BoolOp object at 0x7da18f58d990> begin[:]
variable[last_page] assign[=] constant[True]
return[name[results]] | keyword[def] identifier[query_transactions] ( identifier[self] , identifier[initial_date] , identifier[final_date] ,
identifier[page] = keyword[None] ,
identifier[max_results] = keyword[None] ):
literal[string]
identifier[last_page] = keyword[False]
identifier[results] =[]
keyword[while] identifier[last_page] keyword[is] keyword[False] :
identifier[search_result] = identifier[self] . identifier[_consume_query_transactions] (
identifier[initial_date] , identifier[final_date] , identifier[page] , identifier[max_results] )
identifier[results] . identifier[extend] ( identifier[search_result] . identifier[transactions] )
keyword[if] identifier[search_result] . identifier[current_page] keyword[is] keyword[None] keyword[or] identifier[search_result] . identifier[total_pages] keyword[is] keyword[None] keyword[or] identifier[search_result] . identifier[current_page] == identifier[search_result] . identifier[total_pages] :
identifier[last_page] = keyword[True]
keyword[else] :
identifier[page] = identifier[search_result] . identifier[current_page] + literal[int]
keyword[return] identifier[results] | def query_transactions(self, initial_date, final_date, page=None, max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or search_result.total_pages is None or search_result.current_page == search_result.total_pages:
last_page = True # depends on [control=['if'], data=[]]
else:
page = search_result.current_page + 1 # depends on [control=['while'], data=['last_page']]
return results |
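
An illustrative call of the page-walking query above; 'client' is assumed to be an instance of the enclosing wrapper class, and the accepted date type is an assumption (values are forwarded untouched):

import datetime

start = datetime.datetime(2019, 1, 1)
end = datetime.datetime(2019, 1, 31, 23, 59, 59)

# Walks current_page up to total_pages internally and concatenates every page.
transactions = client.query_transactions(start, end, max_results=100)
print(len(transactions), "transactions in January")
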
def archive_insert_data(self, data_dump):
'''
        :param data_dump: Archive table data
        :type data_dump: list[archive]
:raises: IOError
'''
with self.session as session:
try:
data = [self.tables.archive(**entry) for entry in data_dump]
session.add_all(data)
session.commit()
except SQLAlchemyError as exc:
session.rollback()
print_exc()
raise IOError(exc) | def function[archive_insert_data, parameter[self, data_dump]]:
constant[
    :param data_dump: Archive table data
    :type data_dump: list[archive]
:raises: IOError
]
with name[self].session begin[:]
<ast.Try object at 0x7da20c6c7490> | keyword[def] identifier[archive_insert_data] ( identifier[self] , identifier[data_dump] ):
literal[string]
keyword[with] identifier[self] . identifier[session] keyword[as] identifier[session] :
keyword[try] :
identifier[data] =[ identifier[self] . identifier[tables] . identifier[archive] (** identifier[entry] ) keyword[for] identifier[entry] keyword[in] identifier[data_dump] ]
identifier[session] . identifier[add_all] ( identifier[data] )
identifier[session] . identifier[commit] ()
keyword[except] identifier[SQLAlchemyError] keyword[as] identifier[exc] :
identifier[session] . identifier[rollback] ()
identifier[print_exc] ()
keyword[raise] identifier[IOError] ( identifier[exc] ) | def archive_insert_data(self, data_dump):
"""
    :param data_dump: Archive table data
    :type data_dump: list[archive]
:raises: IOError
"""
with self.session as session:
try:
data = [self.tables.archive(**entry) for entry in data_dump]
session.add_all(data)
session.commit() # depends on [control=['try'], data=[]]
except SQLAlchemyError as exc:
session.rollback()
print_exc()
raise IOError(exc) # depends on [control=['except'], data=['exc']] # depends on [control=['with'], data=['session']] |
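
A batch-insert sketch for archive_insert_data above; 'store' stands for an instance owning .session and .tables, and the column names are hypothetical:

rows = [
    {"name": "run-001", "payload": "alpha"},
    {"name": "run-002", "payload": "beta"},
]
try:
    store.archive_insert_data(rows)          # one 'archive' row per dict
except IOError as exc:
    # A failed flush rolls back the whole batch before re-raising as IOError.
    print("batch rejected:", exc)
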
def _serve_environment(self, request):
"""Serve a JSON object containing some base properties used by the frontend.
* data_location is either a path to a directory or an address to a
database (depending on which mode TensorBoard is running in).
* window_title is the title of the TensorBoard web page.
"""
return http_util.Respond(
request,
{
'data_location': self._logdir or self._db_uri,
'mode': 'db' if self._db_uri else 'logdir',
'window_title': self._window_title,
},
'application/json') | def function[_serve_environment, parameter[self, request]]:
constant[Serve a JSON object containing some base properties used by the frontend.
* data_location is either a path to a directory or an address to a
database (depending on which mode TensorBoard is running in).
* window_title is the title of the TensorBoard web page.
]
return[call[name[http_util].Respond, parameter[name[request], dictionary[[<ast.Constant object at 0x7da20e9b1de0>, <ast.Constant object at 0x7da20e9b3160>, <ast.Constant object at 0x7da20e9b2d70>], [<ast.BoolOp object at 0x7da20e9b1900>, <ast.IfExp object at 0x7da1b1f98340>, <ast.Attribute object at 0x7da1b1f9aaa0>]], constant[application/json]]]] | keyword[def] identifier[_serve_environment] ( identifier[self] , identifier[request] ):
literal[string]
keyword[return] identifier[http_util] . identifier[Respond] (
identifier[request] ,
{
literal[string] : identifier[self] . identifier[_logdir] keyword[or] identifier[self] . identifier[_db_uri] ,
literal[string] : literal[string] keyword[if] identifier[self] . identifier[_db_uri] keyword[else] literal[string] ,
literal[string] : identifier[self] . identifier[_window_title] ,
},
literal[string] ) | def _serve_environment(self, request):
"""Serve a JSON object containing some base properties used by the frontend.
* data_location is either a path to a directory or an address to a
database (depending on which mode TensorBoard is running in).
* window_title is the title of the TensorBoard web page.
"""
return http_util.Respond(request, {'data_location': self._logdir or self._db_uri, 'mode': 'db' if self._db_uri else 'logdir', 'window_title': self._window_title}, 'application/json') |
def predict_lmm_forest(tree_nodes, left_children, right_children, best_predictor, mean, splitting_value, X, depth):
"""
predict_lmm_forest(VectorXi const & tree_nodes, VectorXi const & left_children, VectorXi const & right_children, VectorXi const & best_predictor, MatrixXd const & mean, MatrixXd const & splitting_value, MatrixXd const & X, limix::mfloat_t depth)
Parameters
----------
tree_nodes: VectorXi const &
left_children: VectorXi const &
right_children: VectorXi const &
best_predictor: VectorXi const &
mean: MatrixXd const &
splitting_value: MatrixXd const &
X: MatrixXd const &
depth: limix::mfloat_t
"""
return _core.predict_lmm_forest(tree_nodes, left_children, right_children, best_predictor, mean, splitting_value, X, depth) | def function[predict_lmm_forest, parameter[tree_nodes, left_children, right_children, best_predictor, mean, splitting_value, X, depth]]:
constant[
predict_lmm_forest(VectorXi const & tree_nodes, VectorXi const & left_children, VectorXi const & right_children, VectorXi const & best_predictor, MatrixXd const & mean, MatrixXd const & splitting_value, MatrixXd const & X, limix::mfloat_t depth)
Parameters
----------
tree_nodes: VectorXi const &
left_children: VectorXi const &
right_children: VectorXi const &
best_predictor: VectorXi const &
mean: MatrixXd const &
splitting_value: MatrixXd const &
X: MatrixXd const &
depth: limix::mfloat_t
]
return[call[name[_core].predict_lmm_forest, parameter[name[tree_nodes], name[left_children], name[right_children], name[best_predictor], name[mean], name[splitting_value], name[X], name[depth]]]] | keyword[def] identifier[predict_lmm_forest] ( identifier[tree_nodes] , identifier[left_children] , identifier[right_children] , identifier[best_predictor] , identifier[mean] , identifier[splitting_value] , identifier[X] , identifier[depth] ):
literal[string]
keyword[return] identifier[_core] . identifier[predict_lmm_forest] ( identifier[tree_nodes] , identifier[left_children] , identifier[right_children] , identifier[best_predictor] , identifier[mean] , identifier[splitting_value] , identifier[X] , identifier[depth] ) | def predict_lmm_forest(tree_nodes, left_children, right_children, best_predictor, mean, splitting_value, X, depth):
"""
predict_lmm_forest(VectorXi const & tree_nodes, VectorXi const & left_children, VectorXi const & right_children, VectorXi const & best_predictor, MatrixXd const & mean, MatrixXd const & splitting_value, MatrixXd const & X, limix::mfloat_t depth)
Parameters
----------
tree_nodes: VectorXi const &
left_children: VectorXi const &
right_children: VectorXi const &
best_predictor: VectorXi const &
mean: MatrixXd const &
splitting_value: MatrixXd const &
X: MatrixXd const &
depth: limix::mfloat_t
"""
return _core.predict_lmm_forest(tree_nodes, left_children, right_children, best_predictor, mean, splitting_value, X, depth) |
def as_knock(self, created=False):
"""
Returns a dictionary with the knock data built from _knocker_data
"""
knock = {}
if self.should_knock(created):
for field, data in self._retrieve_data(None, self._knocker_data):
knock[field] = data
return knock | def function[as_knock, parameter[self, created]]:
constant[
Returns a dictionary with the knock data built from _knocker_data
]
variable[knock] assign[=] dictionary[[], []]
if call[name[self].should_knock, parameter[name[created]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18fe92560>, <ast.Name object at 0x7da18fe92e30>]]] in starred[call[name[self]._retrieve_data, parameter[constant[None], name[self]._knocker_data]]] begin[:]
call[name[knock]][name[field]] assign[=] name[data]
return[name[knock]] | keyword[def] identifier[as_knock] ( identifier[self] , identifier[created] = keyword[False] ):
literal[string]
identifier[knock] ={}
keyword[if] identifier[self] . identifier[should_knock] ( identifier[created] ):
keyword[for] identifier[field] , identifier[data] keyword[in] identifier[self] . identifier[_retrieve_data] ( keyword[None] , identifier[self] . identifier[_knocker_data] ):
identifier[knock] [ identifier[field] ]= identifier[data]
keyword[return] identifier[knock] | def as_knock(self, created=False):
"""
Returns a dictionary with the knock data built from _knocker_data
"""
knock = {}
if self.should_knock(created):
for (field, data) in self._retrieve_data(None, self._knocker_data):
knock[field] = data # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return knock |
def add_spout(self, name, spout_cls, par, config=None, optional_outputs=None):
"""Add a spout to the topology"""
spout_spec = spout_cls.spec(name=name, par=par, config=config,
optional_outputs=optional_outputs)
self.add_spec(spout_spec)
return spout_spec | def function[add_spout, parameter[self, name, spout_cls, par, config, optional_outputs]]:
constant[Add a spout to the topology]
variable[spout_spec] assign[=] call[name[spout_cls].spec, parameter[]]
call[name[self].add_spec, parameter[name[spout_spec]]]
return[name[spout_spec]] | keyword[def] identifier[add_spout] ( identifier[self] , identifier[name] , identifier[spout_cls] , identifier[par] , identifier[config] = keyword[None] , identifier[optional_outputs] = keyword[None] ):
literal[string]
identifier[spout_spec] = identifier[spout_cls] . identifier[spec] ( identifier[name] = identifier[name] , identifier[par] = identifier[par] , identifier[config] = identifier[config] ,
identifier[optional_outputs] = identifier[optional_outputs] )
identifier[self] . identifier[add_spec] ( identifier[spout_spec] )
keyword[return] identifier[spout_spec] | def add_spout(self, name, spout_cls, par, config=None, optional_outputs=None):
"""Add a spout to the topology"""
spout_spec = spout_cls.spec(name=name, par=par, config=config, optional_outputs=optional_outputs)
self.add_spec(spout_spec)
return spout_spec |
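
A hedged wiring sketch for the add_spout row above, heronpy-style; TopologyBuilder is assumed to be the enclosing class and WordSpout a user-defined Spout subclass:

builder = TopologyBuilder(name="word-count")          # class providing add_spout
spout_spec = builder.add_spout("word-spout", WordSpout, par=2)
# spout_spec can then be wired as an input to later add_bolt(...) calls.
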
def run(bam, chrom, pos1, pos2, reffa, chr_reffa, parameters):
"""Run mpileup on given chrom and pos"""
# check for chr ref
is_chr_query = chrom.startswith('chr')
if is_chr_query and chr_reffa is None:
chr_reffa = reffa
# check bam ref type
bam_header = subprocess.check_output("samtools view -H {}".format(bam), shell=True)
is_chr_bam = bam_header.find('SN:chr') != -1
if is_chr_bam:
reffa = chr_reffa
if not is_chr_query and is_chr_bam:
chrom = 'chr' + chrom
if is_chr_query and not is_chr_bam:
chrom = re.sub(r'^chr', '', chrom)
posmin = min(pos1, pos2)
posmax = max(pos1, pos2)
cmd = "samtools view -bh {bam} {chrom}:{pos1}-{pos2} " \
"| samtools mpileup {parameters} -f {reffa} -".format(bam=bam, chrom=chrom,
pos1=posmin, pos2=posmax,
reffa=reffa, parameters=parameters)
if pos1 == pos2:
cmd += " | awk '$2 == {pos}'".format(pos=pos1)
else:
cmd += " | tail -n +2 | awk '$2 >= {posmin} && $2 <= {posmax}'".format(posmin=posmin, posmax=posmax)
sys.stderr.write("Running:\n{}\n".format(cmd))
child = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
stdout, stderr = child.communicate()
if child.returncode != 0:
if len(stdout) == 0 and stderr is None:
warnings.warn("Command:\n{cmd}\n did not exit with zero exit code. "
"Possibly no coverage for sample.".format(cmd=cmd))
else:
raise(Exception("Command:\n{cmd}\n did not exit with zero exit code. "
"Check command.".format(cmd=cmd)))
else:
return stdout | def function[run, parameter[bam, chrom, pos1, pos2, reffa, chr_reffa, parameters]]:
constant[Run mpileup on given chrom and pos]
variable[is_chr_query] assign[=] call[name[chrom].startswith, parameter[constant[chr]]]
if <ast.BoolOp object at 0x7da20c990460> begin[:]
variable[chr_reffa] assign[=] name[reffa]
variable[bam_header] assign[=] call[name[subprocess].check_output, parameter[call[constant[samtools view -H {}].format, parameter[name[bam]]]]]
variable[is_chr_bam] assign[=] compare[call[name[bam_header].find, parameter[constant[SN:chr]]] not_equal[!=] <ast.UnaryOp object at 0x7da20c9901c0>]
if name[is_chr_bam] begin[:]
variable[reffa] assign[=] name[chr_reffa]
if <ast.BoolOp object at 0x7da20c993eb0> begin[:]
variable[chrom] assign[=] binary_operation[constant[chr] + name[chrom]]
if <ast.BoolOp object at 0x7da20c993e20> begin[:]
variable[chrom] assign[=] call[name[re].sub, parameter[constant[^chr], constant[], name[chrom]]]
variable[posmin] assign[=] call[name[min], parameter[name[pos1], name[pos2]]]
variable[posmax] assign[=] call[name[max], parameter[name[pos1], name[pos2]]]
variable[cmd] assign[=] call[constant[samtools view -bh {bam} {chrom}:{pos1}-{pos2} | samtools mpileup {parameters} -f {reffa} -].format, parameter[]]
if compare[name[pos1] equal[==] name[pos2]] begin[:]
<ast.AugAssign object at 0x7da20c991cc0>
call[name[sys].stderr.write, parameter[call[constant[Running:
{}
].format, parameter[name[cmd]]]]]
variable[child] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]]
<ast.Tuple object at 0x7da1b195f730> assign[=] call[name[child].communicate, parameter[]]
if compare[name[child].returncode not_equal[!=] constant[0]] begin[:]
if <ast.BoolOp object at 0x7da1b195f670> begin[:]
call[name[warnings].warn, parameter[call[constant[Command:
{cmd}
did not exit with zero exit code. Possibly no coverage for sample.].format, parameter[]]]] | keyword[def] identifier[run] ( identifier[bam] , identifier[chrom] , identifier[pos1] , identifier[pos2] , identifier[reffa] , identifier[chr_reffa] , identifier[parameters] ):
literal[string]
identifier[is_chr_query] = identifier[chrom] . identifier[startswith] ( literal[string] )
keyword[if] identifier[is_chr_query] keyword[and] identifier[chr_reffa] keyword[is] keyword[None] :
identifier[chr_reffa] = identifier[reffa]
identifier[bam_header] = identifier[subprocess] . identifier[check_output] ( literal[string] . identifier[format] ( identifier[bam] ), identifier[shell] = keyword[True] )
identifier[is_chr_bam] = identifier[bam_header] . identifier[find] ( literal[string] )!=- literal[int]
keyword[if] identifier[is_chr_bam] :
identifier[reffa] = identifier[chr_reffa]
keyword[if] keyword[not] identifier[is_chr_query] keyword[and] identifier[is_chr_bam] :
identifier[chrom] = literal[string] + identifier[chrom]
keyword[if] identifier[is_chr_query] keyword[and] keyword[not] identifier[is_chr_bam] :
identifier[chrom] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[chrom] )
identifier[posmin] = identifier[min] ( identifier[pos1] , identifier[pos2] )
identifier[posmax] = identifier[max] ( identifier[pos1] , identifier[pos2] )
identifier[cmd] = literal[string] literal[string] . identifier[format] ( identifier[bam] = identifier[bam] , identifier[chrom] = identifier[chrom] ,
identifier[pos1] = identifier[posmin] , identifier[pos2] = identifier[posmax] ,
identifier[reffa] = identifier[reffa] , identifier[parameters] = identifier[parameters] )
keyword[if] identifier[pos1] == identifier[pos2] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[pos] = identifier[pos1] )
keyword[else] :
identifier[cmd] += literal[string] . identifier[format] ( identifier[posmin] = identifier[posmin] , identifier[posmax] = identifier[posmax] )
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[cmd] ))
identifier[child] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
identifier[stdout] , identifier[stderr] = identifier[child] . identifier[communicate] ()
keyword[if] identifier[child] . identifier[returncode] != literal[int] :
keyword[if] identifier[len] ( identifier[stdout] )== literal[int] keyword[and] identifier[stderr] keyword[is] keyword[None] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] . identifier[format] ( identifier[cmd] = identifier[cmd] ))
keyword[else] :
keyword[raise] ( identifier[Exception] ( literal[string]
literal[string] . identifier[format] ( identifier[cmd] = identifier[cmd] )))
keyword[else] :
keyword[return] identifier[stdout] | def run(bam, chrom, pos1, pos2, reffa, chr_reffa, parameters):
"""Run mpileup on given chrom and pos"""
# check for chr ref
is_chr_query = chrom.startswith('chr')
if is_chr_query and chr_reffa is None:
chr_reffa = reffa # depends on [control=['if'], data=[]]
# check bam ref type
bam_header = subprocess.check_output('samtools view -H {}'.format(bam), shell=True)
is_chr_bam = bam_header.find('SN:chr') != -1
if is_chr_bam:
reffa = chr_reffa # depends on [control=['if'], data=[]]
if not is_chr_query and is_chr_bam:
chrom = 'chr' + chrom # depends on [control=['if'], data=[]]
if is_chr_query and (not is_chr_bam):
chrom = re.sub('^chr', '', chrom) # depends on [control=['if'], data=[]]
posmin = min(pos1, pos2)
posmax = max(pos1, pos2)
cmd = 'samtools view -bh {bam} {chrom}:{pos1}-{pos2} | samtools mpileup {parameters} -f {reffa} -'.format(bam=bam, chrom=chrom, pos1=posmin, pos2=posmax, reffa=reffa, parameters=parameters)
if pos1 == pos2:
cmd += " | awk '$2 == {pos}'".format(pos=pos1) # depends on [control=['if'], data=['pos1']]
else:
cmd += " | tail -n +2 | awk '$2 >= {posmin} && $2 <= {posmax}'".format(posmin=posmin, posmax=posmax)
sys.stderr.write('Running:\n{}\n'.format(cmd))
child = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
(stdout, stderr) = child.communicate()
if child.returncode != 0:
if len(stdout) == 0 and stderr is None:
warnings.warn('Command:\n{cmd}\n did not exit with zero exit code. Possibly no coverage for sample.'.format(cmd=cmd)) # depends on [control=['if'], data=[]]
else:
raise Exception('Command:\n{cmd}\n did not exit with zero exit code. Check command.'.format(cmd=cmd)) # depends on [control=['if'], data=[]]
else:
return stdout |
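
An example invocation of run() above, assuming samtools on PATH, an indexed BAM, and chr/no-chr reference FASTAs; '-B' (disable BAQ) stands in for typical mpileup parameters:

pileup = run("sample.bam", "chr7", 55249071, 55249071,
             "GRCh37.fa", "GRCh37_chr.fa", "-B")
if pileup:                       # empty/None when the region had no coverage
    for line in pileup.splitlines():
        print(line)
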
def _timeout_cb(self, method):
"""Call the timeout handler due.
"""
self._anything_done = True
logger.debug("_timeout_cb() called for: {0!r}".format(method))
result = method()
# pylint: disable=W0212
rec = method._pyxmpp_recurring
if rec:
self._prepare_pending()
return True
if rec is None and result is not None:
logger.debug(" auto-recurring, restarting in {0} s"
.format(result))
tag = glib.timeout_add(int(result * 1000), self._timeout_cb, method)
self._timer_sources[method] = tag
else:
self._timer_sources.pop(method, None)
self._prepare_pending()
return False | def function[_timeout_cb, parameter[self, method]]:
constant[Call the timeout handler due.
]
name[self]._anything_done assign[=] constant[True]
call[name[logger].debug, parameter[call[constant[_timeout_cb() called for: {0!r}].format, parameter[name[method]]]]]
variable[result] assign[=] call[name[method], parameter[]]
variable[rec] assign[=] name[method]._pyxmpp_recurring
if name[rec] begin[:]
call[name[self]._prepare_pending, parameter[]]
return[constant[True]]
if <ast.BoolOp object at 0x7da18ede7850> begin[:]
call[name[logger].debug, parameter[call[constant[ auto-recurring, restarting in {0} s].format, parameter[name[result]]]]]
variable[tag] assign[=] call[name[glib].timeout_add, parameter[call[name[int], parameter[binary_operation[name[result] * constant[1000]]]], name[self]._timeout_cb, name[method]]]
call[name[self]._timer_sources][name[method]] assign[=] name[tag]
call[name[self]._prepare_pending, parameter[]]
return[constant[False]] | keyword[def] identifier[_timeout_cb] ( identifier[self] , identifier[method] ):
literal[string]
identifier[self] . identifier[_anything_done] = keyword[True]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[method] ))
identifier[result] = identifier[method] ()
identifier[rec] = identifier[method] . identifier[_pyxmpp_recurring]
keyword[if] identifier[rec] :
identifier[self] . identifier[_prepare_pending] ()
keyword[return] keyword[True]
keyword[if] identifier[rec] keyword[is] keyword[None] keyword[and] identifier[result] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[result] ))
identifier[tag] = identifier[glib] . identifier[timeout_add] ( identifier[int] ( identifier[result] * literal[int] ), identifier[self] . identifier[_timeout_cb] , identifier[method] )
identifier[self] . identifier[_timer_sources] [ identifier[method] ]= identifier[tag]
keyword[else] :
identifier[self] . identifier[_timer_sources] . identifier[pop] ( identifier[method] , keyword[None] )
identifier[self] . identifier[_prepare_pending] ()
keyword[return] keyword[False] | def _timeout_cb(self, method):
"""Call the timeout handler due.
"""
self._anything_done = True
logger.debug('_timeout_cb() called for: {0!r}'.format(method))
result = method()
# pylint: disable=W0212
rec = method._pyxmpp_recurring
if rec:
self._prepare_pending()
return True # depends on [control=['if'], data=[]]
if rec is None and result is not None:
logger.debug(' auto-recurring, restarting in {0} s'.format(result))
tag = glib.timeout_add(int(result * 1000), self._timeout_cb, method)
self._timer_sources[method] = tag # depends on [control=['if'], data=[]]
else:
self._timer_sources.pop(method, None)
self._prepare_pending()
return False |
def _raise_duplicate_symbol(msgid, symbol, other_symbol):
"""Raise an error when a symbol is duplicated.
:param str msgid: The msgid corresponding to the symbols
:param str symbol: Offending symbol
:param str other_symbol: Other offending symbol
:raises InvalidMessageError: when a symbol is duplicated.
"""
symbols = [symbol, other_symbol]
symbols.sort()
error_message = "Message id '{msgid}' cannot have both ".format(msgid=msgid)
error_message += "'{other_symbol}' and '{symbol}' as symbolic name.".format(
other_symbol=symbols[0], symbol=symbols[1]
)
raise InvalidMessageError(error_message) | def function[_raise_duplicate_symbol, parameter[msgid, symbol, other_symbol]]:
constant[Raise an error when a symbol is duplicated.
:param str msgid: The msgid corresponding to the symbols
:param str symbol: Offending symbol
:param str other_symbol: Other offending symbol
:raises InvalidMessageError: when a symbol is duplicated.
]
variable[symbols] assign[=] list[[<ast.Name object at 0x7da1b024d030>, <ast.Name object at 0x7da1b024f8b0>]]
call[name[symbols].sort, parameter[]]
variable[error_message] assign[=] call[constant[Message id '{msgid}' cannot have both ].format, parameter[]]
<ast.AugAssign object at 0x7da1b024cb80>
<ast.Raise object at 0x7da1b024fa00> | keyword[def] identifier[_raise_duplicate_symbol] ( identifier[msgid] , identifier[symbol] , identifier[other_symbol] ):
literal[string]
identifier[symbols] =[ identifier[symbol] , identifier[other_symbol] ]
identifier[symbols] . identifier[sort] ()
identifier[error_message] = literal[string] . identifier[format] ( identifier[msgid] = identifier[msgid] )
identifier[error_message] += literal[string] . identifier[format] (
identifier[other_symbol] = identifier[symbols] [ literal[int] ], identifier[symbol] = identifier[symbols] [ literal[int] ]
)
keyword[raise] identifier[InvalidMessageError] ( identifier[error_message] ) | def _raise_duplicate_symbol(msgid, symbol, other_symbol):
"""Raise an error when a symbol is duplicated.
:param str msgid: The msgid corresponding to the symbols
:param str symbol: Offending symbol
:param str other_symbol: Other offending symbol
:raises InvalidMessageError: when a symbol is duplicated.
"""
symbols = [symbol, other_symbol]
symbols.sort()
error_message = "Message id '{msgid}' cannot have both ".format(msgid=msgid)
error_message += "'{other_symbol}' and '{symbol}' as symbolic name.".format(other_symbol=symbols[0], symbol=symbols[1])
raise InvalidMessageError(error_message) |
def log_html(self, log) -> str:
"""Return single check sub-result string as HTML or not if below log
level."""
if not self.omit_loglevel(log["status"]):
emoticon = EMOTICON[log["status"]]
status = log["status"]
message = html.escape(log["message"]).replace("\n", "<br/>")
return (
"<li class='details_item'>"
f"<span class='details_indicator'>{emoticon} {status}</span>"
f"<span class='details_text'>{message}</span>"
"</li>"
)
return "" | def function[log_html, parameter[self, log]]:
    constant[Return a single check sub-result as an HTML string, or an empty
    string if its status is below the log level.]
if <ast.UnaryOp object at 0x7da1b122cee0> begin[:]
variable[emoticon] assign[=] call[name[EMOTICON]][call[name[log]][constant[status]]]
variable[status] assign[=] call[name[log]][constant[status]]
variable[message] assign[=] call[call[name[html].escape, parameter[call[name[log]][constant[message]]]].replace, parameter[constant[
], constant[<br/>]]]
return[<ast.JoinedStr object at 0x7da1b1250490>]
return[constant[]] | keyword[def] identifier[log_html] ( identifier[self] , identifier[log] )-> identifier[str] :
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[omit_loglevel] ( identifier[log] [ literal[string] ]):
identifier[emoticon] = identifier[EMOTICON] [ identifier[log] [ literal[string] ]]
identifier[status] = identifier[log] [ literal[string] ]
identifier[message] = identifier[html] . identifier[escape] ( identifier[log] [ literal[string] ]). identifier[replace] ( literal[string] , literal[string] )
keyword[return] (
literal[string]
literal[string]
literal[string]
literal[string]
)
keyword[return] literal[string] | def log_html(self, log) -> str:
"""Return single check sub-result string as HTML or not if below log
level."""
if not self.omit_loglevel(log['status']):
emoticon = EMOTICON[log['status']]
status = log['status']
message = html.escape(log['message']).replace('\n', '<br/>')
return f"<li class='details_item'><span class='details_indicator'>{emoticon} {status}</span><span class='details_text'>{message}</span></li>" # depends on [control=['if'], data=[]]
return '' |
def all(self):
"""All results"""
all = list(self)
if self._results_cache:
return iter(self._results_cache)
return all | def function[all, parameter[self]]:
constant[All results]
variable[all] assign[=] call[name[list], parameter[name[self]]]
if name[self]._results_cache begin[:]
return[call[name[iter], parameter[name[self]._results_cache]]]
return[name[all]] | keyword[def] identifier[all] ( identifier[self] ):
literal[string]
identifier[all] = identifier[list] ( identifier[self] )
keyword[if] identifier[self] . identifier[_results_cache] :
keyword[return] identifier[iter] ( identifier[self] . identifier[_results_cache] )
keyword[return] identifier[all] | def all(self):
"""All results"""
all = list(self)
if self._results_cache:
return iter(self._results_cache) # depends on [control=['if'], data=[]]
return all |
def cancel(self):
""" stops the timer. call_back function is not called """
self.event.clear()
if self.__timer is not None:
self.__timer.cancel() | def function[cancel, parameter[self]]:
    constant[Stop the timer; the call_back function will not be called.]
call[name[self].event.clear, parameter[]]
if compare[name[self].__timer is_not constant[None]] begin[:]
call[name[self].__timer.cancel, parameter[]] | keyword[def] identifier[cancel] ( identifier[self] ):
literal[string]
identifier[self] . identifier[event] . identifier[clear] ()
keyword[if] identifier[self] . identifier[__timer] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[__timer] . identifier[cancel] () | def cancel(self):
""" stops the timer. call_back function is not called """
self.event.clear()
if self.__timer is not None:
self.__timer.cancel() # depends on [control=['if'], data=[]] |
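A minimal sketch of the same cancel pattern built from the stdlib objects this class appears to wrap, an Event plus a threading.Timer (the attribute names above are the class's own):

import threading

event = threading.Event()
timer = threading.Timer(5.0, event.set)  # event.set stands in for the call_back
timer.start()

# Cancel before the 5 s delay elapses: the callback never runs.
event.clear()
if timer is not None:
    timer.cancel()
print(event.is_set())  # False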
def write(self, filename=None):
"""Write array to xvg file *filename* in NXY format.
        .. Note:: Only plain files are supported at the moment, not compressed ones.
"""
self._init_filename(filename)
with utilities.openany(self.real_filename, 'w') as xvg:
xvg.write("# xmgrace compatible NXY data file\n"
"# Written by gromacs.formats.XVG()\n")
xvg.write("# :columns: {0!r}\n".format(self.names))
for xyy in self.array.T:
xyy.tofile(xvg, sep=" ", format="%-8s") # quick and dirty ascii output...--no compression!
xvg.write('\n') | def function[write, parameter[self, filename]]:
constant[Write array to xvg file *filename* in NXY format.
    .. Note:: Only plain files are supported at the moment, not compressed ones.
]
call[name[self]._init_filename, parameter[name[filename]]]
with call[name[utilities].openany, parameter[name[self].real_filename, constant[w]]] begin[:]
call[name[xvg].write, parameter[constant[# xmgrace compatible NXY data file
# Written by gromacs.formats.XVG()
]]]
call[name[xvg].write, parameter[call[constant[# :columns: {0!r}
].format, parameter[name[self].names]]]]
for taget[name[xyy]] in starred[name[self].array.T] begin[:]
call[name[xyy].tofile, parameter[name[xvg]]]
call[name[xvg].write, parameter[constant[
]]] | keyword[def] identifier[write] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
identifier[self] . identifier[_init_filename] ( identifier[filename] )
keyword[with] identifier[utilities] . identifier[openany] ( identifier[self] . identifier[real_filename] , literal[string] ) keyword[as] identifier[xvg] :
identifier[xvg] . identifier[write] ( literal[string]
literal[string] )
identifier[xvg] . identifier[write] ( literal[string] . identifier[format] ( identifier[self] . identifier[names] ))
keyword[for] identifier[xyy] keyword[in] identifier[self] . identifier[array] . identifier[T] :
identifier[xyy] . identifier[tofile] ( identifier[xvg] , identifier[sep] = literal[string] , identifier[format] = literal[string] )
identifier[xvg] . identifier[write] ( literal[string] ) | def write(self, filename=None):
"""Write array to xvg file *filename* in NXY format.
        .. Note:: Only plain files are supported at the moment, not compressed ones.
"""
self._init_filename(filename)
with utilities.openany(self.real_filename, 'w') as xvg:
xvg.write('# xmgrace compatible NXY data file\n# Written by gromacs.formats.XVG()\n')
xvg.write('# :columns: {0!r}\n'.format(self.names))
for xyy in self.array.T:
xyy.tofile(xvg, sep=' ', format='%-8s') # quick and dirty ascii output...--no compression!
xvg.write('\n') # depends on [control=['for'], data=['xyy']] # depends on [control=['with'], data=['xvg']] |
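The NXY layout itself is easy to reproduce in isolation; a minimal sketch with a plain file and a made-up array, without the gromacs/utilities imports:

import numpy as np

arr = np.array([[0.0, 1.0, 2.0],   # x column
                [0.5, 1.5, 2.5]])  # y column
with open("demo.xvg", "w") as xvg:
    xvg.write("# xmgrace compatible NXY data file\n")
    for xyy in arr.T:              # one file row per transposed array row
        xyy.tofile(xvg, sep=" ", format="%-8s")
        xvg.write("\n")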
def recursive_unicode(obj):
"""Walks a simple data structure, converting byte strings to unicode.
Supports lists, tuples, and dictionaries.
"""
if isinstance(obj, dict):
        return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items())
elif isinstance(obj, list):
return list(recursive_unicode(i) for i in obj)
elif isinstance(obj, tuple):
return tuple(recursive_unicode(i) for i in obj)
elif isinstance(obj, bytes):
return to_unicode(obj)
else:
return obj | def function[recursive_unicode, parameter[obj]]:
constant[Walks a simple data structure, converting byte strings to unicode.
Supports lists, tuples, and dictionaries.
]
if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20cabc940>]]] | keyword[def] identifier[recursive_unicode] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
keyword[return] identifier[dict] (( identifier[recursive_unicode] ( identifier[k] ), identifier[recursive_unicode] ( identifier[v] )) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[obj] . identifier[items] ())
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[list] ):
keyword[return] identifier[list] ( identifier[recursive_unicode] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[tuple] ):
keyword[return] identifier[tuple] ( identifier[recursive_unicode] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[obj] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[bytes] ):
keyword[return] identifier[to_unicode] ( identifier[obj] )
keyword[else] :
keyword[return] identifier[obj] | def recursive_unicode(obj):
"""Walks a simple data structure, converting byte strings to unicode.
Supports lists, tuples, and dictionaries.
"""
if isinstance(obj, dict):
        return dict(((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items())) # depends on [control=['if'], data=[]]
elif isinstance(obj, list):
return list((recursive_unicode(i) for i in obj)) # depends on [control=['if'], data=[]]
elif isinstance(obj, tuple):
return tuple((recursive_unicode(i) for i in obj)) # depends on [control=['if'], data=[]]
elif isinstance(obj, bytes):
return to_unicode(obj) # depends on [control=['if'], data=[]]
else:
return obj |
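A self-contained rerun of the same recursive walk, with a plain bytes.decode standing in for this module's to_unicode() helper:

def walk(obj):
    if isinstance(obj, dict):
        return {walk(k): walk(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [walk(i) for i in obj]
    if isinstance(obj, tuple):
        return tuple(walk(i) for i in obj)
    if isinstance(obj, bytes):
        return obj.decode("utf-8")   # stands in for to_unicode()
    return obj

print(walk({b"key": [b"a", (b"b", 3)]}))  # {'key': ['a', ('b', 3)]}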
def create(self, create_missing=None):
"""Manually fetch a complete set of attributes for this entity.
For more information, see `Bugzilla #1449749
<https://bugzilla.redhat.com/show_bug.cgi?id=1449749>`_.
"""
return Host(
self._server_config,
id=self.create_json(create_missing)['id'],
).read() | def function[create, parameter[self, create_missing]]:
constant[Manually fetch a complete set of attributes for this entity.
For more information, see `Bugzilla #1449749
<https://bugzilla.redhat.com/show_bug.cgi?id=1449749>`_.
]
return[call[call[name[Host], parameter[name[self]._server_config]].read, parameter[]]] | keyword[def] identifier[create] ( identifier[self] , identifier[create_missing] = keyword[None] ):
literal[string]
keyword[return] identifier[Host] (
identifier[self] . identifier[_server_config] ,
identifier[id] = identifier[self] . identifier[create_json] ( identifier[create_missing] )[ literal[string] ],
). identifier[read] () | def create(self, create_missing=None):
"""Manually fetch a complete set of attributes for this entity.
For more information, see `Bugzilla #1449749
<https://bugzilla.redhat.com/show_bug.cgi?id=1449749>`_.
"""
return Host(self._server_config, id=self.create_json(create_missing)['id']).read() |
def assigned_librefs(self):
"""
This method returns the list of currently assigned librefs
"""
code = """
data _null_; retain libref; retain cobs 1;
set sashelp.vlibnam end=last;
if cobs EQ 1 then
put "LIBREFSSTART";
cobs = 2;
if libref NE libname then
put "LIBREF=" libname;
libref = libname;
if last then
put "LIBREFSEND";
run;
"""
if self.nosub:
print(code)
return None
else:
ll = self.submit(code, results='text')
librefs = []
log = ll['LOG'].rpartition('LIBREFSEND')[0].rpartition('LIBREFSSTART')
for i in range(log[2].count('LIBREF=')):
log = log[2].partition('LIBREF=')[2].partition('\n')
librefs.append(log[0].strip())
return librefs | def function[assigned_librefs, parameter[self]]:
constant[
This method returns the list of currently assigned librefs
]
variable[code] assign[=] constant[
data _null_; retain libref; retain cobs 1;
set sashelp.vlibnam end=last;
if cobs EQ 1 then
put "LIBREFSSTART";
cobs = 2;
if libref NE libname then
put "LIBREF=" libname;
libref = libname;
if last then
put "LIBREFSEND";
run;
]
if name[self].nosub begin[:]
call[name[print], parameter[name[code]]]
return[constant[None]]
variable[librefs] assign[=] list[[]]
variable[log] assign[=] call[call[call[call[name[ll]][constant[LOG]].rpartition, parameter[constant[LIBREFSEND]]]][constant[0]].rpartition, parameter[constant[LIBREFSSTART]]]
for taget[name[i]] in starred[call[name[range], parameter[call[call[name[log]][constant[2]].count, parameter[constant[LIBREF=]]]]]] begin[:]
variable[log] assign[=] call[call[call[call[name[log]][constant[2]].partition, parameter[constant[LIBREF=]]]][constant[2]].partition, parameter[constant[
]]]
call[name[librefs].append, parameter[call[call[name[log]][constant[0]].strip, parameter[]]]]
return[name[librefs]] | keyword[def] identifier[assigned_librefs] ( identifier[self] ):
literal[string]
identifier[code] = literal[string]
keyword[if] identifier[self] . identifier[nosub] :
identifier[print] ( identifier[code] )
keyword[return] keyword[None]
keyword[else] :
identifier[ll] = identifier[self] . identifier[submit] ( identifier[code] , identifier[results] = literal[string] )
identifier[librefs] =[]
identifier[log] = identifier[ll] [ literal[string] ]. identifier[rpartition] ( literal[string] )[ literal[int] ]. identifier[rpartition] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[log] [ literal[int] ]. identifier[count] ( literal[string] )):
identifier[log] = identifier[log] [ literal[int] ]. identifier[partition] ( literal[string] )[ literal[int] ]. identifier[partition] ( literal[string] )
identifier[librefs] . identifier[append] ( identifier[log] [ literal[int] ]. identifier[strip] ())
keyword[return] identifier[librefs] | def assigned_librefs(self):
"""
This method returns the list of currently assigned librefs
"""
code = '\n data _null_; retain libref; retain cobs 1; \n set sashelp.vlibnam end=last;\n if cobs EQ 1 then\n put "LIBREFSSTART";\n cobs = 2;\n if libref NE libname then\n put "LIBREF=" libname;\n libref = libname;\n if last then\n put "LIBREFSEND";\n run;\n '
if self.nosub:
print(code)
return None # depends on [control=['if'], data=[]]
else:
ll = self.submit(code, results='text')
librefs = []
log = ll['LOG'].rpartition('LIBREFSEND')[0].rpartition('LIBREFSSTART')
for i in range(log[2].count('LIBREF=')):
log = log[2].partition('LIBREF=')[2].partition('\n')
librefs.append(log[0].strip()) # depends on [control=['for'], data=[]]
return librefs |
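The marker-based log parsing can be exercised without a SAS session; a hedged sketch with a fabricated log string in place of ll['LOG']:

log_text = "noise LIBREFSSTART\nLIBREF= WORK\nLIBREF= SASHELP\nLIBREFSEND tail"
body = log_text.rpartition('LIBREFSEND')[0].rpartition('LIBREFSSTART')[2]
librefs = [chunk.partition('\n')[0].strip()
           for chunk in body.split('LIBREF=')[1:]]
print(librefs)  # ['WORK', 'SASHELP']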
def map_throats(self, throats, origin, filtered=True):
r"""
Given a list of throats on a target object, finds indices of
those throats on the calling object
Parameters
----------
throats : array_like
The indices of the throats on the object specified in ``origin``
origin : OpenPNM Base object
The object corresponding to the indices given in ``throats``
filtered : boolean (default is ``True``)
If ``True`` then a ND-array of indices is returned with missing
indices removed, otherwise a named-tuple containing both the
``indices`` and a boolean ``mask`` with ``False`` indicating
which locations were not found.
Returns
-------
Throat indices on the calling object corresponding to the same throats
on the target object. Can be an array or a tuple containing an array
and a mask, depending on the value of ``filtered``.
"""
ids = origin['throat._id'][throats]
return self._map(element='throat', ids=ids, filtered=filtered) | def function[map_throats, parameter[self, throats, origin, filtered]]:
constant[
Given a list of throats on a target object, finds indices of
those throats on the calling object
Parameters
----------
throats : array_like
The indices of the throats on the object specified in ``origin``
origin : OpenPNM Base object
The object corresponding to the indices given in ``throats``
filtered : boolean (default is ``True``)
If ``True`` then a ND-array of indices is returned with missing
indices removed, otherwise a named-tuple containing both the
``indices`` and a boolean ``mask`` with ``False`` indicating
which locations were not found.
Returns
-------
Throat indices on the calling object corresponding to the same throats
on the target object. Can be an array or a tuple containing an array
and a mask, depending on the value of ``filtered``.
]
variable[ids] assign[=] call[call[name[origin]][constant[throat._id]]][name[throats]]
return[call[name[self]._map, parameter[]]] | keyword[def] identifier[map_throats] ( identifier[self] , identifier[throats] , identifier[origin] , identifier[filtered] = keyword[True] ):
literal[string]
identifier[ids] = identifier[origin] [ literal[string] ][ identifier[throats] ]
keyword[return] identifier[self] . identifier[_map] ( identifier[element] = literal[string] , identifier[ids] = identifier[ids] , identifier[filtered] = identifier[filtered] ) | def map_throats(self, throats, origin, filtered=True):
"""
Given a list of throats on a target object, finds indices of
those throats on the calling object
Parameters
----------
throats : array_like
The indices of the throats on the object specified in ``origin``
origin : OpenPNM Base object
The object corresponding to the indices given in ``throats``
filtered : boolean (default is ``True``)
If ``True`` then a ND-array of indices is returned with missing
indices removed, otherwise a named-tuple containing both the
``indices`` and a boolean ``mask`` with ``False`` indicating
which locations were not found.
Returns
-------
Throat indices on the calling object corresponding to the same throats
on the target object. Can be an array or a tuple containing an array
and a mask, depending on the value of ``filtered``.
"""
ids = origin['throat._id'][throats]
return self._map(element='throat', ids=ids, filtered=filtered) |
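The underlying id translation reduces to a set-membership lookup; a toy version with illustrative arrays (real OpenPNM objects store these under 'throat._id'):

import numpy as np

origin_ids = np.array([10, 11, 12, 13])    # origin['throat._id']
calling_ids = np.array([12, 10, 99, 13])   # self['throat._id']
ids = origin_ids[[0, 2]]                   # throats [0, 2] on the origin
mask = np.isin(calling_ids, ids)
print(np.where(mask)[0])                   # indices on the caller: [0 1]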
def diff_missed_lines(self, filename):
"""
Return a list of 2-element tuples `(lineno, is_new)` for the given
file `filename` where `lineno` is a missed line number and `is_new`
indicates whether the missed line was introduced (True) or removed
(False).
"""
line_changed = []
for line in self.file_source(filename):
if line.status is not None:
is_new = not line.status
line_changed.append((line.number, is_new))
return line_changed | def function[diff_missed_lines, parameter[self, filename]]:
constant[
Return a list of 2-element tuples `(lineno, is_new)` for the given
file `filename` where `lineno` is a missed line number and `is_new`
indicates whether the missed line was introduced (True) or removed
(False).
]
variable[line_changed] assign[=] list[[]]
for taget[name[line]] in starred[call[name[self].file_source, parameter[name[filename]]]] begin[:]
if compare[name[line].status is_not constant[None]] begin[:]
variable[is_new] assign[=] <ast.UnaryOp object at 0x7da1b26ac850>
call[name[line_changed].append, parameter[tuple[[<ast.Attribute object at 0x7da1b0fc6260>, <ast.Name object at 0x7da1b0fc7a60>]]]]
return[name[line_changed]] | keyword[def] identifier[diff_missed_lines] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[line_changed] =[]
keyword[for] identifier[line] keyword[in] identifier[self] . identifier[file_source] ( identifier[filename] ):
keyword[if] identifier[line] . identifier[status] keyword[is] keyword[not] keyword[None] :
identifier[is_new] = keyword[not] identifier[line] . identifier[status]
identifier[line_changed] . identifier[append] (( identifier[line] . identifier[number] , identifier[is_new] ))
keyword[return] identifier[line_changed] | def diff_missed_lines(self, filename):
"""
Return a list of 2-element tuples `(lineno, is_new)` for the given
file `filename` where `lineno` is a missed line number and `is_new`
indicates whether the missed line was introduced (True) or removed
(False).
"""
line_changed = []
for line in self.file_source(filename):
if line.status is not None:
is_new = not line.status
line_changed.append((line.number, is_new)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return line_changed |
def Animation_resolveAnimation(self, animationId):
"""
Function path: Animation.resolveAnimation
Domain: Animation
Method name: resolveAnimation
Parameters:
Required arguments:
'animationId' (type: string) -> Animation id.
Returns:
'remoteObject' (type: Runtime.RemoteObject) -> Corresponding remote object.
Description: Gets the remote object of the Animation.
"""
	assert isinstance(animationId, (str,)), (
		"Argument 'animationId' must be of type '['str']'. Received type: '%s'"
		% type(animationId))
subdom_funcs = self.synchronous_command('Animation.resolveAnimation',
animationId=animationId)
return subdom_funcs | def function[Animation_resolveAnimation, parameter[self, animationId]]:
constant[
Function path: Animation.resolveAnimation
Domain: Animation
Method name: resolveAnimation
Parameters:
Required arguments:
'animationId' (type: string) -> Animation id.
Returns:
'remoteObject' (type: Runtime.RemoteObject) -> Corresponding remote object.
Description: Gets the remote object of the Animation.
]
assert[call[name[isinstance], parameter[name[animationId], tuple[[<ast.Name object at 0x7da1b1106b60>]]]]]
variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Animation.resolveAnimation]]]
return[name[subdom_funcs]] | keyword[def] identifier[Animation_resolveAnimation] ( identifier[self] , identifier[animationId] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[animationId] ,( identifier[str] ,)
), literal[string] % identifier[type] (
identifier[animationId] )
identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] ,
identifier[animationId] = identifier[animationId] )
keyword[return] identifier[subdom_funcs] | def Animation_resolveAnimation(self, animationId):
"""
Function path: Animation.resolveAnimation
Domain: Animation
Method name: resolveAnimation
Parameters:
Required arguments:
'animationId' (type: string) -> Animation id.
Returns:
'remoteObject' (type: Runtime.RemoteObject) -> Corresponding remote object.
Description: Gets the remote object of the Animation.
"""
assert isinstance(animationId, (str,)), "Argument 'animationId' must be of type '['str']'. Received type: '%s'" % type(animationId)
subdom_funcs = self.synchronous_command('Animation.resolveAnimation', animationId=animationId)
return subdom_funcs |
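The generated wrapper boils down to a defensive type assert plus a command dispatch; a standalone sketch of that shape (the resolve() function and the returned dict are illustrative, not the real transport):

def resolve(animation_id):
    assert isinstance(animation_id, (str,)), (
        "Argument 'animationId' must be of type '['str']'. "
        "Received type: '%s'" % type(animation_id))
    return {'method': 'Animation.resolveAnimation',
            'params': {'animationId': animation_id}}

print(resolve('anim-1'))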
def _align_intervals(int_hier, lab_hier, t_min=0.0, t_max=None):
'''Align a hierarchical annotation to span a fixed start and end time.
Parameters
----------
int_hier : list of list of intervals
lab_hier : list of list of str
Hierarchical segment annotations, encoded as a
list of list of intervals (int_hier) and list of
list of strings (lab_hier)
t_min : None or number >= 0
The minimum time value for the segmentation
t_max : None or number >= t_min
The maximum time value for the segmentation
Returns
-------
intervals_hier : list of list of intervals
labels_hier : list of list of str
`int_hier` `lab_hier` aligned to span `[t_min, t_max]`.
'''
return [list(_) for _ in zip(*[util.adjust_intervals(np.asarray(ival),
labels=lab,
t_min=t_min,
t_max=t_max)
for ival, lab in zip(int_hier, lab_hier)])] | def function[_align_intervals, parameter[int_hier, lab_hier, t_min, t_max]]:
constant[Align a hierarchical annotation to span a fixed start and end time.
Parameters
----------
int_hier : list of list of intervals
lab_hier : list of list of str
Hierarchical segment annotations, encoded as a
list of list of intervals (int_hier) and list of
list of strings (lab_hier)
t_min : None or number >= 0
The minimum time value for the segmentation
t_max : None or number >= t_min
The maximum time value for the segmentation
Returns
-------
intervals_hier : list of list of intervals
labels_hier : list of list of str
`int_hier` `lab_hier` aligned to span `[t_min, t_max]`.
]
return[<ast.ListComp object at 0x7da20e9b0520>] | keyword[def] identifier[_align_intervals] ( identifier[int_hier] , identifier[lab_hier] , identifier[t_min] = literal[int] , identifier[t_max] = keyword[None] ):
literal[string]
keyword[return] [ identifier[list] ( identifier[_] ) keyword[for] identifier[_] keyword[in] identifier[zip] (*[ identifier[util] . identifier[adjust_intervals] ( identifier[np] . identifier[asarray] ( identifier[ival] ),
identifier[labels] = identifier[lab] ,
identifier[t_min] = identifier[t_min] ,
identifier[t_max] = identifier[t_max] )
keyword[for] identifier[ival] , identifier[lab] keyword[in] identifier[zip] ( identifier[int_hier] , identifier[lab_hier] )])] | def _align_intervals(int_hier, lab_hier, t_min=0.0, t_max=None):
"""Align a hierarchical annotation to span a fixed start and end time.
Parameters
----------
int_hier : list of list of intervals
lab_hier : list of list of str
Hierarchical segment annotations, encoded as a
list of list of intervals (int_hier) and list of
list of strings (lab_hier)
t_min : None or number >= 0
The minimum time value for the segmentation
t_max : None or number >= t_min
The maximum time value for the segmentation
Returns
-------
intervals_hier : list of list of intervals
labels_hier : list of list of str
`int_hier` `lab_hier` aligned to span `[t_min, t_max]`.
"""
return [list(_) for _ in zip(*[util.adjust_intervals(np.asarray(ival), labels=lab, t_min=t_min, t_max=t_max) for (ival, lab) in zip(int_hier, lab_hier)])] |
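The zip(*[...]) transpose is the whole trick here; a demo with a stub in place of mir_eval's util.adjust_intervals():

def adjust(ival, labels=None, t_min=None, t_max=None):
    return ival, labels   # stub: the real helper pads/crops to [t_min, t_max]

int_hier = [[(0.0, 5.0)], [(0.0, 2.5), (2.5, 5.0)]]
lab_hier = [['A'], ['a', 'b']]
ints, labs = [list(_) for _ in zip(*[adjust(i, labels=l, t_min=0.0, t_max=5.0)
                                     for i, l in zip(int_hier, lab_hier)])]
print(ints)  # [[(0.0, 5.0)], [(0.0, 2.5), (2.5, 5.0)]]
print(labs)  # [['A'], ['a', 'b']]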
def validator(flag_name, message='Flag validation failed',
flag_values=_flagvalues.FLAGS):
"""A function decorator for defining a flag validator.
Registers the decorated function as a validator for flag_name, e.g.
@flags.validator('foo')
def _CheckFoo(foo):
...
See register_validator() for the specification of checker function.
Args:
flag_name: str, name of the flag to be checked.
message: str, error text to be shown to the user if checker returns False.
If checker raises flags.ValidationError, message from the raised
error will be shown.
flag_values: flags.FlagValues, optional FlagValues instance to validate
against.
Returns:
A function decorator that registers its function argument as a validator.
Raises:
AttributeError: Raised when flag_name is not registered as a valid flag
name.
"""
def decorate(function):
register_validator(flag_name, function,
message=message,
flag_values=flag_values)
return function
return decorate | def function[validator, parameter[flag_name, message, flag_values]]:
constant[A function decorator for defining a flag validator.
Registers the decorated function as a validator for flag_name, e.g.
@flags.validator('foo')
def _CheckFoo(foo):
...
See register_validator() for the specification of checker function.
Args:
flag_name: str, name of the flag to be checked.
message: str, error text to be shown to the user if checker returns False.
If checker raises flags.ValidationError, message from the raised
error will be shown.
flag_values: flags.FlagValues, optional FlagValues instance to validate
against.
Returns:
A function decorator that registers its function argument as a validator.
Raises:
AttributeError: Raised when flag_name is not registered as a valid flag
name.
]
def function[decorate, parameter[function]]:
call[name[register_validator], parameter[name[flag_name], name[function]]]
return[name[function]]
return[name[decorate]] | keyword[def] identifier[validator] ( identifier[flag_name] , identifier[message] = literal[string] ,
identifier[flag_values] = identifier[_flagvalues] . identifier[FLAGS] ):
literal[string]
keyword[def] identifier[decorate] ( identifier[function] ):
identifier[register_validator] ( identifier[flag_name] , identifier[function] ,
identifier[message] = identifier[message] ,
identifier[flag_values] = identifier[flag_values] )
keyword[return] identifier[function]
keyword[return] identifier[decorate] | def validator(flag_name, message='Flag validation failed', flag_values=_flagvalues.FLAGS):
"""A function decorator for defining a flag validator.
Registers the decorated function as a validator for flag_name, e.g.
@flags.validator('foo')
def _CheckFoo(foo):
...
See register_validator() for the specification of checker function.
Args:
flag_name: str, name of the flag to be checked.
message: str, error text to be shown to the user if checker returns False.
If checker raises flags.ValidationError, message from the raised
error will be shown.
flag_values: flags.FlagValues, optional FlagValues instance to validate
against.
Returns:
A function decorator that registers its function argument as a validator.
Raises:
AttributeError: Raised when flag_name is not registered as a valid flag
name.
"""
def decorate(function):
register_validator(flag_name, function, message=message, flag_values=flag_values)
return function
return decorate |
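Typical usage of the decorator; a short sketch assuming this is the absl/gflags validators module, so the decorator is reachable as flags.validator, with a hypothetical 'port' flag defined first:

from absl import flags

flags.DEFINE_integer('port', 8080, 'Server port.')

@flags.validator('port', message='port must be in 1..65535')
def _check_port(value):
    return 1 <= value <= 65535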
def generate_data(shp=[16, 20, 24]):
""" Generating data """
x = np.ones(shp)
# inserting box
x[4:-4, 6:-2, 1:-6] = -1
x_noisy = x + np.random.normal(0, 0.6, size=x.shape)
return x_noisy | def function[generate_data, parameter[shp]]:
constant[ Generating data ]
variable[x] assign[=] call[name[np].ones, parameter[name[shp]]]
call[name[x]][tuple[[<ast.Slice object at 0x7da1b271f7f0>, <ast.Slice object at 0x7da1b271c640>, <ast.Slice object at 0x7da1b271cfa0>]]] assign[=] <ast.UnaryOp object at 0x7da1b271d1e0>
variable[x_noisy] assign[=] binary_operation[name[x] + call[name[np].random.normal, parameter[constant[0], constant[0.6]]]]
return[name[x_noisy]] | keyword[def] identifier[generate_data] ( identifier[shp] =[ literal[int] , literal[int] , literal[int] ]):
literal[string]
identifier[x] = identifier[np] . identifier[ones] ( identifier[shp] )
identifier[x] [ literal[int] :- literal[int] , literal[int] :- literal[int] , literal[int] :- literal[int] ]=- literal[int]
identifier[x_noisy] = identifier[x] + identifier[np] . identifier[random] . identifier[normal] ( literal[int] , literal[int] , identifier[size] = identifier[x] . identifier[shape] )
keyword[return] identifier[x_noisy] | def generate_data(shp=[16, 20, 24]):
""" Generating data """
x = np.ones(shp) # inserting box
x[4:-4, 6:-2, 1:-6] = -1
x_noisy = x + np.random.normal(0, 0.6, size=x.shape)
return x_noisy |
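A quick usage check of generate_data with its default shape (values are noisy, so the inserted box shows up only approximately):

x_noisy = generate_data()                  # default shp = [16, 20, 24]
print(x_noisy.shape)                       # (16, 20, 24)
print(round(float(x_noisy[8, 10, 4]), 1))  # around -1, give or take the noise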
def map(self, *args):
"""maps the function onto multiple inputs. The input should be multiple sequences. The
sequences will be zipped together forming the positional arguments for the call. This is
equivalent to map(func, ...) but is executed with a single network call."""
call_args = [self._map_args(*cur_args) for cur_args in zip(*args)]
r = self._invoke(call_args)
ret_type = _get_annotation('return', self.func)
output_name = getattr(self.func, '__output_name__', 'output1')
return [_decode_response(
r['Results'][output_name]['value'].get("ColumnNames"),
r['Results'][output_name]['value'].get("ColumnTypes"),
x,
ret_type)
for x in r['Results']['output1']['value']['Values']] | def function[map, parameter[self]]:
constant[maps the function onto multiple inputs. The input should be multiple sequences. The
sequences will be zipped together forming the positional arguments for the call. This is
equivalent to map(func, ...) but is executed with a single network call.]
variable[call_args] assign[=] <ast.ListComp object at 0x7da18f58f0d0>
variable[r] assign[=] call[name[self]._invoke, parameter[name[call_args]]]
variable[ret_type] assign[=] call[name[_get_annotation], parameter[constant[return], name[self].func]]
variable[output_name] assign[=] call[name[getattr], parameter[name[self].func, constant[__output_name__], constant[output1]]]
return[<ast.ListComp object at 0x7da18f58eb00>] | keyword[def] identifier[map] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[call_args] =[ identifier[self] . identifier[_map_args] (* identifier[cur_args] ) keyword[for] identifier[cur_args] keyword[in] identifier[zip] (* identifier[args] )]
identifier[r] = identifier[self] . identifier[_invoke] ( identifier[call_args] )
identifier[ret_type] = identifier[_get_annotation] ( literal[string] , identifier[self] . identifier[func] )
identifier[output_name] = identifier[getattr] ( identifier[self] . identifier[func] , literal[string] , literal[string] )
keyword[return] [ identifier[_decode_response] (
identifier[r] [ literal[string] ][ identifier[output_name] ][ literal[string] ]. identifier[get] ( literal[string] ),
identifier[r] [ literal[string] ][ identifier[output_name] ][ literal[string] ]. identifier[get] ( literal[string] ),
identifier[x] ,
identifier[ret_type] )
keyword[for] identifier[x] keyword[in] identifier[r] [ literal[string] ][ literal[string] ][ literal[string] ][ literal[string] ]] | def map(self, *args):
"""maps the function onto multiple inputs. The input should be multiple sequences. The
sequences will be zipped together forming the positional arguments for the call. This is
equivalent to map(func, ...) but is executed with a single network call."""
call_args = [self._map_args(*cur_args) for cur_args in zip(*args)]
r = self._invoke(call_args)
ret_type = _get_annotation('return', self.func)
output_name = getattr(self.func, '__output_name__', 'output1')
return [_decode_response(r['Results'][output_name]['value'].get('ColumnNames'), r['Results'][output_name]['value'].get('ColumnTypes'), x, ret_type) for x in r['Results']['output1']['value']['Values']] |
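The zip fan-out that builds one argument tuple per call can be seen in isolation; it mirrors the built-in map(func, xs, ys):

def fan_out(*args):
    # one tuple of positional arguments per zipped call
    return [cur_args for cur_args in zip(*args)]

print(fan_out([1, 2, 3], ['a', 'b', 'c']))
# [(1, 'a'), (2, 'b'), (3, 'c')]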
def getstruct(self, msgid, as_json=False, stream=sys.stdout):
"""Get and print the whole message.
as_json indicates whether to print the part list as JSON or not.
"""
parts = [part.get_content_type() for hdr, part in self._get(msgid)]
if as_json:
print(json.dumps(parts), file=stream)
else:
for c in parts:
print(c, file=stream) | def function[getstruct, parameter[self, msgid, as_json, stream]]:
    constant[Get and print the structure (the content type of each part) of
    the message.
as_json indicates whether to print the part list as JSON or not.
]
variable[parts] assign[=] <ast.ListComp object at 0x7da18c4ce260>
if name[as_json] begin[:]
call[name[print], parameter[call[name[json].dumps, parameter[name[parts]]]]] | keyword[def] identifier[getstruct] ( identifier[self] , identifier[msgid] , identifier[as_json] = keyword[False] , identifier[stream] = identifier[sys] . identifier[stdout] ):
literal[string]
identifier[parts] =[ identifier[part] . identifier[get_content_type] () keyword[for] identifier[hdr] , identifier[part] keyword[in] identifier[self] . identifier[_get] ( identifier[msgid] )]
keyword[if] identifier[as_json] :
identifier[print] ( identifier[json] . identifier[dumps] ( identifier[parts] ), identifier[file] = identifier[stream] )
keyword[else] :
keyword[for] identifier[c] keyword[in] identifier[parts] :
identifier[print] ( identifier[c] , identifier[file] = identifier[stream] ) | def getstruct(self, msgid, as_json=False, stream=sys.stdout):
"""Get and print the whole message.
as_json indicates whether to print the part list as JSON or not.
"""
parts = [part.get_content_type() for (hdr, part) in self._get(msgid)]
if as_json:
print(json.dumps(parts), file=stream) # depends on [control=['if'], data=[]]
else:
for c in parts:
print(c, file=stream) # depends on [control=['for'], data=['c']] |
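A standalone version of the part walk using the stdlib email API instead of this class's _get() helper:

import email
import json

msg = email.message_from_string("Content-Type: text/plain\n\nhello")
parts = [part.get_content_type() for part in msg.walk()]
print(json.dumps(parts))  # ["text/plain"]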
def reparse_local_options(self):
"""
Re-parse the leftover command-line options stored
in self.largs, so that any value overridden on the
command line is immediately available if the user turns
around and does a GetOption() right away.
We mimic the processing of the single args
in the original OptionParser._process_args(), but here we
allow exact matches for long-opts only (no partial
argument names!).
Else, this would lead to problems in add_local_option()
below. When called from there, we try to reparse the
command-line arguments that
1. haven't been processed so far (self.largs), but
2. are possibly not added to the list of options yet.
So, when we only have a value for "--myargument" yet,
a command-line argument of "--myarg=test" would set it.
Responsible for this behaviour is the method
_match_long_opt(), which allows for partial matches of
the option name, as long as the common prefix appears to
be unique.
This would lead to further confusion, because we might want
to add another option "--myarg" later on (see issue #2929).
"""
rargs = []
largs_restore = []
# Loop over all remaining arguments
skip = False
for l in self.largs:
if skip:
# Accept all remaining arguments as they are
largs_restore.append(l)
else:
if len(l) > 2 and l[0:2] == "--":
# Check long option
lopt = (l,)
if "=" in l:
# Split into option and value
lopt = l.split("=", 1)
if lopt[0] in self._long_opt:
# Argument is already known
rargs.append('='.join(lopt))
else:
# Not known yet, so reject for now
largs_restore.append('='.join(lopt))
else:
if l == "--" or l == "-":
# Stop normal processing and don't
# process the rest of the command-line opts
largs_restore.append(l)
skip = True
else:
rargs.append(l)
# Parse the filtered list
self.parse_args(rargs, self.values)
# Restore the list of remaining arguments for the
# next call of AddOption/add_local_option...
self.largs = self.largs + largs_restore | def function[reparse_local_options, parameter[self]]:
constant[
Re-parse the leftover command-line options stored
in self.largs, so that any value overridden on the
command line is immediately available if the user turns
around and does a GetOption() right away.
We mimic the processing of the single args
in the original OptionParser._process_args(), but here we
allow exact matches for long-opts only (no partial
argument names!).
Else, this would lead to problems in add_local_option()
below. When called from there, we try to reparse the
command-line arguments that
1. haven't been processed so far (self.largs), but
2. are possibly not added to the list of options yet.
So, when we only have a value for "--myargument" yet,
a command-line argument of "--myarg=test" would set it.
Responsible for this behaviour is the method
_match_long_opt(), which allows for partial matches of
the option name, as long as the common prefix appears to
be unique.
This would lead to further confusion, because we might want
to add another option "--myarg" later on (see issue #2929).
]
variable[rargs] assign[=] list[[]]
variable[largs_restore] assign[=] list[[]]
variable[skip] assign[=] constant[False]
for taget[name[l]] in starred[name[self].largs] begin[:]
if name[skip] begin[:]
call[name[largs_restore].append, parameter[name[l]]]
call[name[self].parse_args, parameter[name[rargs], name[self].values]]
name[self].largs assign[=] binary_operation[name[self].largs + name[largs_restore]] | keyword[def] identifier[reparse_local_options] ( identifier[self] ):
literal[string]
identifier[rargs] =[]
identifier[largs_restore] =[]
identifier[skip] = keyword[False]
keyword[for] identifier[l] keyword[in] identifier[self] . identifier[largs] :
keyword[if] identifier[skip] :
identifier[largs_restore] . identifier[append] ( identifier[l] )
keyword[else] :
keyword[if] identifier[len] ( identifier[l] )> literal[int] keyword[and] identifier[l] [ literal[int] : literal[int] ]== literal[string] :
identifier[lopt] =( identifier[l] ,)
keyword[if] literal[string] keyword[in] identifier[l] :
identifier[lopt] = identifier[l] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[lopt] [ literal[int] ] keyword[in] identifier[self] . identifier[_long_opt] :
identifier[rargs] . identifier[append] ( literal[string] . identifier[join] ( identifier[lopt] ))
keyword[else] :
identifier[largs_restore] . identifier[append] ( literal[string] . identifier[join] ( identifier[lopt] ))
keyword[else] :
keyword[if] identifier[l] == literal[string] keyword[or] identifier[l] == literal[string] :
identifier[largs_restore] . identifier[append] ( identifier[l] )
identifier[skip] = keyword[True]
keyword[else] :
identifier[rargs] . identifier[append] ( identifier[l] )
identifier[self] . identifier[parse_args] ( identifier[rargs] , identifier[self] . identifier[values] )
identifier[self] . identifier[largs] = identifier[self] . identifier[largs] + identifier[largs_restore] | def reparse_local_options(self):
"""
Re-parse the leftover command-line options stored
in self.largs, so that any value overridden on the
command line is immediately available if the user turns
around and does a GetOption() right away.
We mimic the processing of the single args
in the original OptionParser._process_args(), but here we
allow exact matches for long-opts only (no partial
argument names!).
Else, this would lead to problems in add_local_option()
below. When called from there, we try to reparse the
command-line arguments that
1. haven't been processed so far (self.largs), but
2. are possibly not added to the list of options yet.
So, when we only have a value for "--myargument" yet,
a command-line argument of "--myarg=test" would set it.
Responsible for this behaviour is the method
_match_long_opt(), which allows for partial matches of
the option name, as long as the common prefix appears to
be unique.
This would lead to further confusion, because we might want
to add another option "--myarg" later on (see issue #2929).
"""
rargs = []
largs_restore = []
# Loop over all remaining arguments
skip = False
for l in self.largs:
if skip:
# Accept all remaining arguments as they are
largs_restore.append(l) # depends on [control=['if'], data=[]]
elif len(l) > 2 and l[0:2] == '--':
# Check long option
lopt = (l,)
if '=' in l:
# Split into option and value
lopt = l.split('=', 1) # depends on [control=['if'], data=['l']]
if lopt[0] in self._long_opt:
# Argument is already known
rargs.append('='.join(lopt)) # depends on [control=['if'], data=[]]
else:
# Not known yet, so reject for now
largs_restore.append('='.join(lopt)) # depends on [control=['if'], data=[]]
elif l == '--' or l == '-':
# Stop normal processing and don't
# process the rest of the command-line opts
largs_restore.append(l)
skip = True # depends on [control=['if'], data=[]]
else:
rargs.append(l) # depends on [control=['for'], data=['l']]
# Parse the filtered list
self.parse_args(rargs, self.values)
# Restore the list of remaining arguments for the
# next call of AddOption/add_local_option...
self.largs = self.largs + largs_restore |
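The filtering rule alone, rerun on a toy argument list: exact long-opt matches are reparsed, unknown long opts are held back for later, plain arguments pass through, and '--'/'-' stops processing:

known = {'--jobs'}
rargs, restore, skip = [], [], False
for l in ['--jobs=4', '--myarg=test', 'target', '--', 'rest']:
    if skip:
        restore.append(l)
    elif len(l) > 2 and l.startswith('--'):
        (rargs if l.split('=', 1)[0] in known else restore).append(l)
    elif l in ('--', '-'):
        restore.append(l)
        skip = True
    else:
        rargs.append(l)
print(rargs)    # ['--jobs=4', 'target']
print(restore)  # ['--myarg=test', '--', 'rest']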
def byteswap(input, output=None, clobber=True):
"""Input GEIS files "input" will be read and converted to a new GEIS file
whose byte-order has been swapped from its original state.
Parameters
----------
input - str
Full filename with path of input GEIS image header file
output - str
Full filename with path of output GEIS image header file
If None, a default name will be created as input_swap.??h
clobber - bool
Overwrite any pre-existing output file? [Default: True]
Notes
-----
This function will automatically read and write out the data file using the
GEIS image naming conventions.
"""
global dat
cardLen = fits.Card.length
# input file(s) must be of the form *.??h and *.??d
if input[-1] != 'h' or input[-4] != '.':
raise "Illegal input GEIS file name %s" % input
data_file = input[:-1]+'d'
# Create default output name if no output name was specified by the user
if output is None:
output = input.replace('.','_swap.')
out_data = output[:-1]+'d'
if os.path.exists(output) and not clobber:
errstr = 'Output file already exists! Please remove or rename and start again...'
raise IOError(errstr)
_os = sys.platform
if _os[:5] == 'linux' or _os[:5] == 'win32' or _os[:5] == 'sunos' or _os[:3] == 'osf' or _os[:6] == 'darwin':
bytes_per_line = cardLen+1
else:
raise "Platform %s is not supported (yet)." % _os
end_card = 'END'+' '* (cardLen-3)
# open input file
im = open(input)
# Generate the primary HDU so we can have access to keywords which describe
# the number of groups and shape of each group's array
#
cards = []
while 1:
line = im.read(bytes_per_line)[:cardLen]
line = line[:8].upper() + line[8:]
if line == end_card:
break
cards.append(fits.Card.fromstring(line))
phdr = fits.Header(cards)
im.close()
_naxis0 = phdr.get('NAXIS', 0)
_naxis = [phdr['NAXIS'+str(j)] for j in range(1, _naxis0+1)]
_naxis.insert(0, _naxis0)
_bitpix = phdr['BITPIX']
_psize = phdr['PSIZE']
if phdr['DATATYPE'][:4] == 'REAL':
_bitpix = -_bitpix
if _naxis0 > 0:
size = reduce(lambda x,y:x*y, _naxis[1:])
data_size = abs(_bitpix) * size // 8
else:
data_size = 0
group_size = data_size + _psize // 8
# decode the group parameter definitions,
# group parameters will become extension header
groups = phdr['GROUPS']
gcount = phdr['GCOUNT']
pcount = phdr['PCOUNT']
formats = []
bools = []
floats = []
_range = list(range(1, pcount+1))
key = [phdr['PTYPE'+str(j)] for j in _range]
comm = [phdr.cards['PTYPE'+str(j)].comment for j in _range]
# delete group parameter definition header keywords
_list = ['PTYPE'+str(j) for j in _range] + \
['PDTYPE'+str(j) for j in _range] + \
['PSIZE'+str(j) for j in _range] + \
['DATATYPE', 'PSIZE', 'GCOUNT', 'PCOUNT', 'BSCALE', 'BZERO']
# Construct record array formats for the group parameters
# as interpreted from the Primary header file
for i in range(1, pcount+1):
ptype = key[i-1]
pdtype = phdr['PDTYPE'+str(i)]
star = pdtype.find('*')
_type = pdtype[:star]
_bytes = pdtype[star+1:]
# collect boolean keywords since they need special attention later
if _type == 'LOGICAL':
bools.append(i)
if pdtype == 'REAL*4':
floats.append(i)
fmt = geis_fmt[_type] + _bytes
formats.append((ptype,fmt))
_shape = _naxis[1:]
_shape.reverse()
_code = fits.BITPIX2DTYPE[_bitpix]
_bscale = phdr.get('BSCALE', 1)
_bzero = phdr.get('BZERO', 0)
if phdr['DATATYPE'][:10] == 'UNSIGNED*2':
_uint16 = 1
_bzero = 32768
else:
_uint16 = 0
# Use copy-on-write for all data types since byteswap may be needed
# in some platforms.
f1 = open(data_file, mode='rb')
dat = f1.read()
f1.close()
errormsg = ""
loc = 0
outdat = b''
for k in range(gcount):
ext_dat = numpy.fromstring(dat[loc:loc+data_size], dtype=_code)
ext_dat = ext_dat.reshape(_shape).byteswap()
outdat += ext_dat.tostring()
ext_hdu = fits.hdu.ImageHDU(data=ext_dat)
rec = numpy.fromstring(dat[loc+data_size:loc+group_size], dtype=formats).byteswap()
outdat += rec.tostring()
loc += group_size
if os.path.exists(output):
os.remove(output)
if os.path.exists(out_data):
os.remove(out_data)
shutil.copy(input,output)
outfile = open(out_data,mode='wb')
outfile.write(outdat)
outfile.close()
print('Finished byte-swapping ',input,' to ',output)
#-------------------------------------------------------------------------------
"""Input GEIS files "input" will be read and a HDUList object will
be returned that matches the waiver-FITS format written out by 'stwfits' in IRAF.
The user can use the writeto method to write the HDUList object to
a FITS file.
"""
    # NOTE: everything below appears to be the body of a second routine (a
    # stwfits-style GEIS -> waiver-FITS reader) whose 'def' line was lost when
    # this file was assembled; its 'global dat' declaration is disabled here.
    # global dat
cardLen = fits.Card.length
# input file(s) must be of the form *.??h and *.??d
if input[-1] != 'h' or input[-4] != '.':
raise "Illegal input GEIS file name %s" % input
data_file = input[:-1]+'d'
_os = sys.platform
if _os[:5] == 'linux' or _os[:5] == 'win32' or _os[:5] == 'sunos' or _os[:3] == 'osf' or _os[:6] == 'darwin':
bytes_per_line = cardLen+1
else:
raise "Platform %s is not supported (yet)." % _os
end_card = 'END'+' '* (cardLen-3)
# open input file
im = open(input)
# Generate the primary HDU
cards = []
while 1:
line = im.read(bytes_per_line)[:cardLen]
line = line[:8].upper() + line[8:]
if line == end_card:
break
cards.append(fits.Card.fromstring(line))
phdr = fits.Header(cards)
im.close()
phdr.set('FILENAME', value=input, after='DATE')
# Determine starting point for adding Group Parameter Block keywords to Primary header
phdr_indx = phdr.index('PSIZE')
_naxis0 = phdr.get('NAXIS', 0)
_naxis = [phdr['NAXIS'+str(j)] for j in range(1, _naxis0+1)]
_naxis.insert(0, _naxis0)
_bitpix = phdr['BITPIX']
_psize = phdr['PSIZE']
if phdr['DATATYPE'][:4] == 'REAL':
_bitpix = -_bitpix
if _naxis0 > 0:
size = reduce(lambda x,y:x*y, _naxis[1:])
data_size = abs(_bitpix) * size // 8
else:
data_size = 0
group_size = data_size + _psize // 8
# decode the group parameter definitions,
# group parameters will become extension table
groups = phdr['GROUPS']
gcount = phdr['GCOUNT']
pcount = phdr['PCOUNT']
formats = []
bools = []
floats = []
cols = [] # column definitions used for extension table
cols_dict = {} # provides name access to Column defs
_range = list(range(1, pcount+1))
key = [phdr['PTYPE'+str(j)] for j in _range]
comm = [phdr.cards['PTYPE'+str(j)].comment for j in _range]
# delete group parameter definition header keywords
_list = ['PTYPE'+str(j) for j in _range] + \
['PDTYPE'+str(j) for j in _range] + \
['PSIZE'+str(j) for j in _range] + \
['DATATYPE', 'PSIZE', 'GCOUNT', 'PCOUNT', 'BSCALE', 'BZERO']
# Construct record array formats for the group parameters
# as interpreted from the Primary header file
for i in range(1, pcount+1):
ptype = key[i-1]
pdtype = phdr['PDTYPE'+str(i)]
star = pdtype.find('*')
_type = pdtype[:star]
_bytes = pdtype[star+1:]
# collect boolean keywords since they need special attention later
if _type == 'LOGICAL':
bools.append(i)
if pdtype == 'REAL*4':
floats.append(i)
# identify keywords which require conversion to special units
if ptype in kw_DOUBLE:
_type = 'DOUBLE'
fmt = geis_fmt[_type] + _bytes
formats.append((ptype,fmt))
# Set up definitions for use in creating the group-parameter block table
nrpt = ''
nbits = str(int(_bytes)*8)
if 'CHAR' in _type:
nrpt = _bytes
nbits = _bytes
afmt = cols_fmt[_type]+ nbits
if 'LOGICAL' in _type:
afmt = cols_fmt[_type]
cfmt = cols_pfmt[_type]+nrpt
#print 'Column format for ',ptype,': ',cfmt,' with dtype of ',afmt
cols_dict[ptype] = fits.Column(name=ptype,format=cfmt,array=numpy.zeros(gcount,dtype=afmt))
cols.append(cols_dict[ptype]) # This keeps the columns in order
_shape = _naxis[1:]
_shape.reverse()
_code = fits.BITPIX2DTYPE[_bitpix]
_bscale = phdr.get('BSCALE', 1)
_bzero = phdr.get('BZERO', 0)
if phdr['DATATYPE'][:10] == 'UNSIGNED*2':
_uint16 = 1
_bzero = 32768
else:
_uint16 = 0
# delete from the end, so it will not conflict with previous delete
for i in range(len(phdr)-1, -1, -1):
if phdr.cards[i].keyword in _list:
del phdr[i]
# clean up other primary header keywords
phdr['SIMPLE'] = True
phdr['GROUPS'] = False
_after = 'NAXIS'
if _naxis0 > 0:
_after += str(_naxis0)
phdr.set('EXTEND', value=True,
comment="FITS dataset may contain extensions",
after=_after)
# Use copy-on-write for all data types since byteswap may be needed
# in some platforms.
f1 = open(data_file, mode='rb')
dat = f1.read()
errormsg = ""
# Define data array for all groups
arr_shape = _naxis[:]
arr_shape[0] = gcount
arr_stack = numpy.zeros(arr_shape,dtype=_code)
loc = 0
for k in range(gcount):
ext_dat = numpy.fromstring(dat[loc:loc+data_size], dtype=_code)
ext_dat = ext_dat.reshape(_shape)
if _uint16:
ext_dat += _bzero
# Check to see whether there are any NaN's or infs which might indicate
# a byte-swapping problem, such as being written out on little-endian
# and being read in on big-endian or vice-versa.
if _code.find('float') >= 0 and \
(numpy.any(numpy.isnan(ext_dat)) or numpy.any(numpy.isinf(ext_dat))):
errormsg += "===================================\n"
errormsg += "= WARNING: =\n"
errormsg += "= Input image: =\n"
errormsg += input+"[%d]\n"%(k+1)
errormsg += "= had floating point data values =\n"
errormsg += "= of NaN and/or Inf. =\n"
errormsg += "===================================\n"
elif _code.find('int') >= 0:
# Check INT data for max values
ext_dat_frac,ext_dat_exp = numpy.frexp(ext_dat)
if ext_dat_exp.max() == int(_bitpix) - 1:
# Potential problems with byteswapping
errormsg += "===================================\n"
errormsg += "= WARNING: =\n"
errormsg += "= Input image: =\n"
errormsg += input+"[%d]\n"%(k+1)
errormsg += "= had integer data values =\n"
errormsg += "= with maximum bitvalues. =\n"
errormsg += "===================================\n"
arr_stack[k] = ext_dat
rec = numpy.fromstring(dat[loc+data_size:loc+group_size], dtype=formats)
loc += group_size
# Add data from this GPB to table
for i in range(1, pcount+1):
val = rec[0][i-1]
if i in bools:
if val:
val = 'T'
else:
val = 'F'
cols[i-1].array[k] = val
# Based on the first group, add GPB keywords to PRIMARY header
if k == 0:
# Create separate PyFITS Card objects for each entry in 'rec'
# and update Primary HDU with these keywords after PSIZE
for i in range(1, pcount+1):
#val = rec.field(i-1)[0]
val = rec[0][i-1]
if val.dtype.kind == 'S':
val = val.decode('ascii')
if i in bools:
if val:
val = True
else:
val = False
if i in floats:
# use fromstring, format in Card is deprecated in pyfits 0.9
_str = '%-8s= %20.13G / %s' % (key[i-1], val, comm[i-1])
_card = fits.Card.fromstring(_str)
else:
_card = fits.Card(keyword=key[i-1], value=val, comment=comm[i-1])
phdr.insert(phdr_indx+i, _card)
# deal with bscale/bzero
if (_bscale != 1 or _bzero != 0):
phdr['BSCALE'] = _bscale
phdr['BZERO'] = _bzero
#hdulist.append(ext_hdu)
# Define new table based on Column definitions
ext_table = fits.TableHDU.from_columns(cols)
ext_table.header.set('EXTNAME', value=input+'.tab', after='TFIELDS')
# Add column descriptions to header of table extension to match stwfits output
for i in range(len(key)):
ext_table.header.append(fits.Card(keyword=key[i], value=comm[i]))
if errormsg != "":
errormsg += "===================================\n"
errormsg += "= This file may have been =\n"
errormsg += "= written out on a platform =\n"
errormsg += "= with a different byte-order. =\n"
errormsg += "= =\n"
errormsg += "= Please verify that the values =\n"
errormsg += "= are correct or apply the =\n"
errormsg += "= '.byteswap()' method. =\n"
errormsg += "===================================\n"
print(errormsg)
f1.close()
hdulist = fits.HDUList([fits.PrimaryHDU(header=phdr, data=arr_stack)])
hdulist.append(ext_table)
return hdulist | def function[byteswap, parameter[input, output, clobber]]:
constant[Input GEIS files "input" will be read and converted to a new GEIS file
whose byte-order has been swapped from its original state.
Parameters
----------
input - str
Full filename with path of input GEIS image header file
output - str
Full filename with path of output GEIS image header file
If None, a default name will be created as input_swap.??h
clobber - bool
Overwrite any pre-existing output file? [Default: True]
Notes
-----
This function will automatically read and write out the data file using the
GEIS image naming conventions.
]
<ast.Global object at 0x7da1b0e802b0>
variable[cardLen] assign[=] name[fits].Card.length
if <ast.BoolOp object at 0x7da1b0e80130> begin[:]
<ast.Raise object at 0x7da1b0e80fd0>
variable[data_file] assign[=] binary_operation[call[name[input]][<ast.Slice object at 0x7da1b0e3d600>] + constant[d]]
if compare[name[output] is constant[None]] begin[:]
variable[output] assign[=] call[name[input].replace, parameter[constant[.], constant[_swap.]]]
variable[out_data] assign[=] binary_operation[call[name[output]][<ast.Slice object at 0x7da1b0e3c370>] + constant[d]]
if <ast.BoolOp object at 0x7da1b0e3c0a0> begin[:]
variable[errstr] assign[=] constant[Output file already exists! Please remove or rename and start again...]
<ast.Raise object at 0x7da1b0e3c490>
variable[_os] assign[=] name[sys].platform
if <ast.BoolOp object at 0x7da1b0e3e890> begin[:]
variable[bytes_per_line] assign[=] binary_operation[name[cardLen] + constant[1]]
variable[end_card] assign[=] binary_operation[constant[END] + binary_operation[constant[ ] * binary_operation[name[cardLen] - constant[3]]]]
variable[im] assign[=] call[name[open], parameter[name[input]]]
variable[cards] assign[=] list[[]]
while constant[1] begin[:]
variable[line] assign[=] call[call[name[im].read, parameter[name[bytes_per_line]]]][<ast.Slice object at 0x7da1b0e81d20>]
variable[line] assign[=] binary_operation[call[call[name[line]][<ast.Slice object at 0x7da1b0e81ed0>].upper, parameter[]] + call[name[line]][<ast.Slice object at 0x7da1b0e81f90>]]
if compare[name[line] equal[==] name[end_card]] begin[:]
break
call[name[cards].append, parameter[call[name[fits].Card.fromstring, parameter[name[line]]]]]
variable[phdr] assign[=] call[name[fits].Header, parameter[name[cards]]]
call[name[im].close, parameter[]]
variable[_naxis0] assign[=] call[name[phdr].get, parameter[constant[NAXIS], constant[0]]]
variable[_naxis] assign[=] <ast.ListComp object at 0x7da1b0e826b0>
call[name[_naxis].insert, parameter[constant[0], name[_naxis0]]]
variable[_bitpix] assign[=] call[name[phdr]][constant[BITPIX]]
variable[_psize] assign[=] call[name[phdr]][constant[PSIZE]]
if compare[call[call[name[phdr]][constant[DATATYPE]]][<ast.Slice object at 0x7da1b0e82e30>] equal[==] constant[REAL]] begin[:]
variable[_bitpix] assign[=] <ast.UnaryOp object at 0x7da1b0e82f20>
if compare[name[_naxis0] greater[>] constant[0]] begin[:]
variable[size] assign[=] call[name[reduce], parameter[<ast.Lambda object at 0x7da1b0e83100>, call[name[_naxis]][<ast.Slice object at 0x7da1b0e832b0>]]]
variable[data_size] assign[=] binary_operation[binary_operation[call[name[abs], parameter[name[_bitpix]]] * name[size]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]
variable[group_size] assign[=] binary_operation[name[data_size] + binary_operation[name[_psize] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]
variable[groups] assign[=] call[name[phdr]][constant[GROUPS]]
variable[gcount] assign[=] call[name[phdr]][constant[GCOUNT]]
variable[pcount] assign[=] call[name[phdr]][constant[PCOUNT]]
variable[formats] assign[=] list[[]]
variable[bools] assign[=] list[[]]
variable[floats] assign[=] list[[]]
variable[_range] assign[=] call[name[list], parameter[call[name[range], parameter[constant[1], binary_operation[name[pcount] + constant[1]]]]]]
variable[key] assign[=] <ast.ListComp object at 0x7da1b0e83d60>
variable[comm] assign[=] <ast.ListComp object at 0x7da1b0edbfd0>
variable[_list] assign[=] binary_operation[binary_operation[binary_operation[<ast.ListComp object at 0x7da1b0edbc40> + <ast.ListComp object at 0x7da1b0edba60>] + <ast.ListComp object at 0x7da1b0edb880>] + list[[<ast.Constant object at 0x7da1b0edb640>, <ast.Constant object at 0x7da1b0edb610>, <ast.Constant object at 0x7da1b0edb5e0>, <ast.Constant object at 0x7da1b0edb5b0>, <ast.Constant object at 0x7da1b0edb580>, <ast.Constant object at 0x7da1b0edb550>]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[pcount] + constant[1]]]]] begin[:]
variable[ptype] assign[=] call[name[key]][binary_operation[name[i] - constant[1]]]
variable[pdtype] assign[=] call[name[phdr]][binary_operation[constant[PDTYPE] + call[name[str], parameter[name[i]]]]]
variable[star] assign[=] call[name[pdtype].find, parameter[constant[*]]]
variable[_type] assign[=] call[name[pdtype]][<ast.Slice object at 0x7da1b0ed8940>]
variable[_bytes] assign[=] call[name[pdtype]][<ast.Slice object at 0x7da1b0ed8820>]
if compare[name[_type] equal[==] constant[LOGICAL]] begin[:]
call[name[bools].append, parameter[name[i]]]
if compare[name[pdtype] equal[==] constant[REAL*4]] begin[:]
call[name[floats].append, parameter[name[i]]]
variable[fmt] assign[=] binary_operation[call[name[geis_fmt]][name[_type]] + name[_bytes]]
call[name[formats].append, parameter[tuple[[<ast.Name object at 0x7da1b0ed8160>, <ast.Name object at 0x7da1b0ed8130>]]]]
variable[_shape] assign[=] call[name[_naxis]][<ast.Slice object at 0x7da1b26aeb60>]
call[name[_shape].reverse, parameter[]]
variable[_code] assign[=] call[name[fits].BITPIX2DTYPE][name[_bitpix]]
variable[_bscale] assign[=] call[name[phdr].get, parameter[constant[BSCALE], constant[1]]]
variable[_bzero] assign[=] call[name[phdr].get, parameter[constant[BZERO], constant[0]]]
if compare[call[call[name[phdr]][constant[DATATYPE]]][<ast.Slice object at 0x7da1b26ad7b0>] equal[==] constant[UNSIGNED*2]] begin[:]
variable[_uint16] assign[=] constant[1]
variable[_bzero] assign[=] constant[32768]
variable[f1] assign[=] call[name[open], parameter[name[data_file]]]
variable[dat] assign[=] call[name[f1].read, parameter[]]
call[name[f1].close, parameter[]]
variable[errormsg] assign[=] constant[]
variable[loc] assign[=] constant[0]
variable[outdat] assign[=] constant[b'']
for taget[name[k]] in starred[call[name[range], parameter[name[gcount]]]] begin[:]
variable[ext_dat] assign[=] call[name[numpy].fromstring, parameter[call[name[dat]][<ast.Slice object at 0x7da1b26aef20>]]]
variable[ext_dat] assign[=] call[call[name[ext_dat].reshape, parameter[name[_shape]]].byteswap, parameter[]]
<ast.AugAssign object at 0x7da1b26acbe0>
variable[ext_hdu] assign[=] call[name[fits].hdu.ImageHDU, parameter[]]
variable[rec] assign[=] call[call[name[numpy].fromstring, parameter[call[name[dat]][<ast.Slice object at 0x7da1b0edc100>]]].byteswap, parameter[]]
<ast.AugAssign object at 0x7da1b0edc520>
<ast.AugAssign object at 0x7da1b0edc5b0>
if call[name[os].path.exists, parameter[name[output]]] begin[:]
call[name[os].remove, parameter[name[output]]]
if call[name[os].path.exists, parameter[name[out_data]]] begin[:]
call[name[os].remove, parameter[name[out_data]]]
call[name[shutil].copy, parameter[name[input], name[output]]]
variable[outfile] assign[=] call[name[open], parameter[name[out_data]]]
call[name[outfile].write, parameter[name[outdat]]]
call[name[outfile].close, parameter[]]
call[name[print], parameter[constant[Finished byte-swapping ], name[input], constant[ to ], name[output]]]
constant[Input GEIS files "input" will be read and a HDUList object will
be returned that matches the waiver-FITS format written out by 'stwfits' in IRAF.
The user can use the writeto method to write the HDUList object to
a FITS file.
]
variable[cardLen] assign[=] name[fits].Card.length
if <ast.BoolOp object at 0x7da1b0edc640> begin[:]
<ast.Raise object at 0x7da1b0edcd60>
variable[data_file] assign[=] binary_operation[call[name[input]][<ast.Slice object at 0x7da1b0edcfd0>] + constant[d]]
variable[_os] assign[=] name[sys].platform
if <ast.BoolOp object at 0x7da1b0edd300> begin[:]
variable[bytes_per_line] assign[=] binary_operation[name[cardLen] + constant[1]]
variable[end_card] assign[=] binary_operation[constant[END] + binary_operation[constant[ ] * binary_operation[name[cardLen] - constant[3]]]]
variable[im] assign[=] call[name[open], parameter[name[input]]]
variable[cards] assign[=] list[[]]
while constant[1] begin[:]
variable[line] assign[=] call[call[name[im].read, parameter[name[bytes_per_line]]]][<ast.Slice object at 0x7da1b0edcb50>]
variable[line] assign[=] binary_operation[call[call[name[line]][<ast.Slice object at 0x7da1b0ede350>].upper, parameter[]] + call[name[line]][<ast.Slice object at 0x7da1b0edfa30>]]
if compare[name[line] equal[==] name[end_card]] begin[:]
break
call[name[cards].append, parameter[call[name[fits].Card.fromstring, parameter[name[line]]]]]
variable[phdr] assign[=] call[name[fits].Header, parameter[name[cards]]]
call[name[im].close, parameter[]]
call[name[phdr].set, parameter[constant[FILENAME]]]
variable[phdr_indx] assign[=] call[name[phdr].index, parameter[constant[PSIZE]]]
variable[_naxis0] assign[=] call[name[phdr].get, parameter[constant[NAXIS], constant[0]]]
variable[_naxis] assign[=] <ast.ListComp object at 0x7da1b0ede2c0>
call[name[_naxis].insert, parameter[constant[0], name[_naxis0]]]
variable[_bitpix] assign[=] call[name[phdr]][constant[BITPIX]]
variable[_psize] assign[=] call[name[phdr]][constant[PSIZE]]
if compare[call[call[name[phdr]][constant[DATATYPE]]][<ast.Slice object at 0x7da1b0eddbd0>] equal[==] constant[REAL]] begin[:]
variable[_bitpix] assign[=] <ast.UnaryOp object at 0x7da1b0ede890>
if compare[name[_naxis0] greater[>] constant[0]] begin[:]
variable[size] assign[=] call[name[reduce], parameter[<ast.Lambda object at 0x7da1b0edea40>, call[name[_naxis]][<ast.Slice object at 0x7da1b0ea2ce0>]]]
variable[data_size] assign[=] binary_operation[binary_operation[call[name[abs], parameter[name[_bitpix]]] * name[size]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]
variable[group_size] assign[=] binary_operation[name[data_size] + binary_operation[name[_psize] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]]
variable[groups] assign[=] call[name[phdr]][constant[GROUPS]]
variable[gcount] assign[=] call[name[phdr]][constant[GCOUNT]]
variable[pcount] assign[=] call[name[phdr]][constant[PCOUNT]]
variable[formats] assign[=] list[[]]
variable[bools] assign[=] list[[]]
variable[floats] assign[=] list[[]]
variable[cols] assign[=] list[[]]
variable[cols_dict] assign[=] dictionary[[], []]
variable[_range] assign[=] call[name[list], parameter[call[name[range], parameter[constant[1], binary_operation[name[pcount] + constant[1]]]]]]
variable[key] assign[=] <ast.ListComp object at 0x7da18f811900>
variable[comm] assign[=] <ast.ListComp object at 0x7da18f811d50>
variable[_list] assign[=] binary_operation[binary_operation[binary_operation[<ast.ListComp object at 0x7da18f813d60> + <ast.ListComp object at 0x7da18f811210>] + <ast.ListComp object at 0x7da18f813550>] + list[[<ast.Constant object at 0x7da18f811510>, <ast.Constant object at 0x7da18f811ba0>, <ast.Constant object at 0x7da18f8132b0>, <ast.Constant object at 0x7da18f813370>, <ast.Constant object at 0x7da18f812890>, <ast.Constant object at 0x7da18f811a20>]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[pcount] + constant[1]]]]] begin[:]
variable[ptype] assign[=] call[name[key]][binary_operation[name[i] - constant[1]]]
variable[pdtype] assign[=] call[name[phdr]][binary_operation[constant[PDTYPE] + call[name[str], parameter[name[i]]]]]
variable[star] assign[=] call[name[pdtype].find, parameter[constant[*]]]
variable[_type] assign[=] call[name[pdtype]][<ast.Slice object at 0x7da18f811150>]
variable[_bytes] assign[=] call[name[pdtype]][<ast.Slice object at 0x7da18f813100>]
if compare[name[_type] equal[==] constant[LOGICAL]] begin[:]
call[name[bools].append, parameter[name[i]]]
if compare[name[pdtype] equal[==] constant[REAL*4]] begin[:]
call[name[floats].append, parameter[name[i]]]
if compare[name[ptype] in name[kw_DOUBLE]] begin[:]
variable[_type] assign[=] constant[DOUBLE]
variable[fmt] assign[=] binary_operation[call[name[geis_fmt]][name[_type]] + name[_bytes]]
call[name[formats].append, parameter[tuple[[<ast.Name object at 0x7da18f811f30>, <ast.Name object at 0x7da18f812470>]]]]
variable[nrpt] assign[=] constant[]
variable[nbits] assign[=] call[name[str], parameter[binary_operation[call[name[int], parameter[name[_bytes]]] * constant[8]]]]
if compare[constant[CHAR] in name[_type]] begin[:]
variable[nrpt] assign[=] name[_bytes]
variable[nbits] assign[=] name[_bytes]
variable[afmt] assign[=] binary_operation[call[name[cols_fmt]][name[_type]] + name[nbits]]
if compare[constant[LOGICAL] in name[_type]] begin[:]
variable[afmt] assign[=] call[name[cols_fmt]][name[_type]]
variable[cfmt] assign[=] binary_operation[call[name[cols_pfmt]][name[_type]] + name[nrpt]]
call[name[cols_dict]][name[ptype]] assign[=] call[name[fits].Column, parameter[]]
call[name[cols].append, parameter[call[name[cols_dict]][name[ptype]]]]
variable[_shape] assign[=] call[name[_naxis]][<ast.Slice object at 0x7da1b0ed56f0>]
call[name[_shape].reverse, parameter[]]
variable[_code] assign[=] call[name[fits].BITPIX2DTYPE][name[_bitpix]]
variable[_bscale] assign[=] call[name[phdr].get, parameter[constant[BSCALE], constant[1]]]
variable[_bzero] assign[=] call[name[phdr].get, parameter[constant[BZERO], constant[0]]]
if compare[call[call[name[phdr]][constant[DATATYPE]]][<ast.Slice object at 0x7da1b0ed4970>] equal[==] constant[UNSIGNED*2]] begin[:]
variable[_uint16] assign[=] constant[1]
variable[_bzero] assign[=] constant[32768]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[phdr]]] - constant[1]], <ast.UnaryOp object at 0x7da1b0ed4550>, <ast.UnaryOp object at 0x7da1b0ed4490>]]] begin[:]
if compare[call[name[phdr].cards][name[i]].keyword in name[_list]] begin[:]
<ast.Delete object at 0x7da1b0ed4280>
call[name[phdr]][constant[SIMPLE]] assign[=] constant[True]
call[name[phdr]][constant[GROUPS]] assign[=] constant[False]
variable[_after] assign[=] constant[NAXIS]
if compare[name[_naxis0] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b0ed7ee0>
call[name[phdr].set, parameter[constant[EXTEND]]]
variable[f1] assign[=] call[name[open], parameter[name[data_file]]]
variable[dat] assign[=] call[name[f1].read, parameter[]]
variable[errormsg] assign[=] constant[]
variable[arr_shape] assign[=] call[name[_naxis]][<ast.Slice object at 0x7da204347100>]
call[name[arr_shape]][constant[0]] assign[=] name[gcount]
variable[arr_stack] assign[=] call[name[numpy].zeros, parameter[name[arr_shape]]]
variable[loc] assign[=] constant[0]
for taget[name[k]] in starred[call[name[range], parameter[name[gcount]]]] begin[:]
variable[ext_dat] assign[=] call[name[numpy].fromstring, parameter[call[name[dat]][<ast.Slice object at 0x7da2043462f0>]]]
variable[ext_dat] assign[=] call[name[ext_dat].reshape, parameter[name[_shape]]]
if name[_uint16] begin[:]
<ast.AugAssign object at 0x7da204346470>
if <ast.BoolOp object at 0x7da204347c10> begin[:]
<ast.AugAssign object at 0x7da204344940>
<ast.AugAssign object at 0x7da204344490>
<ast.AugAssign object at 0x7da204347ee0>
<ast.AugAssign object at 0x7da1b0ef6890>
<ast.AugAssign object at 0x7da1b0ef7640>
<ast.AugAssign object at 0x7da1b0ef6110>
<ast.AugAssign object at 0x7da1b0ef7f70>
call[name[arr_stack]][name[k]] assign[=] name[ext_dat]
variable[rec] assign[=] call[name[numpy].fromstring, parameter[call[name[dat]][<ast.Slice object at 0x7da1b0ef7f40>]]]
<ast.AugAssign object at 0x7da1b0ef4a00>
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[pcount] + constant[1]]]]] begin[:]
variable[val] assign[=] call[call[name[rec]][constant[0]]][binary_operation[name[i] - constant[1]]]
if compare[name[i] in name[bools]] begin[:]
if name[val] begin[:]
variable[val] assign[=] constant[T]
call[call[name[cols]][binary_operation[name[i] - constant[1]]].array][name[k]] assign[=] name[val]
if compare[name[k] equal[==] constant[0]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[pcount] + constant[1]]]]] begin[:]
variable[val] assign[=] call[call[name[rec]][constant[0]]][binary_operation[name[i] - constant[1]]]
if compare[name[val].dtype.kind equal[==] constant[S]] begin[:]
variable[val] assign[=] call[name[val].decode, parameter[constant[ascii]]]
if compare[name[i] in name[bools]] begin[:]
if name[val] begin[:]
variable[val] assign[=] constant[True]
if compare[name[i] in name[floats]] begin[:]
variable[_str] assign[=] binary_operation[constant[%-8s= %20.13G / %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0ef4490>, <ast.Name object at 0x7da1b0ef6f50>, <ast.Subscript object at 0x7da1b0ef6830>]]]
variable[_card] assign[=] call[name[fits].Card.fromstring, parameter[name[_str]]]
call[name[phdr].insert, parameter[binary_operation[name[phdr_indx] + name[i]], name[_card]]]
if <ast.BoolOp object at 0x7da1b0e52fb0> begin[:]
call[name[phdr]][constant[BSCALE]] assign[=] name[_bscale]
call[name[phdr]][constant[BZERO]] assign[=] name[_bzero]
variable[ext_table] assign[=] call[name[fits].TableHDU.from_columns, parameter[name[cols]]]
call[name[ext_table].header.set, parameter[constant[EXTNAME]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[key]]]]]] begin[:]
call[name[ext_table].header.append, parameter[call[name[fits].Card, parameter[]]]]
if compare[name[errormsg] not_equal[!=] constant[]] begin[:]
<ast.AugAssign object at 0x7da1b0e53be0>
<ast.AugAssign object at 0x7da1b0e53c70>
<ast.AugAssign object at 0x7da1b0e53d00>
<ast.AugAssign object at 0x7da1b0e53d90>
<ast.AugAssign object at 0x7da1b0e53e20>
<ast.AugAssign object at 0x7da1b0e53eb0>
<ast.AugAssign object at 0x7da1b0e53f40>
<ast.AugAssign object at 0x7da1b0e53fd0>
<ast.AugAssign object at 0x7da1b0e57f70>
call[name[print], parameter[name[errormsg]]]
call[name[f1].close, parameter[]]
variable[hdulist] assign[=] call[name[fits].HDUList, parameter[list[[<ast.Call object at 0x7da1b0e57be0>]]]]
call[name[hdulist].append, parameter[name[ext_table]]]
return[name[hdulist]] | keyword[def] identifier[byteswap] ( identifier[input] , identifier[output] = keyword[None] , identifier[clobber] = keyword[True] ):
literal[string]
keyword[global] identifier[dat]
identifier[cardLen] = identifier[fits] . identifier[Card] . identifier[length]
keyword[if] identifier[input] [- literal[int] ]!= literal[string] keyword[or] identifier[input] [- literal[int] ]!= literal[string] :
keyword[raise] literal[string] % identifier[input]
identifier[data_file] = identifier[input] [:- literal[int] ]+ literal[string]
keyword[if] identifier[output] keyword[is] keyword[None] :
identifier[output] = identifier[input] . identifier[replace] ( literal[string] , literal[string] )
identifier[out_data] = identifier[output] [:- literal[int] ]+ literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[output] ) keyword[and] keyword[not] identifier[clobber] :
identifier[errstr] = literal[string]
keyword[raise] identifier[IOError] ( identifier[errstr] )
identifier[_os] = identifier[sys] . identifier[platform]
keyword[if] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] :
identifier[bytes_per_line] = identifier[cardLen] + literal[int]
keyword[else] :
keyword[raise] literal[string] % identifier[_os]
identifier[end_card] = literal[string] + literal[string] *( identifier[cardLen] - literal[int] )
identifier[im] = identifier[open] ( identifier[input] )
identifier[cards] =[]
keyword[while] literal[int] :
identifier[line] = identifier[im] . identifier[read] ( identifier[bytes_per_line] )[: identifier[cardLen] ]
identifier[line] = identifier[line] [: literal[int] ]. identifier[upper] ()+ identifier[line] [ literal[int] :]
keyword[if] identifier[line] == identifier[end_card] :
keyword[break]
identifier[cards] . identifier[append] ( identifier[fits] . identifier[Card] . identifier[fromstring] ( identifier[line] ))
identifier[phdr] = identifier[fits] . identifier[Header] ( identifier[cards] )
identifier[im] . identifier[close] ()
identifier[_naxis0] = identifier[phdr] . identifier[get] ( literal[string] , literal[int] )
identifier[_naxis] =[ identifier[phdr] [ literal[string] + identifier[str] ( identifier[j] )] keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[_naxis0] + literal[int] )]
identifier[_naxis] . identifier[insert] ( literal[int] , identifier[_naxis0] )
identifier[_bitpix] = identifier[phdr] [ literal[string] ]
identifier[_psize] = identifier[phdr] [ literal[string] ]
keyword[if] identifier[phdr] [ literal[string] ][: literal[int] ]== literal[string] :
identifier[_bitpix] =- identifier[_bitpix]
keyword[if] identifier[_naxis0] > literal[int] :
identifier[size] = identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] * identifier[y] , identifier[_naxis] [ literal[int] :])
identifier[data_size] = identifier[abs] ( identifier[_bitpix] )* identifier[size] // literal[int]
keyword[else] :
identifier[data_size] = literal[int]
identifier[group_size] = identifier[data_size] + identifier[_psize] // literal[int]
identifier[groups] = identifier[phdr] [ literal[string] ]
identifier[gcount] = identifier[phdr] [ literal[string] ]
identifier[pcount] = identifier[phdr] [ literal[string] ]
identifier[formats] =[]
identifier[bools] =[]
identifier[floats] =[]
identifier[_range] = identifier[list] ( identifier[range] ( literal[int] , identifier[pcount] + literal[int] ))
identifier[key] =[ identifier[phdr] [ literal[string] + identifier[str] ( identifier[j] )] keyword[for] identifier[j] keyword[in] identifier[_range] ]
identifier[comm] =[ identifier[phdr] . identifier[cards] [ literal[string] + identifier[str] ( identifier[j] )]. identifier[comment] keyword[for] identifier[j] keyword[in] identifier[_range] ]
identifier[_list] =[ literal[string] + identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[_range] ]+[ literal[string] + identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[_range] ]+[ literal[string] + identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[_range] ]+[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pcount] + literal[int] ):
identifier[ptype] = identifier[key] [ identifier[i] - literal[int] ]
identifier[pdtype] = identifier[phdr] [ literal[string] + identifier[str] ( identifier[i] )]
identifier[star] = identifier[pdtype] . identifier[find] ( literal[string] )
identifier[_type] = identifier[pdtype] [: identifier[star] ]
identifier[_bytes] = identifier[pdtype] [ identifier[star] + literal[int] :]
keyword[if] identifier[_type] == literal[string] :
identifier[bools] . identifier[append] ( identifier[i] )
keyword[if] identifier[pdtype] == literal[string] :
identifier[floats] . identifier[append] ( identifier[i] )
identifier[fmt] = identifier[geis_fmt] [ identifier[_type] ]+ identifier[_bytes]
identifier[formats] . identifier[append] (( identifier[ptype] , identifier[fmt] ))
identifier[_shape] = identifier[_naxis] [ literal[int] :]
identifier[_shape] . identifier[reverse] ()
identifier[_code] = identifier[fits] . identifier[BITPIX2DTYPE] [ identifier[_bitpix] ]
identifier[_bscale] = identifier[phdr] . identifier[get] ( literal[string] , literal[int] )
identifier[_bzero] = identifier[phdr] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[phdr] [ literal[string] ][: literal[int] ]== literal[string] :
identifier[_uint16] = literal[int]
identifier[_bzero] = literal[int]
keyword[else] :
identifier[_uint16] = literal[int]
identifier[f1] = identifier[open] ( identifier[data_file] , identifier[mode] = literal[string] )
identifier[dat] = identifier[f1] . identifier[read] ()
identifier[f1] . identifier[close] ()
identifier[errormsg] = literal[string]
identifier[loc] = literal[int]
identifier[outdat] = literal[string]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[gcount] ):
identifier[ext_dat] = identifier[numpy] . identifier[fromstring] ( identifier[dat] [ identifier[loc] : identifier[loc] + identifier[data_size] ], identifier[dtype] = identifier[_code] )
identifier[ext_dat] = identifier[ext_dat] . identifier[reshape] ( identifier[_shape] ). identifier[byteswap] ()
identifier[outdat] += identifier[ext_dat] . identifier[tostring] ()
identifier[ext_hdu] = identifier[fits] . identifier[hdu] . identifier[ImageHDU] ( identifier[data] = identifier[ext_dat] )
identifier[rec] = identifier[numpy] . identifier[fromstring] ( identifier[dat] [ identifier[loc] + identifier[data_size] : identifier[loc] + identifier[group_size] ], identifier[dtype] = identifier[formats] ). identifier[byteswap] ()
identifier[outdat] += identifier[rec] . identifier[tostring] ()
identifier[loc] += identifier[group_size]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[output] ):
identifier[os] . identifier[remove] ( identifier[output] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[out_data] ):
identifier[os] . identifier[remove] ( identifier[out_data] )
identifier[shutil] . identifier[copy] ( identifier[input] , identifier[output] )
identifier[outfile] = identifier[open] ( identifier[out_data] , identifier[mode] = literal[string] )
identifier[outfile] . identifier[write] ( identifier[outdat] )
identifier[outfile] . identifier[close] ()
identifier[print] ( literal[string] , identifier[input] , literal[string] , identifier[output] )
literal[string]
identifier[cardLen] = identifier[fits] . identifier[Card] . identifier[length]
keyword[if] identifier[input] [- literal[int] ]!= literal[string] keyword[or] identifier[input] [- literal[int] ]!= literal[string] :
keyword[raise] literal[string] % identifier[input]
identifier[data_file] = identifier[input] [:- literal[int] ]+ literal[string]
identifier[_os] = identifier[sys] . identifier[platform]
keyword[if] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] keyword[or] identifier[_os] [: literal[int] ]== literal[string] :
identifier[bytes_per_line] = identifier[cardLen] + literal[int]
keyword[else] :
keyword[raise] literal[string] % identifier[_os]
identifier[end_card] = literal[string] + literal[string] *( identifier[cardLen] - literal[int] )
identifier[im] = identifier[open] ( identifier[input] )
identifier[cards] =[]
keyword[while] literal[int] :
identifier[line] = identifier[im] . identifier[read] ( identifier[bytes_per_line] )[: identifier[cardLen] ]
identifier[line] = identifier[line] [: literal[int] ]. identifier[upper] ()+ identifier[line] [ literal[int] :]
keyword[if] identifier[line] == identifier[end_card] :
keyword[break]
identifier[cards] . identifier[append] ( identifier[fits] . identifier[Card] . identifier[fromstring] ( identifier[line] ))
identifier[phdr] = identifier[fits] . identifier[Header] ( identifier[cards] )
identifier[im] . identifier[close] ()
identifier[phdr] . identifier[set] ( literal[string] , identifier[value] = identifier[input] , identifier[after] = literal[string] )
identifier[phdr_indx] = identifier[phdr] . identifier[index] ( literal[string] )
identifier[_naxis0] = identifier[phdr] . identifier[get] ( literal[string] , literal[int] )
identifier[_naxis] =[ identifier[phdr] [ literal[string] + identifier[str] ( identifier[j] )] keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[_naxis0] + literal[int] )]
identifier[_naxis] . identifier[insert] ( literal[int] , identifier[_naxis0] )
identifier[_bitpix] = identifier[phdr] [ literal[string] ]
identifier[_psize] = identifier[phdr] [ literal[string] ]
keyword[if] identifier[phdr] [ literal[string] ][: literal[int] ]== literal[string] :
identifier[_bitpix] =- identifier[_bitpix]
keyword[if] identifier[_naxis0] > literal[int] :
identifier[size] = identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] * identifier[y] , identifier[_naxis] [ literal[int] :])
identifier[data_size] = identifier[abs] ( identifier[_bitpix] )* identifier[size] // literal[int]
keyword[else] :
identifier[data_size] = literal[int]
identifier[group_size] = identifier[data_size] + identifier[_psize] // literal[int]
identifier[groups] = identifier[phdr] [ literal[string] ]
identifier[gcount] = identifier[phdr] [ literal[string] ]
identifier[pcount] = identifier[phdr] [ literal[string] ]
identifier[formats] =[]
identifier[bools] =[]
identifier[floats] =[]
identifier[cols] =[]
identifier[cols_dict] ={}
identifier[_range] = identifier[list] ( identifier[range] ( literal[int] , identifier[pcount] + literal[int] ))
identifier[key] =[ identifier[phdr] [ literal[string] + identifier[str] ( identifier[j] )] keyword[for] identifier[j] keyword[in] identifier[_range] ]
identifier[comm] =[ identifier[phdr] . identifier[cards] [ literal[string] + identifier[str] ( identifier[j] )]. identifier[comment] keyword[for] identifier[j] keyword[in] identifier[_range] ]
identifier[_list] =[ literal[string] + identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[_range] ]+[ literal[string] + identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[_range] ]+[ literal[string] + identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[_range] ]+[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pcount] + literal[int] ):
identifier[ptype] = identifier[key] [ identifier[i] - literal[int] ]
identifier[pdtype] = identifier[phdr] [ literal[string] + identifier[str] ( identifier[i] )]
identifier[star] = identifier[pdtype] . identifier[find] ( literal[string] )
identifier[_type] = identifier[pdtype] [: identifier[star] ]
identifier[_bytes] = identifier[pdtype] [ identifier[star] + literal[int] :]
keyword[if] identifier[_type] == literal[string] :
identifier[bools] . identifier[append] ( identifier[i] )
keyword[if] identifier[pdtype] == literal[string] :
identifier[floats] . identifier[append] ( identifier[i] )
keyword[if] identifier[ptype] keyword[in] identifier[kw_DOUBLE] :
identifier[_type] = literal[string]
identifier[fmt] = identifier[geis_fmt] [ identifier[_type] ]+ identifier[_bytes]
identifier[formats] . identifier[append] (( identifier[ptype] , identifier[fmt] ))
identifier[nrpt] = literal[string]
identifier[nbits] = identifier[str] ( identifier[int] ( identifier[_bytes] )* literal[int] )
keyword[if] literal[string] keyword[in] identifier[_type] :
identifier[nrpt] = identifier[_bytes]
identifier[nbits] = identifier[_bytes]
identifier[afmt] = identifier[cols_fmt] [ identifier[_type] ]+ identifier[nbits]
keyword[if] literal[string] keyword[in] identifier[_type] :
identifier[afmt] = identifier[cols_fmt] [ identifier[_type] ]
identifier[cfmt] = identifier[cols_pfmt] [ identifier[_type] ]+ identifier[nrpt]
identifier[cols_dict] [ identifier[ptype] ]= identifier[fits] . identifier[Column] ( identifier[name] = identifier[ptype] , identifier[format] = identifier[cfmt] , identifier[array] = identifier[numpy] . identifier[zeros] ( identifier[gcount] , identifier[dtype] = identifier[afmt] ))
identifier[cols] . identifier[append] ( identifier[cols_dict] [ identifier[ptype] ])
identifier[_shape] = identifier[_naxis] [ literal[int] :]
identifier[_shape] . identifier[reverse] ()
identifier[_code] = identifier[fits] . identifier[BITPIX2DTYPE] [ identifier[_bitpix] ]
identifier[_bscale] = identifier[phdr] . identifier[get] ( literal[string] , literal[int] )
identifier[_bzero] = identifier[phdr] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[phdr] [ literal[string] ][: literal[int] ]== literal[string] :
identifier[_uint16] = literal[int]
identifier[_bzero] = literal[int]
keyword[else] :
identifier[_uint16] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[phdr] )- literal[int] ,- literal[int] ,- literal[int] ):
keyword[if] identifier[phdr] . identifier[cards] [ identifier[i] ]. identifier[keyword] keyword[in] identifier[_list] :
keyword[del] identifier[phdr] [ identifier[i] ]
identifier[phdr] [ literal[string] ]= keyword[True]
identifier[phdr] [ literal[string] ]= keyword[False]
identifier[_after] = literal[string]
keyword[if] identifier[_naxis0] > literal[int] :
identifier[_after] += identifier[str] ( identifier[_naxis0] )
identifier[phdr] . identifier[set] ( literal[string] , identifier[value] = keyword[True] ,
identifier[comment] = literal[string] ,
identifier[after] = identifier[_after] )
identifier[f1] = identifier[open] ( identifier[data_file] , identifier[mode] = literal[string] )
identifier[dat] = identifier[f1] . identifier[read] ()
identifier[errormsg] = literal[string]
identifier[arr_shape] = identifier[_naxis] [:]
identifier[arr_shape] [ literal[int] ]= identifier[gcount]
identifier[arr_stack] = identifier[numpy] . identifier[zeros] ( identifier[arr_shape] , identifier[dtype] = identifier[_code] )
identifier[loc] = literal[int]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[gcount] ):
identifier[ext_dat] = identifier[numpy] . identifier[fromstring] ( identifier[dat] [ identifier[loc] : identifier[loc] + identifier[data_size] ], identifier[dtype] = identifier[_code] )
identifier[ext_dat] = identifier[ext_dat] . identifier[reshape] ( identifier[_shape] )
keyword[if] identifier[_uint16] :
identifier[ext_dat] += identifier[_bzero]
keyword[if] identifier[_code] . identifier[find] ( literal[string] )>= literal[int] keyword[and] ( identifier[numpy] . identifier[any] ( identifier[numpy] . identifier[isnan] ( identifier[ext_dat] )) keyword[or] identifier[numpy] . identifier[any] ( identifier[numpy] . identifier[isinf] ( identifier[ext_dat] ))):
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += identifier[input] + literal[string] %( identifier[k] + literal[int] )
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
keyword[elif] identifier[_code] . identifier[find] ( literal[string] )>= literal[int] :
identifier[ext_dat_frac] , identifier[ext_dat_exp] = identifier[numpy] . identifier[frexp] ( identifier[ext_dat] )
keyword[if] identifier[ext_dat_exp] . identifier[max] ()== identifier[int] ( identifier[_bitpix] )- literal[int] :
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += identifier[input] + literal[string] %( identifier[k] + literal[int] )
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[arr_stack] [ identifier[k] ]= identifier[ext_dat]
identifier[rec] = identifier[numpy] . identifier[fromstring] ( identifier[dat] [ identifier[loc] + identifier[data_size] : identifier[loc] + identifier[group_size] ], identifier[dtype] = identifier[formats] )
identifier[loc] += identifier[group_size]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pcount] + literal[int] ):
identifier[val] = identifier[rec] [ literal[int] ][ identifier[i] - literal[int] ]
keyword[if] identifier[i] keyword[in] identifier[bools] :
keyword[if] identifier[val] :
identifier[val] = literal[string]
keyword[else] :
identifier[val] = literal[string]
identifier[cols] [ identifier[i] - literal[int] ]. identifier[array] [ identifier[k] ]= identifier[val]
keyword[if] identifier[k] == literal[int] :
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[pcount] + literal[int] ):
identifier[val] = identifier[rec] [ literal[int] ][ identifier[i] - literal[int] ]
keyword[if] identifier[val] . identifier[dtype] . identifier[kind] == literal[string] :
identifier[val] = identifier[val] . identifier[decode] ( literal[string] )
keyword[if] identifier[i] keyword[in] identifier[bools] :
keyword[if] identifier[val] :
identifier[val] = keyword[True]
keyword[else] :
identifier[val] = keyword[False]
keyword[if] identifier[i] keyword[in] identifier[floats] :
identifier[_str] = literal[string] %( identifier[key] [ identifier[i] - literal[int] ], identifier[val] , identifier[comm] [ identifier[i] - literal[int] ])
identifier[_card] = identifier[fits] . identifier[Card] . identifier[fromstring] ( identifier[_str] )
keyword[else] :
identifier[_card] = identifier[fits] . identifier[Card] ( identifier[keyword] = identifier[key] [ identifier[i] - literal[int] ], identifier[value] = identifier[val] , identifier[comment] = identifier[comm] [ identifier[i] - literal[int] ])
identifier[phdr] . identifier[insert] ( identifier[phdr_indx] + identifier[i] , identifier[_card] )
keyword[if] ( identifier[_bscale] != literal[int] keyword[or] identifier[_bzero] != literal[int] ):
identifier[phdr] [ literal[string] ]= identifier[_bscale]
identifier[phdr] [ literal[string] ]= identifier[_bzero]
identifier[ext_table] = identifier[fits] . identifier[TableHDU] . identifier[from_columns] ( identifier[cols] )
identifier[ext_table] . identifier[header] . identifier[set] ( literal[string] , identifier[value] = identifier[input] + literal[string] , identifier[after] = literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[key] )):
identifier[ext_table] . identifier[header] . identifier[append] ( identifier[fits] . identifier[Card] ( identifier[keyword] = identifier[key] [ identifier[i] ], identifier[value] = identifier[comm] [ identifier[i] ]))
keyword[if] identifier[errormsg] != literal[string] :
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[errormsg] += literal[string]
identifier[print] ( identifier[errormsg] )
identifier[f1] . identifier[close] ()
identifier[hdulist] = identifier[fits] . identifier[HDUList] ([ identifier[fits] . identifier[PrimaryHDU] ( identifier[header] = identifier[phdr] , identifier[data] = identifier[arr_stack] )])
identifier[hdulist] . identifier[append] ( identifier[ext_table] )
keyword[return] identifier[hdulist] | def byteswap(input, output=None, clobber=True):
"""Input GEIS files "input" will be read and converted to a new GEIS file
whose byte-order has been swapped from its original state.
Parameters
----------
input - str
Full filename with path of input GEIS image header file
output - str
Full filename with path of output GEIS image header file
If None, a default name will be created as input_swap.??h
clobber - bool
Overwrite any pre-existing output file? [Default: True]
Notes
-----
This function will automatically read and write out the data file using the
GEIS image naming conventions.
"""
global dat
cardLen = fits.Card.length
# input file(s) must be of the form *.??h and *.??d
if input[-1] != 'h' or input[-4] != '.':
raise ValueError('Illegal input GEIS file name %s' % input) # depends on [control=['if'], data=[]]
data_file = input[:-1] + 'd'
# Create default output name if no output name was specified by the user
if output is None:
output = input.replace('.', '_swap.') # depends on [control=['if'], data=['output']]
out_data = output[:-1] + 'd'
if os.path.exists(output) and (not clobber):
errstr = 'Output file already exists! Please remove or rename and start again...'
raise IOError(errstr) # depends on [control=['if'], data=[]]
_os = sys.platform
if _os[:5] == 'linux' or _os[:5] == 'win32' or _os[:5] == 'sunos' or (_os[:3] == 'osf') or (_os[:6] == 'darwin'):
bytes_per_line = cardLen + 1 # depends on [control=['if'], data=[]]
else:
raise ValueError('Platform %s is not supported (yet).' % _os)
end_card = 'END' + ' ' * (cardLen - 3)
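# FITS header cards are fixed width (fits.Card.length, normally 80 characters),
# so the END card is 'END' padded with blanks out to a full card.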
# open input file
im = open(input)
# Generate the primary HDU so we can have access to keywords which describe
# the number of groups and shape of each group's array
#
cards = []
while 1:
line = im.read(bytes_per_line)[:cardLen]
line = line[:8].upper() + line[8:]
if line == end_card:
break # depends on [control=['if'], data=[]]
cards.append(fits.Card.fromstring(line)) # depends on [control=['while'], data=[]]
phdr = fits.Header(cards)
im.close()
_naxis0 = phdr.get('NAXIS', 0)
_naxis = [phdr['NAXIS' + str(j)] for j in range(1, _naxis0 + 1)]
_naxis.insert(0, _naxis0)
_bitpix = phdr['BITPIX']
_psize = phdr['PSIZE']
if phdr['DATATYPE'][:4] == 'REAL':
_bitpix = -_bitpix # depends on [control=['if'], data=[]]
if _naxis0 > 0:
size = reduce(lambda x, y: x * y, _naxis[1:])
data_size = abs(_bitpix) * size // 8 # depends on [control=['if'], data=[]]
else:
data_size = 0
group_size = data_size + _psize // 8
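# Worked example with made-up numbers: a 512x512 REAL*4 group has
# _bitpix = -32 and size = 512 * 512 = 262144, so data_size =
# 32 * 262144 // 8 = 1048576 bytes; with _psize = 512 bits of group
# parameters, group_size = 1048576 + 512 // 8 = 1048640 bytes.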
# decode the group parameter definitions,
# group parameters will become extension header
groups = phdr['GROUPS']
gcount = phdr['GCOUNT']
pcount = phdr['PCOUNT']
formats = []
bools = []
floats = []
_range = list(range(1, pcount + 1))
key = [phdr['PTYPE' + str(j)] for j in _range]
comm = [phdr.cards['PTYPE' + str(j)].comment for j in _range]
# delete group parameter definition header keywords
_list = ['PTYPE' + str(j) for j in _range] + ['PDTYPE' + str(j) for j in _range] + ['PSIZE' + str(j) for j in _range] + ['DATATYPE', 'PSIZE', 'GCOUNT', 'PCOUNT', 'BSCALE', 'BZERO']
# Construct record array formats for the group parameters
# as interpreted from the Primary header file
for i in range(1, pcount + 1):
ptype = key[i - 1]
pdtype = phdr['PDTYPE' + str(i)]
star = pdtype.find('*')
_type = pdtype[:star]
_bytes = pdtype[star + 1:]
# collect boolean keywords since they need special attention later
if _type == 'LOGICAL':
bools.append(i) # depends on [control=['if'], data=[]]
if pdtype == 'REAL*4':
floats.append(i) # depends on [control=['if'], data=[]]
fmt = geis_fmt[_type] + _bytes
formats.append((ptype, fmt)) # depends on [control=['for'], data=['i']]
_shape = _naxis[1:]
_shape.reverse()
_code = fits.BITPIX2DTYPE[_bitpix]
_bscale = phdr.get('BSCALE', 1)
_bzero = phdr.get('BZERO', 0)
if phdr['DATATYPE'][:10] == 'UNSIGNED*2':
_uint16 = 1
_bzero = 32768 # depends on [control=['if'], data=[]]
else:
_uint16 = 0
# Use copy-on-write for all data types since byteswap may be needed
# in some platforms.
f1 = open(data_file, mode='rb')
dat = f1.read()
f1.close()
errormsg = ''
loc = 0
outdat = b''
for k in range(gcount):
ext_dat = numpy.fromstring(dat[loc:loc + data_size], dtype=_code)
ext_dat = ext_dat.reshape(_shape).byteswap()
outdat += ext_dat.tostring()
ext_hdu = fits.hdu.ImageHDU(data=ext_dat)
rec = numpy.fromstring(dat[loc + data_size:loc + group_size], dtype=formats).byteswap()
outdat += rec.tostring()
loc += group_size # depends on [control=['for'], data=[]]
if os.path.exists(output):
os.remove(output) # depends on [control=['if'], data=[]]
if os.path.exists(out_data):
os.remove(out_data) # depends on [control=['if'], data=[]]
shutil.copy(input, output)
outfile = open(out_data, mode='wb')
outfile.write(outdat)
outfile.close()
print('Finished byte-swapping ', input, ' to ', output)
#-------------------------------------------------------------------------------
'Input GEIS files "input" will be read and a HDUList object will\n be returned that matches the waiver-FITS format written out by \'stwfits\' in IRAF.\n\n The user can use the writeto method to write the HDUList object to\n a FITS file.\n '
# global dat # !!! (looks like this is a function missing its head)
cardLen = fits.Card.length
# input file(s) must be of the form *.??h and *.??d
if input[-1] != 'h' or input[-4] != '.':
raise ValueError('Illegal input GEIS file name %s' % input) # depends on [control=['if'], data=[]]
data_file = input[:-1] + 'd'
_os = sys.platform
if _os[:5] == 'linux' or _os[:5] == 'win32' or _os[:5] == 'sunos' or (_os[:3] == 'osf') or (_os[:6] == 'darwin'):
bytes_per_line = cardLen + 1 # depends on [control=['if'], data=[]]
else:
raise ValueError('Platform %s is not supported (yet).' % _os)
end_card = 'END' + ' ' * (cardLen - 3)
# open input file
im = open(input)
# Generate the primary HDU
cards = []
while 1:
line = im.read(bytes_per_line)[:cardLen]
line = line[:8].upper() + line[8:]
if line == end_card:
break # depends on [control=['if'], data=[]]
cards.append(fits.Card.fromstring(line)) # depends on [control=['while'], data=[]]
phdr = fits.Header(cards)
im.close()
phdr.set('FILENAME', value=input, after='DATE')
# Determine starting point for adding Group Parameter Block keywords to Primary header
phdr_indx = phdr.index('PSIZE')
_naxis0 = phdr.get('NAXIS', 0)
_naxis = [phdr['NAXIS' + str(j)] for j in range(1, _naxis0 + 1)]
_naxis.insert(0, _naxis0)
_bitpix = phdr['BITPIX']
_psize = phdr['PSIZE']
if phdr['DATATYPE'][:4] == 'REAL':
_bitpix = -_bitpix # depends on [control=['if'], data=[]]
if _naxis0 > 0:
size = reduce(lambda x, y: x * y, _naxis[1:])
data_size = abs(_bitpix) * size // 8 # depends on [control=['if'], data=[]]
else:
data_size = 0
group_size = data_size + _psize // 8
# decode the group parameter definitions,
# group parameters will become extension table
groups = phdr['GROUPS']
gcount = phdr['GCOUNT']
pcount = phdr['PCOUNT']
formats = []
bools = []
floats = []
cols = [] # column definitions used for extension table
cols_dict = {} # provides name access to Column defs
_range = list(range(1, pcount + 1))
key = [phdr['PTYPE' + str(j)] for j in _range]
comm = [phdr.cards['PTYPE' + str(j)].comment for j in _range]
# delete group parameter definition header keywords
_list = ['PTYPE' + str(j) for j in _range] + ['PDTYPE' + str(j) for j in _range] + ['PSIZE' + str(j) for j in _range] + ['DATATYPE', 'PSIZE', 'GCOUNT', 'PCOUNT', 'BSCALE', 'BZERO']
# Construct record array formats for the group parameters
# as interpreted from the Primary header file
for i in range(1, pcount + 1):
ptype = key[i - 1]
pdtype = phdr['PDTYPE' + str(i)]
star = pdtype.find('*')
_type = pdtype[:star]
_bytes = pdtype[star + 1:]
# collect boolean keywords since they need special attention later
if _type == 'LOGICAL':
bools.append(i) # depends on [control=['if'], data=[]]
if pdtype == 'REAL*4':
floats.append(i) # depends on [control=['if'], data=[]]
# identify keywords which require conversion to special units
if ptype in kw_DOUBLE:
_type = 'DOUBLE' # depends on [control=['if'], data=[]]
fmt = geis_fmt[_type] + _bytes
formats.append((ptype, fmt))
# Set up definitions for use in creating the group-parameter block table
nrpt = ''
nbits = str(int(_bytes) * 8)
if 'CHAR' in _type:
nrpt = _bytes
nbits = _bytes # depends on [control=['if'], data=[]]
afmt = cols_fmt[_type] + nbits
if 'LOGICAL' in _type:
afmt = cols_fmt[_type] # depends on [control=['if'], data=['_type']]
cfmt = cols_pfmt[_type] + nrpt
#print 'Column format for ',ptype,': ',cfmt,' with dtype of ',afmt
cols_dict[ptype] = fits.Column(name=ptype, format=cfmt, array=numpy.zeros(gcount, dtype=afmt))
cols.append(cols_dict[ptype]) # This keeps the columns in order # depends on [control=['for'], data=['i']]
_shape = _naxis[1:]
_shape.reverse()
_code = fits.BITPIX2DTYPE[_bitpix]
_bscale = phdr.get('BSCALE', 1)
_bzero = phdr.get('BZERO', 0)
if phdr['DATATYPE'][:10] == 'UNSIGNED*2':
_uint16 = 1
_bzero = 32768 # depends on [control=['if'], data=[]]
else:
_uint16 = 0
# delete from the end, so it will not conflict with previous delete
for i in range(len(phdr) - 1, -1, -1):
if phdr.cards[i].keyword in _list:
del phdr[i] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# clean up other primary header keywords
phdr['SIMPLE'] = True
phdr['GROUPS'] = False
_after = 'NAXIS'
if _naxis0 > 0:
_after += str(_naxis0) # depends on [control=['if'], data=['_naxis0']]
phdr.set('EXTEND', value=True, comment='FITS dataset may contain extensions', after=_after)
# Use copy-on-write for all data types since byteswap may be needed
# in some platforms.
f1 = open(data_file, mode='rb')
dat = f1.read()
errormsg = ''
# Define data array for all groups
arr_shape = _naxis[:]
arr_shape[0] = gcount
arr_stack = numpy.zeros(arr_shape, dtype=_code)
loc = 0
for k in range(gcount):
ext_dat = numpy.fromstring(dat[loc:loc + data_size], dtype=_code)
ext_dat = ext_dat.reshape(_shape)
if _uint16:
ext_dat += _bzero # depends on [control=['if'], data=[]]
# Check to see whether there are any NaN's or infs which might indicate
# a byte-swapping problem, such as being written out on little-endian
# and being read in on big-endian or vice-versa.
if _code.find('float') >= 0 and (numpy.any(numpy.isnan(ext_dat)) or numpy.any(numpy.isinf(ext_dat))):
errormsg += '===================================\n'
errormsg += '= WARNING: =\n'
errormsg += '= Input image: =\n'
errormsg += input + '[%d]\n' % (k + 1)
errormsg += '= had floating point data values =\n'
errormsg += '= of NaN and/or Inf. =\n'
errormsg += '===================================\n' # depends on [control=['if'], data=[]]
elif _code.find('int') >= 0:
# Check INT data for max values
(ext_dat_frac, ext_dat_exp) = numpy.frexp(ext_dat)
if ext_dat_exp.max() == int(_bitpix) - 1:
# Potential problems with byteswapping
errormsg += '===================================\n'
errormsg += '= WARNING: =\n'
errormsg += '= Input image: =\n'
errormsg += input + '[%d]\n' % (k + 1)
errormsg += '= had integer data values =\n'
errormsg += '= with maximum bitvalues. =\n'
errormsg += '===================================\n' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
arr_stack[k] = ext_dat
rec = numpy.fromstring(dat[loc + data_size:loc + group_size], dtype=formats)
loc += group_size
# Add data from this GPB to table
for i in range(1, pcount + 1):
val = rec[0][i - 1]
if i in bools:
if val:
val = 'T' # depends on [control=['if'], data=[]]
else:
val = 'F' # depends on [control=['if'], data=[]]
cols[i - 1].array[k] = val # depends on [control=['for'], data=['i']]
# Based on the first group, add GPB keywords to PRIMARY header
if k == 0:
# Create separate PyFITS Card objects for each entry in 'rec'
# and update Primary HDU with these keywords after PSIZE
for i in range(1, pcount + 1):
#val = rec.field(i-1)[0]
val = rec[0][i - 1]
if val.dtype.kind == 'S':
val = val.decode('ascii') # depends on [control=['if'], data=[]]
if i in bools:
if val:
val = True # depends on [control=['if'], data=[]]
else:
val = False # depends on [control=['if'], data=[]]
if i in floats:
# use fromstring, format in Card is deprecated in pyfits 0.9
_str = '%-8s= %20.13G / %s' % (key[i - 1], val, comm[i - 1])
_card = fits.Card.fromstring(_str) # depends on [control=['if'], data=['i']]
else:
_card = fits.Card(keyword=key[i - 1], value=val, comment=comm[i - 1])
phdr.insert(phdr_indx + i, _card) # depends on [control=['for'], data=['i']]
# deal with bscale/bzero
if _bscale != 1 or _bzero != 0:
phdr['BSCALE'] = _bscale
phdr['BZERO'] = _bzero # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
#hdulist.append(ext_hdu)
# Define new table based on Column definitions
ext_table = fits.TableHDU.from_columns(cols)
ext_table.header.set('EXTNAME', value=input + '.tab', after='TFIELDS')
# Add column descriptions to header of table extension to match stwfits output
for i in range(len(key)):
ext_table.header.append(fits.Card(keyword=key[i], value=comm[i])) # depends on [control=['for'], data=['i']]
if errormsg != '':
errormsg += '===================================\n'
errormsg += '= This file may have been =\n'
errormsg += '= written out on a platform =\n'
errormsg += '= with a different byte-order. =\n'
errormsg += '= =\n'
errormsg += '= Please verify that the values =\n'
errormsg += '= are correct or apply the =\n'
errormsg += "= '.byteswap()' method. =\n"
errormsg += '===================================\n'
print(errormsg) # depends on [control=['if'], data=['errormsg']]
f1.close()
hdulist = fits.HDUList([fits.PrimaryHDU(header=phdr, data=arr_stack)])
hdulist.append(ext_table)
return hdulist |
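
A minimal usage sketch for byteswap() above (note that the row's body appears to have a second, headless function merged into it, as the inline "!!!" comment already flags). The file names and the import path are assumptions; only byteswap()'s signature comes from the source:

from readgeis import byteswap  # assumed module name

byteswap('w0vi0101t.c0h')  # made-up GEIS header; writes w0vi0101t_swap.c0h/.c0d
byteswap('w0vi0101t.c0h', output='fixed.c0h', clobber=True)  # explicit output name, overwrite allowed
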
def destroy_ssh_key(self, ssh_key_id):
"""
This method will delete the SSH key from your account.
"""
json = self.request('/ssh_keys/%s/destroy' % ssh_key_id, method='GET')
status = json.get('status')
return status | def function[destroy_ssh_key, parameter[self, ssh_key_id]]:
constant[
This method will delete the SSH key from your account.
]
variable[json] assign[=] call[name[self].request, parameter[binary_operation[constant[/ssh_keys/%s/destroy] <ast.Mod object at 0x7da2590d6920> name[ssh_key_id]]]]
variable[status] assign[=] call[name[json].get, parameter[constant[status]]]
return[name[status]] | keyword[def] identifier[destroy_ssh_key] ( identifier[self] , identifier[ssh_key_id] ):
literal[string]
identifier[json] = identifier[self] . identifier[request] ( literal[string] % identifier[ssh_key_id] , identifier[method] = literal[string] )
identifier[status] = identifier[json] . identifier[get] ( literal[string] )
keyword[return] identifier[status] | def destroy_ssh_key(self, ssh_key_id):
"""
This method will delete the SSH key from your account.
"""
json = self.request('/ssh_keys/%s/destroy' % ssh_key_id, method='GET')
status = json.get('status')
return status |
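
destroy_ssh_key() above is an instance method that only relies on self.request(); a hedged sketch of calling it through a client object (the DoManager name, its constructor arguments, and the key id are assumptions, not part of the row):

manager = DoManager('client-id', 'api-key')  # hypothetical client exposing request()
status = manager.destroy_ssh_key(12345)      # 12345 is a made-up SSH key id
if status == 'OK':                           # assumed success marker from the API
    print('SSH key deleted')
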
def route_directions(self, rt):
"""
Return a list of directions for a route.
The directions seem to always be INBOUND and OUTBOUND for the busses
currently, where INBOUND is towards downtown and OUTBOUND is away from
downtown. (No idea if this is going to change.)
Arguments:
`rt`: route designator
Response:
list of `dir`: directions served (e.g., INBOUND, OUTBOUND)
http://realtime.portauthority.org/bustime/apidoc/v1/main.jsp?section=routeDirections.jsp
"""
url = self.endpoint('R_DIRECTIONS', dict(rt=rt))
return self.response(url) | def function[route_directions, parameter[self, rt]]:
constant[
Return a list of directions for a route.
The directions always seem to be INBOUND and OUTBOUND for the buses
currently, where INBOUND is towards downtown and OUTBOUND is away from
downtown. (No idea if this is going to change.)
Arguments:
`rt`: route designator
Response:
list of `dir`: directions served (e.g., INBOUND, OUTBOUND)
http://realtime.portauthority.org/bustime/apidoc/v1/main.jsp?section=routeDirections.jsp
]
variable[url] assign[=] call[name[self].endpoint, parameter[constant[R_DIRECTIONS], call[name[dict], parameter[]]]]
return[call[name[self].response, parameter[name[url]]]] | keyword[def] identifier[route_directions] ( identifier[self] , identifier[rt] ):
literal[string]
identifier[url] = identifier[self] . identifier[endpoint] ( literal[string] , identifier[dict] ( identifier[rt] = identifier[rt] ))
keyword[return] identifier[self] . identifier[response] ( identifier[url] ) | def route_directions(self, rt):
"""
Return a list of directions for a route.
The directions seem to always be INBOUND and OUTBOUND for the busses
currently, where INBOUND is towards downtown and OUTBOUND is away from
downtown. (No idea if this is going to change.)
Arguments:
`rt`: route designator
Response:
list of `dir`: directions served (e.g., INBOUND, OUTBOUND)
http://realtime.portauthority.org/bustime/apidoc/v1/main.jsp?section=routeDirections.jsp
"""
url = self.endpoint('R_DIRECTIONS', dict(rt=rt))
return self.response(url) |
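
A hedged usage sketch for route_directions(); the BusTime client class and its construction are assumptions, while the method name and the rt argument come from the row above:

client = BusTime('my-api-key')               # hypothetical API wrapper
directions = client.route_directions('61C')  # sample route designator
for d in directions:                         # typically ['INBOUND', 'OUTBOUND']
    print(d)
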
def add_line(preso, x1, y1, x2, y2, width="3pt", color="red"):
"""
Arrow pointing up to right:
context.xml:
office:automatic-styles/
<style:style style:name="gr1" style:family="graphic" style:parent-style-name="objectwithoutfill">
<style:graphic-properties
draw:marker-end="Arrow"
draw:marker-end-width="0.3cm"
draw:fill="none"
draw:textarea-vertical-align="middle"/>
</style:style>
3pt width color red
<style:style style:name="gr2" style:family="graphic" style:parent-style-name="objectwithoutfill">
<style:graphic-properties
svg:stroke-width="0.106cm"
svg:stroke-color="#ed1c24"
draw:marker-start-width="0.359cm"
draw:marker-end="Arrow"
draw:marker-end-width="0.459cm"
draw:fill="none"
draw:textarea-vertical-align="middle"
fo:padding-top="0.178cm"
fo:padding-bottom="0.178cm"
fo:padding-left="0.303cm"
fo:padding-right="0.303cm"/>
</style:style>
...
office:presentation/draw:page
<draw:line draw:style-name="gr1" draw:text-style-name="P2" draw:layer="layout" svg:x1="6.35cm" svg:y1="10.16cm" svg:x2="10.668cm" svg:y2="5.842cm"><text:p/></draw:line>
"""
marker_end_ratio = .459 / 3 # .459cm/3pt
marker_start_ratio = .359 / 3 # .359cm/3pt
stroke_ratio = .106 / 3 # .106cm/3pt
w = float(width[0:width.index("pt")])
sw = w * stroke_ratio
mew = w * marker_end_ratio
msw = w * marker_start_ratio
attribs = {
"svg:stroke-width": "{}cm".format(sw),
"svg:stroke-color": color, # "#ed1c24",
"draw:marker-start-width": "{}cm".format(msw),
"draw:marker-end": "Arrow",
"draw:marker-end-width": "{}cm".format(mew),
"draw:fill": "none",
"draw:textarea-vertical-align": "middle",
}
style = LineStyle(**attribs)
# node = style.style_node()
preso.add_style(style)
line_attrib = {
"draw:style-name": style.name,
"draw:layer": "layout",
"svg:x1": x1,
"svg:y1": y1,
"svg:x2": x2,
"svg:y2": y2,
}
line_node = el("draw:line", attrib=line_attrib)
preso.slides[-1]._page.append(line_node) | def function[add_line, parameter[preso, x1, y1, x2, y2, width, color]]:
constant[
Arrow pointing up to right:
context.xml:
office:automatic-styles/
<style:style style:name="gr1" style:family="graphic" style:parent-style-name="objectwithoutfill">
<style:graphic-properties
draw:marker-end="Arrow"
draw:marker-end-width="0.3cm"
draw:fill="none"
draw:textarea-vertical-align="middle"/>
</style:style>
3pt width color red
<style:style style:name="gr2" style:family="graphic" style:parent-style-name="objectwithoutfill">
<style:graphic-properties
svg:stroke-width="0.106cm"
svg:stroke-color="#ed1c24"
draw:marker-start-width="0.359cm"
draw:marker-end="Arrow"
draw:marker-end-width="0.459cm"
draw:fill="none"
draw:textarea-vertical-align="middle"
fo:padding-top="0.178cm"
fo:padding-bottom="0.178cm"
fo:padding-left="0.303cm"
fo:padding-right="0.303cm"/>
</style:style>
...
office:presentation/draw:page
<draw:line draw:style-name="gr1" draw:text-style-name="P2" draw:layer="layout" svg:x1="6.35cm" svg:y1="10.16cm" svg:x2="10.668cm" svg:y2="5.842cm"><text:p/></draw:line>
]
variable[marker_end_ratio] assign[=] binary_operation[constant[0.459] / constant[3]]
variable[marker_start_ratio] assign[=] binary_operation[constant[0.359] / constant[3]]
variable[stroke_ratio] assign[=] binary_operation[constant[0.106] / constant[3]]
variable[w] assign[=] call[name[float], parameter[call[name[width]][<ast.Slice object at 0x7da20c7c86d0>]]]
variable[sw] assign[=] binary_operation[name[w] * name[stroke_ratio]]
variable[mew] assign[=] binary_operation[name[w] * name[marker_end_ratio]]
variable[msw] assign[=] binary_operation[name[w] * name[marker_start_ratio]]
variable[attribs] assign[=] dictionary[[<ast.Constant object at 0x7da20c7ca1a0>, <ast.Constant object at 0x7da20c7c8640>, <ast.Constant object at 0x7da20c7cb190>, <ast.Constant object at 0x7da20c7cb3d0>, <ast.Constant object at 0x7da20c7c96c0>, <ast.Constant object at 0x7da20c7c83a0>, <ast.Constant object at 0x7da20c7ca230>], [<ast.Call object at 0x7da20c7c8b20>, <ast.Name object at 0x7da20c7cae30>, <ast.Call object at 0x7da20c7cad70>, <ast.Constant object at 0x7da20c7ca740>, <ast.Call object at 0x7da20c7c87f0>, <ast.Constant object at 0x7da20c7c9840>, <ast.Constant object at 0x7da20c7ca440>]]
variable[style] assign[=] call[name[LineStyle], parameter[]]
call[name[preso].add_style, parameter[name[style]]]
variable[line_attrib] assign[=] dictionary[[<ast.Constant object at 0x7da20c7cb1f0>, <ast.Constant object at 0x7da20c7c94e0>, <ast.Constant object at 0x7da20c7c9210>, <ast.Constant object at 0x7da20c7cb640>, <ast.Constant object at 0x7da20c7c8a90>, <ast.Constant object at 0x7da20c7c9930>], [<ast.Attribute object at 0x7da20c7ca140>, <ast.Constant object at 0x7da20c7caaa0>, <ast.Name object at 0x7da20c7c8700>, <ast.Name object at 0x7da20c7c9510>, <ast.Name object at 0x7da20c7cb760>, <ast.Name object at 0x7da20c7cbe50>]]
variable[line_node] assign[=] call[name[el], parameter[constant[draw:line]]]
call[call[name[preso].slides][<ast.UnaryOp object at 0x7da20c7cb880>]._page.append, parameter[name[line_node]]] | keyword[def] identifier[add_line] ( identifier[preso] , identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] , identifier[width] = literal[string] , identifier[color] = literal[string] ):
literal[string]
identifier[marker_end_ratio] = literal[int] / literal[int]
identifier[marker_start_ratio] = literal[int] / literal[int]
identifier[stroke_ratio] = literal[int] / literal[int]
identifier[w] = identifier[float] ( identifier[width] [ literal[int] : identifier[width] . identifier[index] ( literal[string] )])
identifier[sw] = identifier[w] * identifier[stroke_ratio]
identifier[mew] = identifier[w] * identifier[marker_end_ratio]
identifier[msw] = identifier[w] * identifier[marker_start_ratio]
identifier[attribs] ={
literal[string] : literal[string] . identifier[format] ( identifier[sw] ),
literal[string] : identifier[color] ,
literal[string] : literal[string] . identifier[format] ( identifier[msw] ),
literal[string] : literal[string] ,
literal[string] : literal[string] . identifier[format] ( identifier[mew] ),
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[style] = identifier[LineStyle] (** identifier[attribs] )
identifier[preso] . identifier[add_style] ( identifier[style] )
identifier[line_attrib] ={
literal[string] : identifier[style] . identifier[name] ,
literal[string] : literal[string] ,
literal[string] : identifier[x1] ,
literal[string] : identifier[y1] ,
literal[string] : identifier[x2] ,
literal[string] : identifier[y2] ,
}
identifier[line_node] = identifier[el] ( literal[string] , identifier[attrib] = identifier[line_attrib] )
identifier[preso] . identifier[slides] [- literal[int] ]. identifier[_page] . identifier[append] ( identifier[line_node] ) | def add_line(preso, x1, y1, x2, y2, width='3pt', color='red'):
"""
Arrow pointing up to right:
context.xml:
office:automatic-styles/
<style:style style:name="gr1" style:family="graphic" style:parent-style-name="objectwithoutfill">
<style:graphic-properties
draw:marker-end="Arrow"
draw:marker-end-width="0.3cm"
draw:fill="none"
draw:textarea-vertical-align="middle"/>
</style:style>
3pt width color red
<style:style style:name="gr2" style:family="graphic" style:parent-style-name="objectwithoutfill">
<style:graphic-properties
svg:stroke-width="0.106cm"
svg:stroke-color="#ed1c24"
draw:marker-start-width="0.359cm"
draw:marker-end="Arrow"
draw:marker-end-width="0.459cm"
draw:fill="none"
draw:textarea-vertical-align="middle"
fo:padding-top="0.178cm"
fo:padding-bottom="0.178cm"
fo:padding-left="0.303cm"
fo:padding-right="0.303cm"/>
</style:style>
...
office:presentation/draw:page
<draw:line draw:style-name="gr1" draw:text-style-name="P2" draw:layer="layout" svg:x1="6.35cm" svg:y1="10.16cm" svg:x2="10.668cm" svg:y2="5.842cm"><text:p/></draw:line>
"""
marker_end_ratio = 0.459 / 3 # .459cm/3pt
marker_start_ratio = 0.359 / 3 # .359cm/3pt
stroke_ratio = 0.106 / 3 # .106cm/3pt
w = float(width[0:width.index('pt')])
sw = w * stroke_ratio
mew = w * marker_end_ratio
msw = w * marker_start_ratio # "#ed1c24",
attribs = {'svg:stroke-width': '{}cm'.format(sw), 'svg:stroke-color': color, 'draw:marker-start-width': '{}cm'.format(msw), 'draw:marker-end': 'Arrow', 'draw:marker-end-width': '{}cm'.format(mew), 'draw:fill': 'none', 'draw:textarea-vertical-align': 'middle'}
style = LineStyle(**attribs)
# node = style.style_node()
preso.add_style(style)
line_attrib = {'draw:style-name': style.name, 'draw:layer': 'layout', 'svg:x1': x1, 'svg:y1': y1, 'svg:x2': x2, 'svg:y2': y2}
line_node = el('draw:line', attrib=line_attrib)
preso.slides[-1]._page.append(line_node) |
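
The ratios in add_line above encode an empirical pt-to-cm mapping taken from the sample context.xml in its docstring (0.106 cm of stroke per 3 pt of requested width, and similar factors for the arrow markers). A minimal self-contained sketch of just that conversion — the function name here is illustrative, not part of the odplib API:

def line_style_widths(width_pt):
    # Ratios copied from the sample context.xml: cm per 3 pt of requested width.
    stroke_cm = width_pt * (0.106 / 3)        # svg:stroke-width
    marker_start_cm = width_pt * (0.359 / 3)  # draw:marker-start-width
    marker_end_cm = width_pt * (0.459 / 3)    # draw:marker-end-width
    return {'stroke': '%.3fcm' % stroke_cm,
            'marker-start': '%.3fcm' % marker_start_cm,
            'marker-end': '%.3fcm' % marker_end_cm}

print(line_style_widths(3.0))
# {'stroke': '0.106cm', 'marker-start': '0.359cm', 'marker-end': '0.459cm'}
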
def seal_aes_ctr_legacy(key_service, secret, digest_method=DEFAULT_DIGEST):
"""
Encrypts `secret` using the key service.
You can decrypt with the companion method `open_aes_ctr_legacy`.
"""
    # generate a 64-byte key.
# Half will be for data encryption, the other half for HMAC
key, encoded_key = key_service.generate_key_data(64)
ciphertext, hmac = _seal_aes_ctr(
secret, key, LEGACY_NONCE, digest_method,
)
return {
'key': b64encode(encoded_key).decode('utf-8'),
'contents': b64encode(ciphertext).decode('utf-8'),
'hmac': codecs.encode(hmac, "hex_codec"),
'digest': digest_method,
} | def function[seal_aes_ctr_legacy, parameter[key_service, secret, digest_method]]:
constant[
Encrypts `secret` using the key service.
You can decrypt with the companion method `open_aes_ctr_legacy`.
]
<ast.Tuple object at 0x7da20c7c80a0> assign[=] call[name[key_service].generate_key_data, parameter[constant[64]]]
<ast.Tuple object at 0x7da20c7c9300> assign[=] call[name[_seal_aes_ctr], parameter[name[secret], name[key], name[LEGACY_NONCE], name[digest_method]]]
return[dictionary[[<ast.Constant object at 0x7da18bc70d30>, <ast.Constant object at 0x7da18bc71240>, <ast.Constant object at 0x7da18bc735e0>, <ast.Constant object at 0x7da18bc70040>], [<ast.Call object at 0x7da18bc714b0>, <ast.Call object at 0x7da18bc70850>, <ast.Call object at 0x7da18bc71f60>, <ast.Name object at 0x7da18bc719f0>]]] | keyword[def] identifier[seal_aes_ctr_legacy] ( identifier[key_service] , identifier[secret] , identifier[digest_method] = identifier[DEFAULT_DIGEST] ):
literal[string]
identifier[key] , identifier[encoded_key] = identifier[key_service] . identifier[generate_key_data] ( literal[int] )
identifier[ciphertext] , identifier[hmac] = identifier[_seal_aes_ctr] (
identifier[secret] , identifier[key] , identifier[LEGACY_NONCE] , identifier[digest_method] ,
)
keyword[return] {
literal[string] : identifier[b64encode] ( identifier[encoded_key] ). identifier[decode] ( literal[string] ),
literal[string] : identifier[b64encode] ( identifier[ciphertext] ). identifier[decode] ( literal[string] ),
literal[string] : identifier[codecs] . identifier[encode] ( identifier[hmac] , literal[string] ),
literal[string] : identifier[digest_method] ,
} | def seal_aes_ctr_legacy(key_service, secret, digest_method=DEFAULT_DIGEST):
"""
Encrypts `secret` using the key service.
You can decrypt with the companion method `open_aes_ctr_legacy`.
"""
    # generate a 64-byte key.
# Half will be for data encryption, the other half for HMAC
(key, encoded_key) = key_service.generate_key_data(64)
(ciphertext, hmac) = _seal_aes_ctr(secret, key, LEGACY_NONCE, digest_method)
return {'key': b64encode(encoded_key).decode('utf-8'), 'contents': b64encode(ciphertext).decode('utf-8'), 'hmac': codecs.encode(hmac, 'hex_codec'), 'digest': digest_method} |
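
seal_aes_ctr_legacy rests on a 64-byte key split in half: one half encrypts, the other authenticates. A self-contained sketch of that encrypt-then-MAC pattern using recent versions of the cryptography package plus the standard library — the key split and the fixed counter start shown here are assumptions for illustration, not the exact credstash internals:

import hashlib
import hmac
import os
from base64 import b64encode
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

def seal_sketch(secret):
    key = os.urandom(64)                    # stand-in for key_service.generate_key_data(64)
    enc_key, mac_key = key[:32], key[32:]   # assumed split: AES-256 half + HMAC half
    nonce = (1).to_bytes(16, 'big')         # assumed fixed legacy counter start
    encryptor = Cipher(algorithms.AES(enc_key), modes.CTR(nonce)).encryptor()
    ciphertext = encryptor.update(secret) + encryptor.finalize()
    tag = hmac.new(mac_key, ciphertext, hashlib.sha256).hexdigest()
    return {'contents': b64encode(ciphertext).decode('utf-8'), 'hmac': tag}

print(seal_sketch(b'example secret'))
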
def process_pem_rsakey(self, data, name, idx):
"""
Processes PEM encoded RSA key
:param data:
:param name:
:param idx:
:return:
"""
from cryptography.hazmat.primitives.serialization import load_der_public_key
from cryptography.hazmat.primitives.serialization import load_der_private_key
try:
if startswith(data, '-----BEGIN RSA PUBLIC KEY') or startswith(data, '-----BEGIN PUBLIC KEY'):
rsa = load_der_public_key(pem_to_der(data), self.get_backend())
public_numbers = rsa.public_numbers()
elif startswith(data, '-----BEGIN RSA PRIVATE KEY') or startswith(data, '-----BEGIN PRIVATE KEY'):
rsa = load_der_private_key(pem_to_der(data), None, self.get_backend())
public_numbers = rsa.private_numbers().public_numbers
else:
return None
self.num_rsa_keys += 1
self.num_rsa += 1
js = collections.OrderedDict()
js['type'] = 'pem-rsa-key'
js['fname'] = name
js['idx'] = idx
js['pem'] = data
js['e'] = '0x%x' % public_numbers.e
js['n'] = '0x%x' % public_numbers.n
if self.has_fingerprint(public_numbers.n):
logger.warning('Fingerprint found in PEM RSA key %s ' % name)
self.mark_and_add_effort(public_numbers.n, js)
if self.do_print:
print(json.dumps(js))
return TestResult(js)
except Exception as e:
logger.debug('Pubkey loading error: %s : %s [%s] : %s' % (name, idx, data[:20], e))
self.trace_logger.log(e) | def function[process_pem_rsakey, parameter[self, data, name, idx]]:
    constant[
    Processes a PEM-encoded RSA key.
    :param data: PEM-encoded key material
    :param name: source name (e.g. file name), used in log messages
    :param idx: index of the key within the source
    :return: TestResult with the key metadata, or None if the data is not a PEM RSA key
    ]
from relative_module[cryptography.hazmat.primitives.serialization] import module[load_der_public_key]
from relative_module[cryptography.hazmat.primitives.serialization] import module[load_der_private_key]
<ast.Try object at 0x7da18c4cc940> | keyword[def] identifier[process_pem_rsakey] ( identifier[self] , identifier[data] , identifier[name] , identifier[idx] ):
literal[string]
keyword[from] identifier[cryptography] . identifier[hazmat] . identifier[primitives] . identifier[serialization] keyword[import] identifier[load_der_public_key]
keyword[from] identifier[cryptography] . identifier[hazmat] . identifier[primitives] . identifier[serialization] keyword[import] identifier[load_der_private_key]
keyword[try] :
keyword[if] identifier[startswith] ( identifier[data] , literal[string] ) keyword[or] identifier[startswith] ( identifier[data] , literal[string] ):
identifier[rsa] = identifier[load_der_public_key] ( identifier[pem_to_der] ( identifier[data] ), identifier[self] . identifier[get_backend] ())
identifier[public_numbers] = identifier[rsa] . identifier[public_numbers] ()
keyword[elif] identifier[startswith] ( identifier[data] , literal[string] ) keyword[or] identifier[startswith] ( identifier[data] , literal[string] ):
identifier[rsa] = identifier[load_der_private_key] ( identifier[pem_to_der] ( identifier[data] ), keyword[None] , identifier[self] . identifier[get_backend] ())
identifier[public_numbers] = identifier[rsa] . identifier[private_numbers] (). identifier[public_numbers]
keyword[else] :
keyword[return] keyword[None]
identifier[self] . identifier[num_rsa_keys] += literal[int]
identifier[self] . identifier[num_rsa] += literal[int]
identifier[js] = identifier[collections] . identifier[OrderedDict] ()
identifier[js] [ literal[string] ]= literal[string]
identifier[js] [ literal[string] ]= identifier[name]
identifier[js] [ literal[string] ]= identifier[idx]
identifier[js] [ literal[string] ]= identifier[data]
identifier[js] [ literal[string] ]= literal[string] % identifier[public_numbers] . identifier[e]
identifier[js] [ literal[string] ]= literal[string] % identifier[public_numbers] . identifier[n]
keyword[if] identifier[self] . identifier[has_fingerprint] ( identifier[public_numbers] . identifier[n] ):
identifier[logger] . identifier[warning] ( literal[string] % identifier[name] )
identifier[self] . identifier[mark_and_add_effort] ( identifier[public_numbers] . identifier[n] , identifier[js] )
keyword[if] identifier[self] . identifier[do_print] :
identifier[print] ( identifier[json] . identifier[dumps] ( identifier[js] ))
keyword[return] identifier[TestResult] ( identifier[js] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[debug] ( literal[string] %( identifier[name] , identifier[idx] , identifier[data] [: literal[int] ], identifier[e] ))
identifier[self] . identifier[trace_logger] . identifier[log] ( identifier[e] ) | def process_pem_rsakey(self, data, name, idx):
"""
Processes PEM encoded RSA key
:param data:
:param name:
:param idx:
:return:
"""
from cryptography.hazmat.primitives.serialization import load_der_public_key
from cryptography.hazmat.primitives.serialization import load_der_private_key
try:
if startswith(data, '-----BEGIN RSA PUBLIC KEY') or startswith(data, '-----BEGIN PUBLIC KEY'):
rsa = load_der_public_key(pem_to_der(data), self.get_backend())
public_numbers = rsa.public_numbers() # depends on [control=['if'], data=[]]
elif startswith(data, '-----BEGIN RSA PRIVATE KEY') or startswith(data, '-----BEGIN PRIVATE KEY'):
rsa = load_der_private_key(pem_to_der(data), None, self.get_backend())
public_numbers = rsa.private_numbers().public_numbers # depends on [control=['if'], data=[]]
else:
return None
self.num_rsa_keys += 1
self.num_rsa += 1
js = collections.OrderedDict()
js['type'] = 'pem-rsa-key'
js['fname'] = name
js['idx'] = idx
js['pem'] = data
js['e'] = '0x%x' % public_numbers.e
js['n'] = '0x%x' % public_numbers.n
if self.has_fingerprint(public_numbers.n):
logger.warning('Fingerprint found in PEM RSA key %s ' % name)
self.mark_and_add_effort(public_numbers.n, js)
if self.do_print:
print(json.dumps(js)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return TestResult(js) # depends on [control=['try'], data=[]]
except Exception as e:
logger.debug('Pubkey loading error: %s : %s [%s] : %s' % (name, idx, data[:20], e))
self.trace_logger.log(e) # depends on [control=['except'], data=['e']] |
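
The decision logic in process_pem_rsakey boils down to: load the PEM, pull out the RSA public numbers, inspect e and n. A self-contained round trip with the cryptography package — key generation is included only so the snippet runs on its own, and load_pem_public_key stands in for the module's pem_to_der + load_der_public_key pair:

from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import (
    Encoding, PublicFormat, load_pem_public_key)

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = private_key.public_key().public_bytes(
    Encoding.PEM, PublicFormat.SubjectPublicKeyInfo)

public_numbers = load_pem_public_key(pem).public_numbers()
print('e = 0x%x' % public_numbers.e)              # 0x10001
print('n bits =', public_numbers.n.bit_length())  # 2048
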
def initialize(self, num_of_paths=None, grid=None, seed=None):
"""initialize StackedConsumer"""
super(StackedConsumer, self).initialize(grid, num_of_paths, seed)
for c in self.consumers:
c.initialize(grid, num_of_paths, seed)
self.state = [c.state for c in self.consumers] | def function[initialize, parameter[self, num_of_paths, grid, seed]]:
constant[initialize StackedConsumer]
call[call[name[super], parameter[name[StackedConsumer], name[self]]].initialize, parameter[name[grid], name[num_of_paths], name[seed]]]
for taget[name[c]] in starred[name[self].consumers] begin[:]
call[name[c].initialize, parameter[name[grid], name[num_of_paths], name[seed]]]
name[self].state assign[=] <ast.ListComp object at 0x7da20c993d00> | keyword[def] identifier[initialize] ( identifier[self] , identifier[num_of_paths] = keyword[None] , identifier[grid] = keyword[None] , identifier[seed] = keyword[None] ):
literal[string]
identifier[super] ( identifier[StackedConsumer] , identifier[self] ). identifier[initialize] ( identifier[grid] , identifier[num_of_paths] , identifier[seed] )
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[consumers] :
identifier[c] . identifier[initialize] ( identifier[grid] , identifier[num_of_paths] , identifier[seed] )
identifier[self] . identifier[state] =[ identifier[c] . identifier[state] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[consumers] ] | def initialize(self, num_of_paths=None, grid=None, seed=None):
"""initialize StackedConsumer"""
super(StackedConsumer, self).initialize(grid, num_of_paths, seed)
for c in self.consumers:
c.initialize(grid, num_of_paths, seed) # depends on [control=['for'], data=['c']]
self.state = [c.state for c in self.consumers] |
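
One thing worth noting: the signature above takes (num_of_paths, grid, seed) but forwards (grid, num_of_paths, seed) to the superclass and the children, so the base class presumably expects grid first. The composite pattern itself — fan initialize() out to every child, then mirror their state — looks like this in a stripped-down, self-contained sketch (class names assumed for illustration):

class Consumer:
    def initialize(self, grid, num_of_paths, seed):
        self.state = []

class StackedSketch(Consumer):
    def __init__(self, *consumers):
        self.consumers = list(consumers)

    def initialize(self, grid, num_of_paths, seed):
        super().initialize(grid, num_of_paths, seed)
        for c in self.consumers:
            c.initialize(grid, num_of_paths, seed)  # same args fan out to every child
        self.state = [c.state for c in self.consumers]

stack = StackedSketch(Consumer(), Consumer())
stack.initialize(grid=None, num_of_paths=10, seed=42)
print(stack.state)  # [[], []]
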
def in_channels(m: nn.Module) -> int:
    "Return the number of input channels of the first weight layer in `m`."
for l in flatten_model(m):
if hasattr(l, 'weight'): return l.weight.shape[1]
raise Exception('No weight layer') | def function[in_channels, parameter[m]]:
    constant[Return the number of input channels of the first weight layer in `m`.]
for taget[name[l]] in starred[call[name[flatten_model], parameter[name[m]]]] begin[:]
if call[name[hasattr], parameter[name[l], constant[weight]]] begin[:]
return[call[name[l].weight.shape][constant[1]]]
<ast.Raise object at 0x7da1b1e9a860> | keyword[def] identifier[in_channels] ( identifier[m] : identifier[nn] . identifier[Module] )-> identifier[int] :
literal[string]
keyword[for] identifier[l] keyword[in] identifier[flatten_model] ( identifier[m] ):
keyword[if] identifier[hasattr] ( identifier[l] , literal[string] ): keyword[return] identifier[l] . identifier[weight] . identifier[shape] [ literal[int] ]
keyword[raise] identifier[Exception] ( literal[string] ) | def in_channels(m: nn.Module) -> List[int]:
"""Return the shape of the first weight layer in `m`."""
for l in flatten_model(m):
if hasattr(l, 'weight'):
return l.weight.shape[1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['l']]
raise Exception('No weight layer') |
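
flatten_model is fastai's helper returning the leaf modules of m, and dimension 1 of a weight tensor is the input-channel axis for Conv2d (and the in-features axis for Linear). An equivalent sketch in plain PyTorch, with the leaf walk inlined:

import torch.nn as nn

def first_in_channels(m):
    # Leaves are modules with no children; skip 1-d weights such as BatchNorm's.
    for leaf in (x for x in m.modules() if not list(x.children())):
        if hasattr(leaf, 'weight') and leaf.weight.dim() > 1:
            return leaf.weight.shape[1]
    raise ValueError('No weight layer')

net = nn.Sequential(nn.Conv2d(3, 16, kernel_size=3), nn.ReLU(), nn.Linear(16, 4))
print(first_in_channels(net))  # 3 -- the Conv2d input channels
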
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
'''
    Provide a set of crystals along the volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scaling
    is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems
'''
scale = linspace(lo, hi, num=n)
if scale_volumes:
scale **= (1.0/3.0)
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for n, s in enumerate(scale):
systems[n].set_cell(s*uc, scale_atoms=True)
return systems | def function[scan_volumes, parameter[cryst, lo, hi, n, scale_volumes]]:
constant[
    Provide a set of crystals along the volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scaling
    is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems
]
variable[scale] assign[=] call[name[linspace], parameter[name[lo], name[hi]]]
if name[scale_volumes] begin[:]
<ast.AugAssign object at 0x7da1b0ca7dc0>
variable[uc] assign[=] call[name[cryst].get_cell, parameter[]]
variable[systems] assign[=] <ast.ListComp object at 0x7da1b0ca52a0>
for taget[tuple[[<ast.Name object at 0x7da1b0ca55d0>, <ast.Name object at 0x7da1b0ca6500>]]] in starred[call[name[enumerate], parameter[name[scale]]]] begin[:]
call[call[name[systems]][name[n]].set_cell, parameter[binary_operation[name[s] * name[uc]]]]
return[name[systems]] | keyword[def] identifier[scan_volumes] ( identifier[cryst] , identifier[lo] = literal[int] , identifier[hi] = literal[int] , identifier[n] = literal[int] , identifier[scale_volumes] = keyword[True] ):
literal[string]
identifier[scale] = identifier[linspace] ( identifier[lo] , identifier[hi] , identifier[num] = identifier[n] )
keyword[if] identifier[scale_volumes] :
identifier[scale] **=( literal[int] / literal[int] )
identifier[uc] = identifier[cryst] . identifier[get_cell] ()
identifier[systems] =[ identifier[Atoms] ( identifier[cryst] ) keyword[for] identifier[s] keyword[in] identifier[scale] ]
keyword[for] identifier[n] , identifier[s] keyword[in] identifier[enumerate] ( identifier[scale] ):
identifier[systems] [ identifier[n] ]. identifier[set_cell] ( identifier[s] * identifier[uc] , identifier[scale_atoms] = keyword[True] )
keyword[return] identifier[systems] | def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
"""
    Provide a set of crystals along the volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scaling
    is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems
"""
scale = linspace(lo, hi, num=n)
if scale_volumes:
scale **= 1.0 / 3.0 # depends on [control=['if'], data=[]]
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for (n, s) in enumerate(scale):
systems[n].set_cell(s * uc, scale_atoms=True) # depends on [control=['for'], data=[]]
return systems |
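
The cube root is the key step: each lattice vector is scaled by (V/V0)^(1/3), so the cell volume itself moves linearly from lo to hi. A numpy-only check of that relationship, with ASE's Atoms left out:

import numpy as np

lo, hi, n = 0.98, 1.02, 5
axis_scale = np.linspace(lo, hi, n) ** (1.0 / 3.0)  # per-axis factors
uc = 4.05 * np.eye(3)                               # toy cubic cell
ratios = [np.linalg.det(s * uc) / np.linalg.det(uc) for s in axis_scale]
print(np.round(ratios, 4))  # [0.98 0.99 1.   1.01 1.02]
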
def record_received(self, msg):
"""Handle ALDB record received from device."""
release_lock = False
userdata = msg.userdata
rec = ALDBRecord.create_from_userdata(userdata)
self._records[rec.mem_addr] = rec
_LOGGER.debug('ALDB Record: %s', rec)
rec_count = self._load_action.rec_count
if rec_count == 1 or self._have_all_records():
release_lock = True
if self._is_first_record(rec):
self._mem_addr = rec.mem_addr
if release_lock and self._rec_mgr_lock.locked():
_LOGGER.debug('Releasing lock because record received')
self._rec_mgr_lock.release() | def function[record_received, parameter[self, msg]]:
constant[Handle ALDB record received from device.]
variable[release_lock] assign[=] constant[False]
variable[userdata] assign[=] name[msg].userdata
variable[rec] assign[=] call[name[ALDBRecord].create_from_userdata, parameter[name[userdata]]]
call[name[self]._records][name[rec].mem_addr] assign[=] name[rec]
call[name[_LOGGER].debug, parameter[constant[ALDB Record: %s], name[rec]]]
variable[rec_count] assign[=] name[self]._load_action.rec_count
if <ast.BoolOp object at 0x7da1b1a21930> begin[:]
variable[release_lock] assign[=] constant[True]
if call[name[self]._is_first_record, parameter[name[rec]]] begin[:]
name[self]._mem_addr assign[=] name[rec].mem_addr
if <ast.BoolOp object at 0x7da1b1a20820> begin[:]
call[name[_LOGGER].debug, parameter[constant[Releasing lock because record received]]]
call[name[self]._rec_mgr_lock.release, parameter[]] | keyword[def] identifier[record_received] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[release_lock] = keyword[False]
identifier[userdata] = identifier[msg] . identifier[userdata]
identifier[rec] = identifier[ALDBRecord] . identifier[create_from_userdata] ( identifier[userdata] )
identifier[self] . identifier[_records] [ identifier[rec] . identifier[mem_addr] ]= identifier[rec]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[rec] )
identifier[rec_count] = identifier[self] . identifier[_load_action] . identifier[rec_count]
keyword[if] identifier[rec_count] == literal[int] keyword[or] identifier[self] . identifier[_have_all_records] ():
identifier[release_lock] = keyword[True]
keyword[if] identifier[self] . identifier[_is_first_record] ( identifier[rec] ):
identifier[self] . identifier[_mem_addr] = identifier[rec] . identifier[mem_addr]
keyword[if] identifier[release_lock] keyword[and] identifier[self] . identifier[_rec_mgr_lock] . identifier[locked] ():
identifier[_LOGGER] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_rec_mgr_lock] . identifier[release] () | def record_received(self, msg):
"""Handle ALDB record received from device."""
release_lock = False
userdata = msg.userdata
rec = ALDBRecord.create_from_userdata(userdata)
self._records[rec.mem_addr] = rec
_LOGGER.debug('ALDB Record: %s', rec)
rec_count = self._load_action.rec_count
if rec_count == 1 or self._have_all_records():
release_lock = True # depends on [control=['if'], data=[]]
if self._is_first_record(rec):
self._mem_addr = rec.mem_addr # depends on [control=['if'], data=[]]
if release_lock and self._rec_mgr_lock.locked():
_LOGGER.debug('Releasing lock because record received')
self._rec_mgr_lock.release() # depends on [control=['if'], data=[]] |
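
The handler's shape — stash the record, then release a lock once the expected set is complete so a waiting reader can proceed — is a generic gather-then-signal pattern. A stripped-down sketch with threading.Lock (the insteonplm version additionally tracks first records and memory addresses):

import threading

class RecordGather:
    def __init__(self, expected):
        self._expected = expected
        self._records = {}
        self._lock = threading.Lock()
        self._lock.acquire()  # held until the full set arrives

    def record_received(self, mem_addr, rec):
        self._records[mem_addr] = rec
        if len(self._records) >= self._expected and self._lock.locked():
            self._lock.release()  # signal the waiter

    def wait(self):
        with self._lock:  # blocks until record_received releases
            return dict(self._records)

g = RecordGather(expected=2)
g.record_received(0x0FFF, 'rec-a')
g.record_received(0x0FF7, 'rec-b')
print(g.wait())
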
def JMS_to_Fierz_lep(C, ddll):
"""From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
if ddll[:2] == 'uc':
s = uflav[ddll[0]]
b = uflav[ddll[1]]
q = 'u'
else:
s = dflav[ddll[0]]
b = dflav[ddll[1]]
q = 'd'
l = lflav[ddll[4:ddll.find('n')]]
lp = lflav[ddll[ddll.find('_',5)+1:len(ddll)]]
ind = ddll.replace('l_','').replace('nu_','')
return {
'F' + ind + '9' : C["V" + q + "eLR"][s, b, l, lp] / 2
+ C["Ve" + q + "LL"][l, lp, s, b] / 2,
'F' + ind + '10' : C["V" + q + "eLR"][s, b, l, lp] / 2
- C["Ve" + q + "LL"][l, lp, s, b] / 2,
'F' + ind + 'S' : C["Se" + q + "RL"][lp, l, b, s].conj() / 2
+ C["Se" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + 'P' : - C["Se" + q + "RL"][lp, l, b, s].conj() / 2
+ C["Se" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + 'T' : C["Te" + q + "RR"][l, lp, s, b] / 2
+ C["Te" + q + "RR"][lp, l, b, s].conj() / 2,
'F' + ind + 'T5' : C["Te" + q + "RR"][l, lp, s, b] / 2
- C["Te" + q + "RR"][lp, l, b, s].conj() / 2,
'F' + ind + '9p' : C["Ve" + q + "LR"][l, lp, s, b] / 2
+ C["Ve" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + '10p' : -C["Ve" + q + "LR"][l, lp, s, b] / 2
+ C["Ve" + q + "RR"][l, lp, s, b] / 2,
'F' + ind + 'Sp' : C["Se" + q + "RL"][l, lp, s, b] / 2
+ C["Se" + q + "RR"][lp, l, b, s].conj() / 2,
'F' + ind + 'Pp' : C["Se" + q + "RL"][l, lp, s, b] / 2
- C["Se" + q + "RR"][lp, l, b, s].conj() / 2,
} | def function[JMS_to_Fierz_lep, parameter[C, ddll]]:
constant[From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.]
if compare[call[name[ddll]][<ast.Slice object at 0x7da1b1991360>] equal[==] constant[uc]] begin[:]
variable[s] assign[=] call[name[uflav]][call[name[ddll]][constant[0]]]
variable[b] assign[=] call[name[uflav]][call[name[ddll]][constant[1]]]
variable[q] assign[=] constant[u]
variable[l] assign[=] call[name[lflav]][call[name[ddll]][<ast.Slice object at 0x7da1b1993a90>]]
variable[lp] assign[=] call[name[lflav]][call[name[ddll]][<ast.Slice object at 0x7da1b1991ab0>]]
variable[ind] assign[=] call[call[name[ddll].replace, parameter[constant[l_], constant[]]].replace, parameter[constant[nu_], constant[]]]
return[dictionary[[<ast.BinOp object at 0x7da1b1993400>, <ast.BinOp object at 0x7da1b1992e30>, <ast.BinOp object at 0x7da1b1992200>, <ast.BinOp object at 0x7da1b1991000>, <ast.BinOp object at 0x7da1b19902b0>, <ast.BinOp object at 0x7da1b19903d0>, <ast.BinOp object at 0x7da1b19938b0>, <ast.BinOp object at 0x7da1b1991c90>, <ast.BinOp object at 0x7da1b1991090>, <ast.BinOp object at 0x7da1b1990dc0>], [<ast.BinOp object at 0x7da1b1993190>, <ast.BinOp object at 0x7da1b1992fb0>, <ast.BinOp object at 0x7da1b1993820>, <ast.BinOp object at 0x7da1b19925f0>, <ast.BinOp object at 0x7da1b1990040>, <ast.BinOp object at 0x7da1b1992890>, <ast.BinOp object at 0x7da1b1b6b160>, <ast.BinOp object at 0x7da1b1b6a350>, <ast.BinOp object at 0x7da1b1981cc0>, <ast.BinOp object at 0x7da1b190c2e0>]]] | keyword[def] identifier[JMS_to_Fierz_lep] ( identifier[C] , identifier[ddll] ):
literal[string]
keyword[if] identifier[ddll] [: literal[int] ]== literal[string] :
identifier[s] = identifier[uflav] [ identifier[ddll] [ literal[int] ]]
identifier[b] = identifier[uflav] [ identifier[ddll] [ literal[int] ]]
identifier[q] = literal[string]
keyword[else] :
identifier[s] = identifier[dflav] [ identifier[ddll] [ literal[int] ]]
identifier[b] = identifier[dflav] [ identifier[ddll] [ literal[int] ]]
identifier[q] = literal[string]
identifier[l] = identifier[lflav] [ identifier[ddll] [ literal[int] : identifier[ddll] . identifier[find] ( literal[string] )]]
identifier[lp] = identifier[lflav] [ identifier[ddll] [ identifier[ddll] . identifier[find] ( literal[string] , literal[int] )+ literal[int] : identifier[len] ( identifier[ddll] )]]
identifier[ind] = identifier[ddll] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[return] {
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[s] , identifier[b] , identifier[l] , identifier[lp] ]/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[s] , identifier[b] , identifier[l] , identifier[lp] ]/ literal[int]
- identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[lp] , identifier[l] , identifier[b] , identifier[s] ]. identifier[conj] ()/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int] ,
literal[string] + identifier[ind] + literal[string] :- identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[lp] , identifier[l] , identifier[b] , identifier[s] ]. identifier[conj] ()/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[lp] , identifier[l] , identifier[b] , identifier[s] ]. identifier[conj] ()/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int]
- identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[lp] , identifier[l] , identifier[b] , identifier[s] ]. identifier[conj] ()/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int] ,
literal[string] + identifier[ind] + literal[string] :- identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int]
+ identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[lp] , identifier[l] , identifier[b] , identifier[s] ]. identifier[conj] ()/ literal[int] ,
literal[string] + identifier[ind] + literal[string] : identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[l] , identifier[lp] , identifier[s] , identifier[b] ]/ literal[int]
- identifier[C] [ literal[string] + identifier[q] + literal[string] ][ identifier[lp] , identifier[l] , identifier[b] , identifier[s] ]. identifier[conj] ()/ literal[int] ,
} | def JMS_to_Fierz_lep(C, ddll):
"""From JMS to semileptonic Fierz basis for Class V.
`ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc."""
if ddll[:2] == 'uc':
s = uflav[ddll[0]]
b = uflav[ddll[1]]
q = 'u' # depends on [control=['if'], data=[]]
else:
s = dflav[ddll[0]]
b = dflav[ddll[1]]
q = 'd'
l = lflav[ddll[4:ddll.find('n')]]
lp = lflav[ddll[ddll.find('_', 5) + 1:len(ddll)]]
ind = ddll.replace('l_', '').replace('nu_', '')
return {'F' + ind + '9': C['V' + q + 'eLR'][s, b, l, lp] / 2 + C['Ve' + q + 'LL'][l, lp, s, b] / 2, 'F' + ind + '10': C['V' + q + 'eLR'][s, b, l, lp] / 2 - C['Ve' + q + 'LL'][l, lp, s, b] / 2, 'F' + ind + 'S': C['Se' + q + 'RL'][lp, l, b, s].conj() / 2 + C['Se' + q + 'RR'][l, lp, s, b] / 2, 'F' + ind + 'P': -C['Se' + q + 'RL'][lp, l, b, s].conj() / 2 + C['Se' + q + 'RR'][l, lp, s, b] / 2, 'F' + ind + 'T': C['Te' + q + 'RR'][l, lp, s, b] / 2 + C['Te' + q + 'RR'][lp, l, b, s].conj() / 2, 'F' + ind + 'T5': C['Te' + q + 'RR'][l, lp, s, b] / 2 - C['Te' + q + 'RR'][lp, l, b, s].conj() / 2, 'F' + ind + '9p': C['Ve' + q + 'LR'][l, lp, s, b] / 2 + C['Ve' + q + 'RR'][l, lp, s, b] / 2, 'F' + ind + '10p': -C['Ve' + q + 'LR'][l, lp, s, b] / 2 + C['Ve' + q + 'RR'][l, lp, s, b] / 2, 'F' + ind + 'Sp': C['Se' + q + 'RL'][l, lp, s, b] / 2 + C['Se' + q + 'RR'][lp, l, b, s].conj() / 2, 'F' + ind + 'Pp': C['Se' + q + 'RL'][l, lp, s, b] / 2 - C['Se' + q + 'RR'][lp, l, b, s].conj() / 2} |
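
The ddll label is parsed positionally: the first two characters name the quark pair, the token between 'l_' and 'n' is the charged lepton, and everything after the second underscore is the neutrino flavour. A self-contained sketch of just that parsing for down-type labels like 'sbl_enu_tau' (the 'uc' branch uses uflav instead); the flavour index maps are assumed to follow the usual d/s/b and e/mu/tau ordering, so verify them against the module's uflav/dflav/lflav:

dflav = {'d': 0, 's': 1, 'b': 2}      # assumed down-quark indices
lflav = {'e': 0, 'mu': 1, 'tau': 2}   # assumed lepton indices

def parse_ddll(ddll):
    s, b = dflav[ddll[0]], dflav[ddll[1]]
    l = lflav[ddll[4:ddll.find('n')]]
    lp = lflav[ddll[ddll.find('_', 5) + 1:]]
    ind = ddll.replace('l_', '').replace('nu_', '')
    return s, b, l, lp, ind

print(parse_ddll('sbl_enu_tau'))  # (1, 2, 0, 2, 'sbetau')
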
def assign_per_atom_sasa(self):
"""Make a dictionary with SASA assigned to each ligand atom, stored as list of SASA values over
the simulation time."""
atom_names= [atom.name for atom in self.topology_data.universe.ligand_noH.atoms]
sasa_dict = {}
for atom in range(0,self.topology_data.universe.ligand_noH.n_atoms):
sasa_dict[atom_names[atom]]=[self.sasa[i][atom] for i in range(len(self.sasa))]
return sasa_dict | def function[assign_per_atom_sasa, parameter[self]]:
    constant[Make a dictionary with SASA assigned to each ligand atom, stored as a list of SASA values over
    the simulation time.]
variable[atom_names] assign[=] <ast.ListComp object at 0x7da18bc72a70>
variable[sasa_dict] assign[=] dictionary[[], []]
for taget[name[atom]] in starred[call[name[range], parameter[constant[0], name[self].topology_data.universe.ligand_noH.n_atoms]]] begin[:]
call[name[sasa_dict]][call[name[atom_names]][name[atom]]] assign[=] <ast.ListComp object at 0x7da18bc71480>
return[name[sasa_dict]] | keyword[def] identifier[assign_per_atom_sasa] ( identifier[self] ):
literal[string]
identifier[atom_names] =[ identifier[atom] . identifier[name] keyword[for] identifier[atom] keyword[in] identifier[self] . identifier[topology_data] . identifier[universe] . identifier[ligand_noH] . identifier[atoms] ]
identifier[sasa_dict] ={}
keyword[for] identifier[atom] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[topology_data] . identifier[universe] . identifier[ligand_noH] . identifier[n_atoms] ):
identifier[sasa_dict] [ identifier[atom_names] [ identifier[atom] ]]=[ identifier[self] . identifier[sasa] [ identifier[i] ][ identifier[atom] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[sasa] ))]
keyword[return] identifier[sasa_dict] | def assign_per_atom_sasa(self):
"""Make a dictionary with SASA assigned to each ligand atom, stored as list of SASA values over
the simulation time."""
atom_names = [atom.name for atom in self.topology_data.universe.ligand_noH.atoms]
sasa_dict = {}
for atom in range(0, self.topology_data.universe.ligand_noH.n_atoms):
sasa_dict[atom_names[atom]] = [self.sasa[i][atom] for i in range(len(self.sasa))] # depends on [control=['for'], data=['atom']]
return sasa_dict |
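
Shape-wise this is a transpose: self.sasa is frames x atoms, and the result maps each atom name to its time series. With numpy the same reshaping is a column slice — a small stand-alone sketch:

import numpy as np

sasa = np.array([[1.0, 2.0],
                 [3.0, 4.0],
                 [5.0, 6.0]])          # 3 frames x 2 atoms
atom_names = ['C1', 'O1']
per_atom = {name: sasa[:, i].tolist() for i, name in enumerate(atom_names)}
print(per_atom)  # {'C1': [1.0, 3.0, 5.0], 'O1': [2.0, 4.0, 6.0]}
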