code (string; length 75 to 104k) | code_sememe (string; length 47 to 309k) | token_type (string; length 215 to 214k) | code_dependency (string; length 75 to 155k)
---|---|---|---
def inserir(self, name):
"""Inserts a new Brand and returns its identifier
    :param name: Brand name. String with a minimum of 3 and a maximum of 100 characters
:return: Dictionary with the following structure:
::
{'marca': {'id': < id_brand >}}
    :raise InvalidParameterError: Name is null or invalid.
:raise NomeMarcaDuplicadoError: There is already a registered Brand with the value of name.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
brand_map = dict()
brand_map['name'] = name
code, xml = self.submit({'brand': brand_map}, 'POST', 'brand/')
return self.response(code, xml) | def function[inserir, parameter[self, name]]:
constant[Inserts a new Brand and returns its identifier
:param name: Brand name. String with a minimum of 3 and a maximum of 100 characters
:return: Dictionary with the following structure:
::
{'marca': {'id': < id_brand >}}
:raise InvalidParameterError: Name is null or invalid.
:raise NomeMarcaDuplicadoError: There is already a registered Brand with the value of name.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
]
variable[brand_map] assign[=] call[name[dict], parameter[]]
call[name[brand_map]][constant[name]] assign[=] name[name]
<ast.Tuple object at 0x7da1b2344130> assign[=] call[name[self].submit, parameter[dictionary[[<ast.Constant object at 0x7da1b2344d60>], [<ast.Name object at 0x7da2047e9b40>]], constant[POST], constant[brand/]]]
return[call[name[self].response, parameter[name[code], name[xml]]]] | keyword[def] identifier[inserir] ( identifier[self] , identifier[name] ):
literal[string]
identifier[brand_map] = identifier[dict] ()
identifier[brand_map] [ literal[string] ]= identifier[name]
identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ({ literal[string] : identifier[brand_map] }, literal[string] , literal[string] )
keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] ) | def inserir(self, name):
"""Inserts a new Brand and returns its identifier
    :param name: Brand name. String with a minimum of 3 and a maximum of 100 characters
:return: Dictionary with the following structure:
::
{'marca': {'id': < id_brand >}}
    :raise InvalidParameterError: Name is null or invalid.
:raise NomeMarcaDuplicadoError: There is already a registered Brand with the value of name.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
brand_map = dict()
brand_map['name'] = name
(code, xml) = self.submit({'brand': brand_map}, 'POST', 'brand/')
return self.response(code, xml) |
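The `inserir` method above is a thin wrapper over a `submit`/`response` request pipeline: build a payload dict, POST it, parse the XML reply. A minimal sketch of that contract with a stubbed transport, so it runs standalone (the fake client, status code and XML payload are all invented for illustration):

```python
# Hedged sketch: FakeBrandClient mimics the submit()/response() contract that
# inserir() relies on; the real client talks to a NetworkAPI server over HTTP.
class FakeBrandClient:
    def submit(self, payload, method, path):
        # Pretend the server accepted the POST and allocated id 42.
        assert method == 'POST' and path == 'brand/'
        return 200, '<marca><id>42</id></marca>'

    def response(self, code, xml):
        # The real client parses the XML; here we fake the documented shape.
        return {'marca': {'id': 42}}

    def inserir(self, name):
        brand_map = {'name': name}
        code, xml = self.submit({'brand': brand_map}, 'POST', 'brand/')
        return self.response(code, xml)

print(FakeBrandClient().inserir('Acme'))  # {'marca': {'id': 42}}
```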
def delete_queue(self, queue_name):
"""
Delete a queue with the specified name.
    :param queue_name: Name of the queue to delete.
    :return: None
"""
self.connect()
channel = self.connection.channel()
channel.queue_delete(queue=queue_name)
self.close() | def function[delete_queue, parameter[self, queue_name]]:
constant[
Delete a queue with the specified name.
:param queue_name:
:return:
]
call[name[self].connect, parameter[]]
variable[channel] assign[=] call[name[self].connection.channel, parameter[]]
call[name[channel].queue_delete, parameter[]]
call[name[self].close, parameter[]] | keyword[def] identifier[delete_queue] ( identifier[self] , identifier[queue_name] ):
literal[string]
identifier[self] . identifier[connect] ()
identifier[channel] = identifier[self] . identifier[connection] . identifier[channel] ()
identifier[channel] . identifier[queue_delete] ( identifier[queue] = identifier[queue_name] )
identifier[self] . identifier[close] () | def delete_queue(self, queue_name):
"""
Delete a queue with the specified name.
    :param queue_name: Name of the queue to delete.
    :return: None
"""
self.connect()
channel = self.connection.channel()
channel.queue_delete(queue=queue_name)
self.close() |
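`delete_queue` assumes an object that owns `connect()`/`close()` and exposes the live connection. The `channel()`/`queue_delete(queue=...)` calls match pika's AMQP API, so a standalone equivalent might look like the sketch below (the broker address is an assumption, and a reachable RabbitMQ instance is required):

```python
# Standalone sketch using pika; the original class hides the connection
# lifecycle behind self.connect() and self.close().
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
channel.queue_delete(queue='my_queue')  # removes the queue and any messages in it
connection.close()
```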
def parse(filename):
""" parse a scene release string and return a dictionary of parsed values."""
    screensize = re.compile(r'720p|1080p', re.I)
    source = re.compile(
        r'\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)', re.I)
    year = re.compile(r'(1|2)\d{3}')
    series = re.compile(r's\d{1,3}e\d{1,3}', re.I)
    group = re.compile(r'[A-Za-z0-9]+$', re.I)
    video = re.compile(r'DVDR|Xvid|MP4|NTSC|PAL|[xh][\.\s]?264', re.I)
    audio = re.compile(r'AAC2[\.\s]0|AAC|AC3|DTS|DD5', re.I)
    # Raw strings matter here: in a plain string literal '\b' is a backspace
    # (0x08), which silently breaks the NF word-boundary match below.
    edition = re.compile(
        r'\.(UNRATED|DC|(Directors|EXTENDED)[\.\s](CUT|EDITION)|EXTENDED|3D|2D|\bNF\b)',
        re.I)
    tags = re.compile(
        r'\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)', re.I)
    release = re.compile(
        r'REAL[\.\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\.\s]NFO|DiRFiX|NFOFiX', re.I)
    subtitles = re.compile(
        r'\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)', re.I)
    language = re.compile(r'\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)', re.I)
title = filename
attrs = {'screenSize': screensize,
'source': source,
'year': year,
'series': series,
'release_group': group,
'video': video,
'audio': audio,
'edition': edition,
'tags': tags,
'release': release,
'subtitles': subtitles,
'language': language
}
data = {}
for attr in attrs:
match = methodcaller('search', filename)(attrs[attr])
if match:
matched = methodcaller('group')(match)
data[attr] = matched.strip('.')
title = re.sub(matched, '', title)
if 'series' in data:
s, e = re.split('e|E', data['series'])
# use lstrip to remove leading zeros
data['season'] = s[1:].lstrip('0')
data['episode'] = e.lstrip('0')
data['series'] = True
temptitle = title.replace('.', ' ').strip('-').strip()
    data['title'] = re.sub(r'\s{2,}', ' ', temptitle)
return data | def function[parse, parameter[filename]]:
constant[ parse a scene release string and return a dictionary of parsed values.]
variable[screensize] assign[=] call[name[re].compile, parameter[constant[720p|1080p], name[re].I]]
variable[source] assign[=] call[name[re].compile, parameter[constant[\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)], name[re].I]]
variable[year] assign[=] call[name[re].compile, parameter[constant[(1|2)\d{3}]]]
variable[series] assign[=] call[name[re].compile, parameter[constant[s\d{1,3}e\d{1,3}], name[re].I]]
variable[group] assign[=] call[name[re].compile, parameter[constant[[A-Za-z0-9]+$], name[re].I]]
variable[video] assign[=] call[name[re].compile, parameter[constant[DVDR|Xvid|MP4|NTSC|PAL|[xh][\.\s]?264], name[re].I]]
variable[audio] assign[=] call[name[re].compile, parameter[constant[AAC2[\.\s]0|AAC|AC3|DTS|DD5], name[re].I]]
variable[edition] assign[=] call[name[re].compile, parameter[constant[\.(UNRATED|DC|(Directors|EXTENDED)[\.\s](CUT|EDITION)|EXTENDED|3D|2D|NF)], name[re].I]]
variable[tags] assign[=] call[name[re].compile, parameter[constant[\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)], name[re].I]]
variable[release] assign[=] call[name[re].compile, parameter[constant[REAL[\.\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\.\s]NFO|DiRFiX|NFOFiX], name[re].I]]
variable[subtitles] assign[=] call[name[re].compile, parameter[constant[\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)], name[re].I]]
variable[language] assign[=] call[name[re].compile, parameter[constant[\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)], name[re].I]]
variable[title] assign[=] name[filename]
variable[attrs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1434610>, <ast.Constant object at 0x7da1b1436f20>, <ast.Constant object at 0x7da1b1437910>, <ast.Constant object at 0x7da1b1434940>, <ast.Constant object at 0x7da1b14363e0>, <ast.Constant object at 0x7da1b1437130>, <ast.Constant object at 0x7da1b1435180>, <ast.Constant object at 0x7da1b14379a0>, <ast.Constant object at 0x7da1b1434d30>, <ast.Constant object at 0x7da1b14353c0>, <ast.Constant object at 0x7da1b1436c80>, <ast.Constant object at 0x7da1b1435750>], [<ast.Name object at 0x7da1b1436e30>, <ast.Name object at 0x7da1b1436230>, <ast.Name object at 0x7da1b1437340>, <ast.Name object at 0x7da1b1437a30>, <ast.Name object at 0x7da1b1436920>, <ast.Name object at 0x7da1b14360e0>, <ast.Name object at 0x7da1b1435390>, <ast.Name object at 0x7da1b1435b70>, <ast.Name object at 0x7da1b14142e0>, <ast.Name object at 0x7da1b1416800>, <ast.Name object at 0x7da20c6aae60>, <ast.Name object at 0x7da20c6ab6a0>]]
variable[data] assign[=] dictionary[[], []]
for taget[name[attr]] in starred[name[attrs]] begin[:]
variable[match] assign[=] call[call[name[methodcaller], parameter[constant[search], name[filename]]], parameter[call[name[attrs]][name[attr]]]]
if name[match] begin[:]
variable[matched] assign[=] call[call[name[methodcaller], parameter[constant[group]]], parameter[name[match]]]
call[name[data]][name[attr]] assign[=] call[name[matched].strip, parameter[constant[.]]]
variable[title] assign[=] call[name[re].sub, parameter[name[matched], constant[], name[title]]]
if compare[constant[series] in name[data]] begin[:]
<ast.Tuple object at 0x7da20c6a8370> assign[=] call[name[re].split, parameter[constant[e|E], call[name[data]][constant[series]]]]
call[name[data]][constant[season]] assign[=] call[call[name[s]][<ast.Slice object at 0x7da20c6aa980>].lstrip, parameter[constant[0]]]
call[name[data]][constant[episode]] assign[=] call[name[e].lstrip, parameter[constant[0]]]
call[name[data]][constant[series]] assign[=] constant[True]
variable[temptitle] assign[=] call[call[call[name[title].replace, parameter[constant[.], constant[ ]]].strip, parameter[constant[-]]].strip, parameter[]]
call[name[data]][constant[title]] assign[=] call[name[re].sub, parameter[constant[\s{2,}], constant[ ], name[temptitle]]]
return[name[data]] | keyword[def] identifier[parse] ( identifier[filename] ):
literal[string]
identifier[screensize] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[I] )
identifier[source] = identifier[re] . identifier[compile] (
literal[string] , identifier[re] . identifier[I] )
identifier[year] = identifier[re] . identifier[compile] ( literal[string] )
identifier[series] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[I] )
identifier[group] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[I] )
identifier[video] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[I] )
identifier[audio] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[I] )
identifier[edition] = identifier[re] . identifier[compile] (
literal[string] ,
identifier[re] . identifier[I] )
identifier[tags] = identifier[re] . identifier[compile] (
literal[string] , identifier[re] . identifier[I] )
identifier[release] = identifier[re] . identifier[compile] (
literal[string] , identifier[re] . identifier[I] )
identifier[subtitles] = identifier[re] . identifier[compile] (
literal[string] , identifier[re] . identifier[I] )
identifier[language] = identifier[re] . identifier[compile] ( literal[string] , identifier[re] . identifier[I] )
identifier[title] = identifier[filename]
identifier[attrs] ={ literal[string] : identifier[screensize] ,
literal[string] : identifier[source] ,
literal[string] : identifier[year] ,
literal[string] : identifier[series] ,
literal[string] : identifier[group] ,
literal[string] : identifier[video] ,
literal[string] : identifier[audio] ,
literal[string] : identifier[edition] ,
literal[string] : identifier[tags] ,
literal[string] : identifier[release] ,
literal[string] : identifier[subtitles] ,
literal[string] : identifier[language]
}
identifier[data] ={}
keyword[for] identifier[attr] keyword[in] identifier[attrs] :
identifier[match] = identifier[methodcaller] ( literal[string] , identifier[filename] )( identifier[attrs] [ identifier[attr] ])
keyword[if] identifier[match] :
identifier[matched] = identifier[methodcaller] ( literal[string] )( identifier[match] )
identifier[data] [ identifier[attr] ]= identifier[matched] . identifier[strip] ( literal[string] )
identifier[title] = identifier[re] . identifier[sub] ( identifier[matched] , literal[string] , identifier[title] )
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[s] , identifier[e] = identifier[re] . identifier[split] ( literal[string] , identifier[data] [ literal[string] ])
identifier[data] [ literal[string] ]= identifier[s] [ literal[int] :]. identifier[lstrip] ( literal[string] )
identifier[data] [ literal[string] ]= identifier[e] . identifier[lstrip] ( literal[string] )
identifier[data] [ literal[string] ]= keyword[True]
identifier[temptitle] = identifier[title] . identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ( literal[string] ). identifier[strip] ()
identifier[data] [ literal[string] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[temptitle] )
keyword[return] identifier[data] | def parse(filename):
""" parse a scene release string and return a dictionary of parsed values."""
screensize = re.compile('720p|1080p', re.I)
source = re.compile('\\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)', re.I)
year = re.compile('(1|2)\\d{3}')
series = re.compile('s\\d{1,3}e\\d{1,3}', re.I)
group = re.compile('[A-Za-z0-9]+$', re.I)
video = re.compile('DVDR|Xvid|MP4|NTSC|PAL|[xh][\\.\\s]?264', re.I)
audio = re.compile('AAC2[\\.\\s]0|AAC|AC3|DTS|DD5', re.I)
    edition = re.compile('\\.(UNRATED|DC|(Directors|EXTENDED)[\\.\\s](CUT|EDITION)|EXTENDED|3D|2D|\\bNF\\b)', re.I)  # \b restored; the non-raw original produced a literal backspace (\x08)
tags = re.compile('\\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)', re.I)
release = re.compile('REAL[\\.\\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\\.\\s]NFO|DiRFiX|NFOFiX', re.I)
subtitles = re.compile('\\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)', re.I)
language = re.compile('\\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)', re.I)
title = filename
attrs = {'screenSize': screensize, 'source': source, 'year': year, 'series': series, 'release_group': group, 'video': video, 'audio': audio, 'edition': edition, 'tags': tags, 'release': release, 'subtitles': subtitles, 'language': language}
data = {}
for attr in attrs:
match = methodcaller('search', filename)(attrs[attr])
if match:
matched = methodcaller('group')(match)
data[attr] = matched.strip('.')
title = re.sub(matched, '', title) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']]
if 'series' in data:
(s, e) = re.split('e|E', data['series'])
# use lstrip to remove leading zeros
data['season'] = s[1:].lstrip('0')
data['episode'] = e.lstrip('0')
data['series'] = True # depends on [control=['if'], data=['data']]
temptitle = title.replace('.', ' ').strip('-').strip()
data['title'] = re.sub('\\s{2,}', ' ', temptitle)
return data |
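`parse` needs `re` and `operator.methodcaller` in scope to run. One subtlety worth knowing: `re.sub(matched, '', title)` treats the matched text as a pattern, so regex metacharacters inside a match (a dot, for instance) are re-interpreted when the title is cleaned. A quick usage sketch, with an invented release string; the exact output depends on which patterns hit:

```python
import re
from operator import methodcaller

# parse() as defined above must be in scope.
data = parse('Some.Show.S01E02.720p.HDTV.x264-GROUP')
print(data)
# Roughly: {'title': 'Some Show', 'screenSize': '720p', 'source': 'HDTV',
#  'video': 'x264', 'release_group': 'GROUP', 'series': True,
#  'season': '1', 'episode': '2'}
```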
def raise_for_status(self):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if occurred.
"""
if not self.ok:
reason = self.reason or 'No response from %s' % self.url
if not self.status_code:
raise HttpConnectionError(reason, response=self)
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error - %s - %s %s' % (
self.status_code, reason, self.request.method, self.url)
else:
http_error_msg = '%s Server Error - %s - %s %s' % (
self.status_code, reason, self.request.method, self.url)
raise HttpRequestException(http_error_msg, response=self) | def function[raise_for_status, parameter[self]]:
constant[Raises stored :class:`HTTPError` or :class:`URLError`, if occurred.
]
if <ast.UnaryOp object at 0x7da20c6a9bd0> begin[:]
variable[reason] assign[=] <ast.BoolOp object at 0x7da18bc72500>
if <ast.UnaryOp object at 0x7da18bc708b0> begin[:]
<ast.Raise object at 0x7da18bc70eb0>
if compare[constant[400] less_or_equal[<=] name[self].status_code] begin[:]
variable[http_error_msg] assign[=] binary_operation[constant[%s Client Error - %s - %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18bc734f0>, <ast.Name object at 0x7da18bc709a0>, <ast.Attribute object at 0x7da18bc738e0>, <ast.Attribute object at 0x7da18bc71f00>]]]
<ast.Raise object at 0x7da18bc71840> | keyword[def] identifier[raise_for_status] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[ok] :
identifier[reason] = identifier[self] . identifier[reason] keyword[or] literal[string] % identifier[self] . identifier[url]
keyword[if] keyword[not] identifier[self] . identifier[status_code] :
keyword[raise] identifier[HttpConnectionError] ( identifier[reason] , identifier[response] = identifier[self] )
keyword[if] literal[int] <= identifier[self] . identifier[status_code] < literal[int] :
identifier[http_error_msg] = literal[string] %(
identifier[self] . identifier[status_code] , identifier[reason] , identifier[self] . identifier[request] . identifier[method] , identifier[self] . identifier[url] )
keyword[else] :
identifier[http_error_msg] = literal[string] %(
identifier[self] . identifier[status_code] , identifier[reason] , identifier[self] . identifier[request] . identifier[method] , identifier[self] . identifier[url] )
keyword[raise] identifier[HttpRequestException] ( identifier[http_error_msg] , identifier[response] = identifier[self] ) | def raise_for_status(self):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if occurred.
"""
if not self.ok:
reason = self.reason or 'No response from %s' % self.url
if not self.status_code:
raise HttpConnectionError(reason, response=self) # depends on [control=['if'], data=[]]
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error - %s - %s %s' % (self.status_code, reason, self.request.method, self.url) # depends on [control=['if'], data=[]]
else:
http_error_msg = '%s Server Error - %s - %s %s' % (self.status_code, reason, self.request.method, self.url)
raise HttpRequestException(http_error_msg, response=self) # depends on [control=['if'], data=[]] |
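`raise_for_status` mirrors the pattern popularized by `requests`: silent on success, typed exceptions otherwise. A hedged usage sketch follows; `HttpConnectionError` and `HttpRequestException` are the module's own classes, and the assumption that the raised exception carries a `response` attribute is inferred from the `response=self` constructor argument above:

```python
def fetch_or_raise(response):
    """Propagate HTTP failures; hand back the response on success."""
    try:
        response.raise_for_status()
    except HttpConnectionError as exc:
        print('no response at all:', exc)   # request never reached a server
        raise
    except HttpRequestException as exc:
        print('HTTP error:', exc)           # 4xx client / 5xx server failure
        raise
    return response
```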
def is_active(self):
"""Determines whether this plugin is active.
This plugin is only active if TensorBoard sampled any text summaries.
Returns:
Whether this plugin is active.
"""
if not self._multiplexer:
return False
if self._index_cached is not None:
# If we already have computed the index, use it to determine whether
# the plugin should be active, and if so, return immediately.
if any(self._index_cached.values()):
return True
if self._multiplexer.PluginRunToTagToContent(metadata.PLUGIN_NAME):
# Text data is present in the multiplexer. No need to further check for
# data stored via the outdated plugin assets method.
return True
# We haven't conclusively determined if the plugin should be active. Launch
# a thread to compute index_impl() and return False to avoid blocking.
self._maybe_launch_index_impl_thread()
return False | def function[is_active, parameter[self]]:
constant[Determines whether this plugin is active.
This plugin is only active if TensorBoard sampled any text summaries.
Returns:
Whether this plugin is active.
]
if <ast.UnaryOp object at 0x7da1b21ce620> begin[:]
return[constant[False]]
if compare[name[self]._index_cached is_not constant[None]] begin[:]
if call[name[any], parameter[call[name[self]._index_cached.values, parameter[]]]] begin[:]
return[constant[True]]
if call[name[self]._multiplexer.PluginRunToTagToContent, parameter[name[metadata].PLUGIN_NAME]] begin[:]
return[constant[True]]
call[name[self]._maybe_launch_index_impl_thread, parameter[]]
return[constant[False]] | keyword[def] identifier[is_active] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_multiplexer] :
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_index_cached] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[any] ( identifier[self] . identifier[_index_cached] . identifier[values] ()):
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[_multiplexer] . identifier[PluginRunToTagToContent] ( identifier[metadata] . identifier[PLUGIN_NAME] ):
keyword[return] keyword[True]
identifier[self] . identifier[_maybe_launch_index_impl_thread] ()
keyword[return] keyword[False] | def is_active(self):
"""Determines whether this plugin is active.
This plugin is only active if TensorBoard sampled any text summaries.
Returns:
Whether this plugin is active.
"""
if not self._multiplexer:
return False # depends on [control=['if'], data=[]]
if self._index_cached is not None:
# If we already have computed the index, use it to determine whether
# the plugin should be active, and if so, return immediately.
if any(self._index_cached.values()):
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self._multiplexer.PluginRunToTagToContent(metadata.PLUGIN_NAME):
# Text data is present in the multiplexer. No need to further check for
# data stored via the outdated plugin assets method.
return True # depends on [control=['if'], data=[]]
# We haven't conclusively determined if the plugin should be active. Launch
# a thread to compute index_impl() and return False to avoid blocking.
self._maybe_launch_index_impl_thread()
return False |
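The interesting part of `is_active` is the non-blocking activation check: answer from cache when possible, otherwise start the expensive index computation in the background and report inactive for now. A generic sketch of that pattern (class and method names are illustrative, not TensorBoard's):

```python
import threading

class LazyActive:
    """Cache-or-kick-off activity check; is_active() never blocks."""

    def __init__(self, compute_index):
        self._compute_index = compute_index   # expensive callable
        self._index = None
        self._lock = threading.Lock()
        self._thread = None

    def is_active(self):
        if self._index is not None:
            return bool(self._index)          # cached answer, cheap path
        with self._lock:
            if self._thread is None:          # launch the computation once
                self._thread = threading.Thread(target=self._fill, daemon=True)
                self._thread.start()
        return False                          # never block the caller

    def _fill(self):
        self._index = self._compute_index()
```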
def download(self, updates):
'''
Download the updates passed in the updates collection. Load the updates
collection using ``search`` or ``available``
Args:
        updates (Updates): An instance of the Updates class containing
            the updates to be downloaded.
Returns:
dict: A dictionary containing the results of the download
Code Example:
.. code-block:: python
import salt.utils.win_update
wua = salt.utils.win_update.WindowsUpdateAgent()
# Download KB3195454
updates = wua.search('KB3195454')
results = wua.download(updates)
'''
# Check for empty list
if updates.count() == 0:
ret = {'Success': False,
'Updates': 'Nothing to download'}
return ret
# Initialize the downloader object and list collection
downloader = self._session.CreateUpdateDownloader()
self._session.ClientApplicationID = 'Salt: Download Update'
with salt.utils.winapi.Com():
download_list = win32com.client.Dispatch('Microsoft.Update.UpdateColl')
ret = {'Updates': {}}
# Check for updates that aren't already downloaded
for update in updates.updates:
# Define uid to keep the lines shorter
uid = update.Identity.UpdateID
ret['Updates'][uid] = {}
ret['Updates'][uid]['Title'] = update.Title
ret['Updates'][uid]['AlreadyDownloaded'] = \
bool(update.IsDownloaded)
# Accept EULA
if not salt.utils.data.is_true(update.EulaAccepted):
log.debug('Accepting EULA: %s', update.Title)
update.AcceptEula() # pylint: disable=W0104
# Update already downloaded
if not salt.utils.data.is_true(update.IsDownloaded):
log.debug('To Be Downloaded: %s', uid)
log.debug('\tTitle: %s', update.Title)
download_list.Add(update)
# Check the download list
if download_list.Count == 0:
ret = {'Success': True,
'Updates': 'Nothing to download'}
return ret
# Send the list to the downloader
downloader.Updates = download_list
# Download the list
try:
log.debug('Downloading Updates')
result = downloader.Download()
except pywintypes.com_error as error:
# Something happened, raise an error
hr, msg, exc, arg = error.args # pylint: disable=W0633
try:
failure_code = self.fail_codes[exc[5]]
except KeyError:
failure_code = 'Unknown Failure: {0}'.format(error)
log.error('Download Failed: %s', failure_code)
raise CommandExecutionError(failure_code)
# Lookup dictionary
result_code = {0: 'Download Not Started',
1: 'Download In Progress',
2: 'Download Succeeded',
3: 'Download Succeeded With Errors',
4: 'Download Failed',
5: 'Download Aborted'}
log.debug('Download Complete')
log.debug(result_code[result.ResultCode])
ret['Message'] = result_code[result.ResultCode]
# Was the download successful?
if result.ResultCode in [2, 3]:
log.debug('Downloaded Successfully')
ret['Success'] = True
else:
log.debug('Download Failed')
ret['Success'] = False
# Report results for each update
for i in range(download_list.Count):
uid = download_list.Item(i).Identity.UpdateID
ret['Updates'][uid]['Result'] = \
result_code[result.GetUpdateResult(i).ResultCode]
return ret | def function[download, parameter[self, updates]]:
constant[
Download the updates passed in the updates collection. Load the updates
collection using ``search`` or ``available``
Args:
updates (Updates): An instance of the Updates class containing
the updates to be downloaded.
Returns:
dict: A dictionary containing the results of the download
Code Example:
.. code-block:: python
import salt.utils.win_update
wua = salt.utils.win_update.WindowsUpdateAgent()
# Download KB3195454
updates = wua.search('KB3195454')
results = wua.download(updates)
]
if compare[call[name[updates].count, parameter[]] equal[==] constant[0]] begin[:]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21fbee0>, <ast.Constant object at 0x7da1b21fbf10>], [<ast.Constant object at 0x7da1b21fbf40>, <ast.Constant object at 0x7da1b21fb9a0>]]
return[name[ret]]
variable[downloader] assign[=] call[name[self]._session.CreateUpdateDownloader, parameter[]]
name[self]._session.ClientApplicationID assign[=] constant[Salt: Download Update]
with call[name[salt].utils.winapi.Com, parameter[]] begin[:]
variable[download_list] assign[=] call[name[win32com].client.Dispatch, parameter[constant[Microsoft.Update.UpdateColl]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21fb5e0>], [<ast.Dict object at 0x7da1b21fb5b0>]]
for taget[name[update]] in starred[name[updates].updates] begin[:]
variable[uid] assign[=] name[update].Identity.UpdateID
call[call[name[ret]][constant[Updates]]][name[uid]] assign[=] dictionary[[], []]
call[call[call[name[ret]][constant[Updates]]][name[uid]]][constant[Title]] assign[=] name[update].Title
call[call[call[name[ret]][constant[Updates]]][name[uid]]][constant[AlreadyDownloaded]] assign[=] call[name[bool], parameter[name[update].IsDownloaded]]
if <ast.UnaryOp object at 0x7da1b21fad40> begin[:]
call[name[log].debug, parameter[constant[Accepting EULA: %s], name[update].Title]]
call[name[update].AcceptEula, parameter[]]
if <ast.UnaryOp object at 0x7da1b21fa920> begin[:]
call[name[log].debug, parameter[constant[To Be Downloaded: %s], name[uid]]]
call[name[log].debug, parameter[constant[ Title: %s], name[update].Title]]
call[name[download_list].Add, parameter[name[update]]]
if compare[name[download_list].Count equal[==] constant[0]] begin[:]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21fa290>, <ast.Constant object at 0x7da1b21fa260>], [<ast.Constant object at 0x7da1b21fa230>, <ast.Constant object at 0x7da1b21fa200>]]
return[name[ret]]
name[downloader].Updates assign[=] name[download_list]
<ast.Try object at 0x7da1b21f96c0>
variable[result_code] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f28490>, <ast.Constant object at 0x7da1b1f29240>, <ast.Constant object at 0x7da1b1f29750>, <ast.Constant object at 0x7da1b1f28970>, <ast.Constant object at 0x7da1b1f2a0b0>, <ast.Constant object at 0x7da1b1f295d0>], [<ast.Constant object at 0x7da1b1f29180>, <ast.Constant object at 0x7da1b1f28190>, <ast.Constant object at 0x7da1b1f2a0e0>, <ast.Constant object at 0x7da1b1f29660>, <ast.Constant object at 0x7da1b1f285e0>, <ast.Constant object at 0x7da1b1f28580>]]
call[name[log].debug, parameter[constant[Download Complete]]]
call[name[log].debug, parameter[call[name[result_code]][name[result].ResultCode]]]
call[name[ret]][constant[Message]] assign[=] call[name[result_code]][name[result].ResultCode]
if compare[name[result].ResultCode in list[[<ast.Constant object at 0x7da1b1f28c10>, <ast.Constant object at 0x7da1b1f28a90>]]] begin[:]
call[name[log].debug, parameter[constant[Downloaded Successfully]]]
call[name[ret]][constant[Success]] assign[=] constant[True]
for taget[name[i]] in starred[call[name[range], parameter[name[download_list].Count]]] begin[:]
variable[uid] assign[=] call[name[download_list].Item, parameter[name[i]]].Identity.UpdateID
call[call[call[name[ret]][constant[Updates]]][name[uid]]][constant[Result]] assign[=] call[name[result_code]][call[name[result].GetUpdateResult, parameter[name[i]]].ResultCode]
return[name[ret]] | keyword[def] identifier[download] ( identifier[self] , identifier[updates] ):
literal[string]
keyword[if] identifier[updates] . identifier[count] ()== literal[int] :
identifier[ret] ={ literal[string] : keyword[False] ,
literal[string] : literal[string] }
keyword[return] identifier[ret]
identifier[downloader] = identifier[self] . identifier[_session] . identifier[CreateUpdateDownloader] ()
identifier[self] . identifier[_session] . identifier[ClientApplicationID] = literal[string]
keyword[with] identifier[salt] . identifier[utils] . identifier[winapi] . identifier[Com] ():
identifier[download_list] = identifier[win32com] . identifier[client] . identifier[Dispatch] ( literal[string] )
identifier[ret] ={ literal[string] :{}}
keyword[for] identifier[update] keyword[in] identifier[updates] . identifier[updates] :
identifier[uid] = identifier[update] . identifier[Identity] . identifier[UpdateID]
identifier[ret] [ literal[string] ][ identifier[uid] ]={}
identifier[ret] [ literal[string] ][ identifier[uid] ][ literal[string] ]= identifier[update] . identifier[Title]
identifier[ret] [ literal[string] ][ identifier[uid] ][ literal[string] ]= identifier[bool] ( identifier[update] . identifier[IsDownloaded] )
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[data] . identifier[is_true] ( identifier[update] . identifier[EulaAccepted] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[update] . identifier[Title] )
identifier[update] . identifier[AcceptEula] ()
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[data] . identifier[is_true] ( identifier[update] . identifier[IsDownloaded] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[uid] )
identifier[log] . identifier[debug] ( literal[string] , identifier[update] . identifier[Title] )
identifier[download_list] . identifier[Add] ( identifier[update] )
keyword[if] identifier[download_list] . identifier[Count] == literal[int] :
identifier[ret] ={ literal[string] : keyword[True] ,
literal[string] : literal[string] }
keyword[return] identifier[ret]
identifier[downloader] . identifier[Updates] = identifier[download_list]
keyword[try] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[result] = identifier[downloader] . identifier[Download] ()
keyword[except] identifier[pywintypes] . identifier[com_error] keyword[as] identifier[error] :
identifier[hr] , identifier[msg] , identifier[exc] , identifier[arg] = identifier[error] . identifier[args]
keyword[try] :
identifier[failure_code] = identifier[self] . identifier[fail_codes] [ identifier[exc] [ literal[int] ]]
keyword[except] identifier[KeyError] :
identifier[failure_code] = literal[string] . identifier[format] ( identifier[error] )
identifier[log] . identifier[error] ( literal[string] , identifier[failure_code] )
keyword[raise] identifier[CommandExecutionError] ( identifier[failure_code] )
identifier[result_code] ={ literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] }
identifier[log] . identifier[debug] ( literal[string] )
identifier[log] . identifier[debug] ( identifier[result_code] [ identifier[result] . identifier[ResultCode] ])
identifier[ret] [ literal[string] ]= identifier[result_code] [ identifier[result] . identifier[ResultCode] ]
keyword[if] identifier[result] . identifier[ResultCode] keyword[in] [ literal[int] , literal[int] ]:
identifier[log] . identifier[debug] ( literal[string] )
identifier[ret] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[ret] [ literal[string] ]= keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[download_list] . identifier[Count] ):
identifier[uid] = identifier[download_list] . identifier[Item] ( identifier[i] ). identifier[Identity] . identifier[UpdateID]
identifier[ret] [ literal[string] ][ identifier[uid] ][ literal[string] ]= identifier[result_code] [ identifier[result] . identifier[GetUpdateResult] ( identifier[i] ). identifier[ResultCode] ]
keyword[return] identifier[ret] | def download(self, updates):
"""
Download the updates passed in the updates collection. Load the updates
collection using ``search`` or ``available``
Args:
        updates (Updates): An instance of the Updates class containing
            the updates to be downloaded.
Returns:
dict: A dictionary containing the results of the download
Code Example:
.. code-block:: python
import salt.utils.win_update
wua = salt.utils.win_update.WindowsUpdateAgent()
# Download KB3195454
updates = wua.search('KB3195454')
results = wua.download(updates)
"""
# Check for empty list
if updates.count() == 0:
ret = {'Success': False, 'Updates': 'Nothing to download'}
return ret # depends on [control=['if'], data=[]]
# Initialize the downloader object and list collection
downloader = self._session.CreateUpdateDownloader()
self._session.ClientApplicationID = 'Salt: Download Update'
with salt.utils.winapi.Com():
download_list = win32com.client.Dispatch('Microsoft.Update.UpdateColl') # depends on [control=['with'], data=[]]
ret = {'Updates': {}}
# Check for updates that aren't already downloaded
for update in updates.updates:
# Define uid to keep the lines shorter
uid = update.Identity.UpdateID
ret['Updates'][uid] = {}
ret['Updates'][uid]['Title'] = update.Title
ret['Updates'][uid]['AlreadyDownloaded'] = bool(update.IsDownloaded)
# Accept EULA
if not salt.utils.data.is_true(update.EulaAccepted):
log.debug('Accepting EULA: %s', update.Title)
update.AcceptEula() # pylint: disable=W0104 # depends on [control=['if'], data=[]]
# Update already downloaded
if not salt.utils.data.is_true(update.IsDownloaded):
log.debug('To Be Downloaded: %s', uid)
log.debug('\tTitle: %s', update.Title)
download_list.Add(update) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['update']]
# Check the download list
if download_list.Count == 0:
ret = {'Success': True, 'Updates': 'Nothing to download'}
return ret # depends on [control=['if'], data=[]]
# Send the list to the downloader
downloader.Updates = download_list
# Download the list
try:
log.debug('Downloading Updates')
result = downloader.Download() # depends on [control=['try'], data=[]]
except pywintypes.com_error as error:
# Something happened, raise an error
(hr, msg, exc, arg) = error.args # pylint: disable=W0633
try:
failure_code = self.fail_codes[exc[5]] # depends on [control=['try'], data=[]]
except KeyError:
failure_code = 'Unknown Failure: {0}'.format(error) # depends on [control=['except'], data=[]]
log.error('Download Failed: %s', failure_code)
raise CommandExecutionError(failure_code) # depends on [control=['except'], data=['error']]
# Lookup dictionary
result_code = {0: 'Download Not Started', 1: 'Download In Progress', 2: 'Download Succeeded', 3: 'Download Succeeded With Errors', 4: 'Download Failed', 5: 'Download Aborted'}
log.debug('Download Complete')
log.debug(result_code[result.ResultCode])
ret['Message'] = result_code[result.ResultCode]
# Was the download successful?
if result.ResultCode in [2, 3]:
log.debug('Downloaded Successfully')
ret['Success'] = True # depends on [control=['if'], data=[]]
else:
log.debug('Download Failed')
ret['Success'] = False
# Report results for each update
for i in range(download_list.Count):
uid = download_list.Item(i).Identity.UpdateID
ret['Updates'][uid]['Result'] = result_code[result.GetUpdateResult(i).ResultCode] # depends on [control=['for'], data=['i']]
return ret |
def barnes_point(sq_dist, values, kappa, gamma=None):
r"""Generate a single pass barnes interpolation value for a point.
The calculated value is based on the given distances, kappa and gamma values.
Parameters
----------
sq_dist: (N, ) ndarray
Squared distance between observations and grid point
values: (N, ) ndarray
Observation values in same order as sq_dist
kappa: float
Response parameter for barnes interpolation.
gamma: float
Adjustable smoothing parameter for the barnes interpolation. Default 1.
Returns
-------
value: float
Interpolation value for grid point.
"""
if gamma is None:
gamma = 1
weights = tools.barnes_weights(sq_dist, kappa, gamma)
total_weights = np.sum(weights)
return sum(v * (w / total_weights) for (w, v) in zip(weights, values)) | def function[barnes_point, parameter[sq_dist, values, kappa, gamma]]:
constant[Generate a single pass barnes interpolation value for a point.
The calculated value is based on the given distances, kappa and gamma values.
Parameters
----------
sq_dist: (N, ) ndarray
Squared distance between observations and grid point
values: (N, ) ndarray
Observation values in same order as sq_dist
kappa: float
Response parameter for barnes interpolation.
gamma: float
Adjustable smoothing parameter for the barnes interpolation. Default 1.
Returns
-------
value: float
Interpolation value for grid point.
]
if compare[name[gamma] is constant[None]] begin[:]
variable[gamma] assign[=] constant[1]
variable[weights] assign[=] call[name[tools].barnes_weights, parameter[name[sq_dist], name[kappa], name[gamma]]]
variable[total_weights] assign[=] call[name[np].sum, parameter[name[weights]]]
return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b2219750>]]] | keyword[def] identifier[barnes_point] ( identifier[sq_dist] , identifier[values] , identifier[kappa] , identifier[gamma] = keyword[None] ):
literal[string]
keyword[if] identifier[gamma] keyword[is] keyword[None] :
identifier[gamma] = literal[int]
identifier[weights] = identifier[tools] . identifier[barnes_weights] ( identifier[sq_dist] , identifier[kappa] , identifier[gamma] )
identifier[total_weights] = identifier[np] . identifier[sum] ( identifier[weights] )
keyword[return] identifier[sum] ( identifier[v] *( identifier[w] / identifier[total_weights] ) keyword[for] ( identifier[w] , identifier[v] ) keyword[in] identifier[zip] ( identifier[weights] , identifier[values] )) | def barnes_point(sq_dist, values, kappa, gamma=None):
"""Generate a single pass barnes interpolation value for a point.
The calculated value is based on the given distances, kappa and gamma values.
Parameters
----------
sq_dist: (N, ) ndarray
Squared distance between observations and grid point
values: (N, ) ndarray
Observation values in same order as sq_dist
kappa: float
Response parameter for barnes interpolation.
gamma: float
Adjustable smoothing parameter for the barnes interpolation. Default 1.
Returns
-------
value: float
Interpolation value for grid point.
"""
if gamma is None:
gamma = 1 # depends on [control=['if'], data=['gamma']]
weights = tools.barnes_weights(sq_dist, kappa, gamma)
total_weights = np.sum(weights)
return sum((v * (w / total_weights) for (w, v) in zip(weights, values))) |
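Assuming `tools.barnes_weights` computes the usual Gaussian response `w = exp(-sq_dist / (kappa * gamma))`, the whole function reduces to a normalized weighted mean, `sum(w * v) / sum(w)`. A self-contained NumPy sketch under that assumption:

```python
import numpy as np

def barnes_point_sketch(sq_dist, values, kappa, gamma=1.0):
    # Assumption: barnes_weights(sq_dist, kappa, gamma) == exp(-sq_dist / (kappa * gamma)).
    weights = np.exp(-sq_dist / (kappa * gamma))
    return np.sum(weights * values) / np.sum(weights)

# Nearby observations dominate: the zero-distance value pulls the result toward 10.
print(barnes_point_sketch(np.array([0.0, 1.0, 4.0]),
                          np.array([10.0, 20.0, 30.0]), kappa=2.0))  # ~15.0
```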
def get_reference(self, datas, name):
"""
    Get serialized reference data.
    Because every reference is turned into a dict (built from the ``keys``
    variable, which lists the key names), every variable must contain
    exactly as many words as there are key names.
    A reference name starts with 'styleguide-reference-' followed by the
    reference name.
    A reference can contain a ``--structure`` variable set to ``"flat"``,
    ``"list"``, ``"string"`` or ``"json"`` to override the default nested
    serialization structure.
    Arguments:
        datas (dict): Data to search for the reference declaration. This
            is commonly the fully parsed manifest.
        name (string): Reference name to get and serialize.
    Returns:
        collections.OrderedDict: Serialized reference data.
"""
rule_name = '-'.join((RULE_REFERENCE, name))
structure_mode = 'nested'
if rule_name not in datas:
msg = "Unable to find enabled reference '{}'"
raise SerializerError(msg.format(name))
properties = datas.get(rule_name)
# Search for "structure" variable
if 'structure' in properties:
if properties['structure'] == 'flat':
structure_mode = 'flat'
elif properties['structure'] == 'list':
structure_mode = 'list'
elif properties['structure'] == 'string':
structure_mode = 'string'
elif properties['structure'] == 'json':
structure_mode = 'json'
elif properties['structure'] == 'nested':
pass
else:
msg = "Invalid structure mode name '{}' for reference '{}'"
raise SerializerError(msg.format(structure_mode, name))
del properties['structure']
# Validate variable names
for item in properties.keys():
self.validate_variable_name(item)
# Perform serialize according to structure mode
if structure_mode == 'flat':
context = self.serialize_to_flat(name, properties)
elif structure_mode == 'list':
context = self.serialize_to_list(name, properties)
elif structure_mode == 'string':
context = self.serialize_to_string(name, properties)
elif structure_mode == 'nested':
context = self.serialize_to_nested(name, properties)
elif structure_mode == 'json':
context = self.serialize_to_json(name, properties)
return context | def function[get_reference, parameter[self, datas, name]]:
constant[
Get serialized reference datas
Because every reference is turned to a dict (that stands on ``keys``
variable that is a list of key names), every variables must have the
same exact length of word than the key name list.
A reference name starts with 'styleguide-reference-' followed by
name for reference.
A reference can contains variable ``--structure`` setted to ``"flat"``,
``"list"`` or ``"string"`` to define serialization structure.
Arguments:
datas (dict): Data where to search for reference declaration. This
is commonly the fully parsed manifest.
name (string): Reference name to get and serialize.
Returns:
collections.OrderedDict: Serialized reference datas.
]
variable[rule_name] assign[=] call[constant[-].join, parameter[tuple[[<ast.Name object at 0x7da2054a4a60>, <ast.Name object at 0x7da2054a76a0>]]]]
variable[structure_mode] assign[=] constant[nested]
if compare[name[rule_name] <ast.NotIn object at 0x7da2590d7190> name[datas]] begin[:]
variable[msg] assign[=] constant[Unable to find enabled reference '{}']
<ast.Raise object at 0x7da2054a4df0>
variable[properties] assign[=] call[name[datas].get, parameter[name[rule_name]]]
if compare[constant[structure] in name[properties]] begin[:]
if compare[call[name[properties]][constant[structure]] equal[==] constant[flat]] begin[:]
variable[structure_mode] assign[=] constant[flat]
<ast.Delete object at 0x7da2054a7190>
for taget[name[item]] in starred[call[name[properties].keys, parameter[]]] begin[:]
call[name[self].validate_variable_name, parameter[name[item]]]
if compare[name[structure_mode] equal[==] constant[flat]] begin[:]
variable[context] assign[=] call[name[self].serialize_to_flat, parameter[name[name], name[properties]]]
return[name[context]] | keyword[def] identifier[get_reference] ( identifier[self] , identifier[datas] , identifier[name] ):
literal[string]
identifier[rule_name] = literal[string] . identifier[join] (( identifier[RULE_REFERENCE] , identifier[name] ))
identifier[structure_mode] = literal[string]
keyword[if] identifier[rule_name] keyword[not] keyword[in] identifier[datas] :
identifier[msg] = literal[string]
keyword[raise] identifier[SerializerError] ( identifier[msg] . identifier[format] ( identifier[name] ))
identifier[properties] = identifier[datas] . identifier[get] ( identifier[rule_name] )
keyword[if] literal[string] keyword[in] identifier[properties] :
keyword[if] identifier[properties] [ literal[string] ]== literal[string] :
identifier[structure_mode] = literal[string]
keyword[elif] identifier[properties] [ literal[string] ]== literal[string] :
identifier[structure_mode] = literal[string]
keyword[elif] identifier[properties] [ literal[string] ]== literal[string] :
identifier[structure_mode] = literal[string]
keyword[elif] identifier[properties] [ literal[string] ]== literal[string] :
identifier[structure_mode] = literal[string]
keyword[elif] identifier[properties] [ literal[string] ]== literal[string] :
keyword[pass]
keyword[else] :
identifier[msg] = literal[string]
keyword[raise] identifier[SerializerError] ( identifier[msg] . identifier[format] ( identifier[structure_mode] , identifier[name] ))
keyword[del] identifier[properties] [ literal[string] ]
keyword[for] identifier[item] keyword[in] identifier[properties] . identifier[keys] ():
identifier[self] . identifier[validate_variable_name] ( identifier[item] )
keyword[if] identifier[structure_mode] == literal[string] :
identifier[context] = identifier[self] . identifier[serialize_to_flat] ( identifier[name] , identifier[properties] )
keyword[elif] identifier[structure_mode] == literal[string] :
identifier[context] = identifier[self] . identifier[serialize_to_list] ( identifier[name] , identifier[properties] )
keyword[elif] identifier[structure_mode] == literal[string] :
identifier[context] = identifier[self] . identifier[serialize_to_string] ( identifier[name] , identifier[properties] )
keyword[elif] identifier[structure_mode] == literal[string] :
identifier[context] = identifier[self] . identifier[serialize_to_nested] ( identifier[name] , identifier[properties] )
keyword[elif] identifier[structure_mode] == literal[string] :
identifier[context] = identifier[self] . identifier[serialize_to_json] ( identifier[name] , identifier[properties] )
keyword[return] identifier[context] | def get_reference(self, datas, name):
"""
    Get serialized reference data.
    Because every reference is turned into a dict (built from the ``keys``
    variable, which lists the key names), every variable must contain
    exactly as many words as there are key names.
    A reference name starts with 'styleguide-reference-' followed by the
    reference name.
    A reference can contain a ``--structure`` variable set to ``"flat"``,
    ``"list"``, ``"string"`` or ``"json"`` to override the default nested
    serialization structure.
    Arguments:
        datas (dict): Data to search for the reference declaration. This
            is commonly the fully parsed manifest.
        name (string): Reference name to get and serialize.
    Returns:
        collections.OrderedDict: Serialized reference data.
"""
rule_name = '-'.join((RULE_REFERENCE, name))
structure_mode = 'nested'
if rule_name not in datas:
msg = "Unable to find enabled reference '{}'"
raise SerializerError(msg.format(name)) # depends on [control=['if'], data=[]]
properties = datas.get(rule_name)
# Search for "structure" variable
if 'structure' in properties:
if properties['structure'] == 'flat':
structure_mode = 'flat' # depends on [control=['if'], data=[]]
elif properties['structure'] == 'list':
structure_mode = 'list' # depends on [control=['if'], data=[]]
elif properties['structure'] == 'string':
structure_mode = 'string' # depends on [control=['if'], data=[]]
elif properties['structure'] == 'json':
structure_mode = 'json' # depends on [control=['if'], data=[]]
elif properties['structure'] == 'nested':
pass # depends on [control=['if'], data=[]]
else:
msg = "Invalid structure mode name '{}' for reference '{}'"
raise SerializerError(msg.format(structure_mode, name))
del properties['structure'] # depends on [control=['if'], data=['properties']]
# Validate variable names
for item in properties.keys():
self.validate_variable_name(item) # depends on [control=['for'], data=['item']]
# Perform serialize according to structure mode
if structure_mode == 'flat':
context = self.serialize_to_flat(name, properties) # depends on [control=['if'], data=[]]
elif structure_mode == 'list':
context = self.serialize_to_list(name, properties) # depends on [control=['if'], data=[]]
elif structure_mode == 'string':
context = self.serialize_to_string(name, properties) # depends on [control=['if'], data=[]]
elif structure_mode == 'nested':
context = self.serialize_to_nested(name, properties) # depends on [control=['if'], data=[]]
elif structure_mode == 'json':
context = self.serialize_to_json(name, properties) # depends on [control=['if'], data=[]]
return context |
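A hedged sketch of the input shape `get_reference` expects: given the docstring, `RULE_REFERENCE` is presumably the literal `'styleguide-reference'`, and the manifest and sample output below are invented for illustration:

```python
# Hypothetical manifest as get_reference() would receive it in `datas`.
datas = {
    'styleguide-reference-palette': {
        'structure': 'flat',          # serialization mode, removed before parsing
        'keys': 'black white',        # key names, whitespace separated
        'values': '#000000 #ffffff',  # must hold one word per key name
    }
}
# serializer.get_reference(datas, 'palette') would then plausibly yield
# something like OrderedDict([('black', '#000000'), ('white', '#ffffff')]).
```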
def read_config(path):
"""
Read a configuration from disk.
Arguments
    path -- the location to deserialize
"""
parser = _make_parser()
if parser.read(path):
return parser
raise Exception("Failed to read {}".format(path)) | def function[read_config, parameter[path]]:
constant[
Read a configuration from disk.
Arguments
path -- the location to deserialize
]
variable[parser] assign[=] call[name[_make_parser], parameter[]]
if call[name[parser].read, parameter[name[path]]] begin[:]
return[name[parser]]
<ast.Raise object at 0x7da18f00cfa0> | keyword[def] identifier[read_config] ( identifier[path] ):
literal[string]
identifier[parser] = identifier[_make_parser] ()
keyword[if] identifier[parser] . identifier[read] ( identifier[path] ):
keyword[return] identifier[parser]
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[path] )) | def read_config(path):
"""
Read a configuration from disk.
Arguments
    path -- the location to deserialize
"""
parser = _make_parser()
if parser.read(path):
return parser # depends on [control=['if'], data=[]]
raise Exception('Failed to read {}'.format(path)) |
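`read_config` delegates to `_make_parser()`, which plausibly returns a stock `configparser.ConfigParser` given the `read(path)` contract; that is an assumption. A runnable sketch of the read-or-raise behavior:

```python
import configparser
import os
import tempfile

def _make_parser():
    # Assumption: the real _make_parser() builds a configured ConfigParser.
    return configparser.ConfigParser()

# Demonstrate the contract with a throwaway INI file.
with tempfile.NamedTemporaryFile('w', suffix='.ini', delete=False) as fh:
    fh.write('[main]\nanswer = 42\n')
parser = read_config(fh.name)         # read_config() as defined above
print(parser.get('main', 'answer'))   # -> 42
os.unlink(fh.name)
# read_config('/no/such/file.ini') would raise, since parser.read() returns [].
```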
def authenticate_http_request(token=None):
"""Validate auth0 tokens passed in the request's header, hence ensuring
that the user is authenticated. Code copied from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api
    Raise a PntCommonException if the token fails validation.
    Otherwise, return the token's payload (also stored in stack.top.current_user).
"""
if token:
auth = token
else:
auth = request.headers.get('Authorization', None)
if not auth:
auth = request.cookies.get('token', None)
if auth:
auth = unquote_plus(auth)
log.debug("Validating Auth header [%s]" % auth)
if not auth:
raise AuthMissingHeaderError('There is no Authorization header in the HTTP request')
parts = auth.split()
if parts[0].lower() != 'bearer':
raise AuthInvalidTokenError('Authorization header must start with Bearer')
elif len(parts) == 1:
raise AuthInvalidTokenError('Token not found in Authorization header')
elif len(parts) > 2:
raise AuthInvalidTokenError('Authorization header must be Bearer + \s + token')
token = parts[1]
return load_auth_token(token) | def function[authenticate_http_request, parameter[token]]:
constant[Validate auth0 tokens passed in the request's header, hence ensuring
that the user is authenticated. Code copied from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api
Raise a PntCommonException if the token fails validation.
Otherwise, return the token's payload (also stored in stack.top.current_user).
]
if name[token] begin[:]
variable[auth] assign[=] name[token]
if <ast.UnaryOp object at 0x7da20c991f90> begin[:]
variable[auth] assign[=] call[name[request].cookies.get, parameter[constant[token], constant[None]]]
if name[auth] begin[:]
variable[auth] assign[=] call[name[unquote_plus], parameter[name[auth]]]
call[name[log].debug, parameter[binary_operation[constant[Validating Auth header [%s]] <ast.Mod object at 0x7da2590d6920> name[auth]]]]
if <ast.UnaryOp object at 0x7da20c993460> begin[:]
<ast.Raise object at 0x7da20c991300>
variable[parts] assign[=] call[name[auth].split, parameter[]]
if compare[call[call[name[parts]][constant[0]].lower, parameter[]] not_equal[!=] constant[bearer]] begin[:]
<ast.Raise object at 0x7da1b1b87cd0>
variable[token] assign[=] call[name[parts]][constant[1]]
return[call[name[load_auth_token], parameter[name[token]]]] | keyword[def] identifier[authenticate_http_request] ( identifier[token] = keyword[None] ):
literal[string]
keyword[if] identifier[token] :
identifier[auth] = identifier[token]
keyword[else] :
identifier[auth] = identifier[request] . identifier[headers] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[auth] :
identifier[auth] = identifier[request] . identifier[cookies] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[auth] :
identifier[auth] = identifier[unquote_plus] ( identifier[auth] )
identifier[log] . identifier[debug] ( literal[string] % identifier[auth] )
keyword[if] keyword[not] identifier[auth] :
keyword[raise] identifier[AuthMissingHeaderError] ( literal[string] )
identifier[parts] = identifier[auth] . identifier[split] ()
keyword[if] identifier[parts] [ literal[int] ]. identifier[lower] ()!= literal[string] :
keyword[raise] identifier[AuthInvalidTokenError] ( literal[string] )
keyword[elif] identifier[len] ( identifier[parts] )== literal[int] :
keyword[raise] identifier[AuthInvalidTokenError] ( literal[string] )
keyword[elif] identifier[len] ( identifier[parts] )> literal[int] :
keyword[raise] identifier[AuthInvalidTokenError] ( literal[string] )
identifier[token] = identifier[parts] [ literal[int] ]
keyword[return] identifier[load_auth_token] ( identifier[token] ) | def authenticate_http_request(token=None):
"""Validate auth0 tokens passed in the request's header, hence ensuring
that the user is authenticated. Code copied from:
https://github.com/auth0/auth0-python/tree/master/examples/flask-api
    Raise a PntCommonException if the token fails validation.
    Otherwise, return the token's payload (also stored in stack.top.current_user).
"""
if token:
auth = token # depends on [control=['if'], data=[]]
else:
auth = request.headers.get('Authorization', None)
if not auth:
auth = request.cookies.get('token', None)
if auth:
auth = unquote_plus(auth) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
log.debug('Validating Auth header [%s]' % auth)
if not auth:
raise AuthMissingHeaderError('There is no Authorization header in the HTTP request') # depends on [control=['if'], data=[]]
parts = auth.split()
if parts[0].lower() != 'bearer':
raise AuthInvalidTokenError('Authorization header must start with Bearer') # depends on [control=['if'], data=[]]
elif len(parts) == 1:
raise AuthInvalidTokenError('Token not found in Authorization header') # depends on [control=['if'], data=[]]
elif len(parts) > 2:
raise AuthInvalidTokenError('Authorization header must be Bearer + \\s + token') # depends on [control=['if'], data=[]]
token = parts[1]
return load_auth_token(token) |
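The header-splitting rules above are easy to check in isolation: exactly two whitespace-separated parts, the first being `bearer` case-insensitively. A minimal sketch, with the exception types simplified to `ValueError` (the real code raises `AuthInvalidTokenError`):

```python
def split_bearer(header):
    """Return the token from an 'Authorization: Bearer <token>' value."""
    parts = header.split()
    if parts[0].lower() != 'bearer':
        raise ValueError('Authorization header must start with Bearer')
    if len(parts) == 1:
        raise ValueError('Token not found in Authorization header')
    if len(parts) > 2:
        raise ValueError('Authorization header must be Bearer + token')
    return parts[1]

print(split_bearer('Bearer abc.def.ghi'))  # -> 'abc.def.ghi'
```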
def _parse_pattern(self, element):
"""
Parse the trigger pattern
:param element: The XML Element object
:type element: etree._Element
"""
    self._log.info('Parsing Trigger Pattern: ' + element.text)
    self.pattern_words, self.pattern_len = self.count_words(element.text)
    self._log.debug('Pattern contains {wc} words with a total length of {cc}'
                    .format(wc=self.pattern_words, cc=self.pattern_len))
    # If this is a raw regular expression, compile it and immediately return
    regex = bool_attribute(self._element, 'regex', False)
if regex:
self._log.info('Attempting to compile trigger as a raw regex')
try:
self.pattern = re.compile(element.text)
except sre_constants.error:
self._log.warn('Attempted to compile an invalid regular expression in {path} ; {regex}'
.format(path=self.file_path, regex=element.text))
raise AgentMLSyntaxError
return
self.pattern = normalize(element.text, True)
self._log.debug('Normalizing pattern: ' + self.pattern)
compile_as_regex = False
# Wildcard patterns and replacements
captured_wildcard = re.compile(r'(?<!\\)\(\*\)')
wildcard = re.compile(r'(?<!\\)\*')
capt_wild_numeric = re.compile(r'(?<!\\)\(#\)')
wild_numeric = re.compile(r'(?<!\\)#')
capt_wild_alpha = re.compile(r'(?<!\\)\(_\)')
wild_alpha = re.compile(r'(?<!\\)_')
wildcard_replacements = [
(captured_wildcard, r'(.+)'),
(wildcard, r'(?:.+)'),
(capt_wild_numeric, r'(\d+)'),
(wild_numeric, r'(?:\d+)'),
(capt_wild_alpha, r'([a-zA-Z]+)'),
(wild_alpha, r'(?:[a-zA-Z]+)'),
]
for wildcard, replacement in wildcard_replacements:
self.pattern, match = self.replace_wildcards(self.pattern, wildcard, replacement)
compile_as_regex = bool(match) or compile_as_regex
# Required and optional choices
req_choice = re.compile(r'\(([\w\s\|]+)\)')
opt_choice = re.compile(r'\s?\[([\w\s\|]+)\]\s?')
if req_choice.search(self.pattern):
def sub_required(pattern):
patterns = pattern.group(1).split('|')
return r'(\b{options})\b'.format(options='|'.join(patterns))
self.pattern = req_choice.sub(sub_required, self.pattern)
self._log.debug('Parsing Pattern required choices: ' + self.pattern)
compile_as_regex = True
if opt_choice.search(self.pattern):
def sub_optional(pattern):
patterns = pattern.group(1).split('|')
return r'\s?(?:\b(?:{options})\b)?\s?'.format(options='|'.join(patterns))
self.pattern = opt_choice.sub(sub_optional, self.pattern)
self._log.debug('Parsing Pattern optional choices: ' + self.pattern)
compile_as_regex = True
    if compile_as_regex:
        self._log.debug('Compiling Pattern as regex')
        self.pattern = re.compile('^{pattern}$'.format(pattern=self.pattern), re.IGNORECASE)
    else:
        self._log.debug('Pattern is atomic')
        self.pattern_is_atomic = True
        # Replace any escaped wildcard symbols. Only atomic (string) patterns
        # need un-escaping: a compiled regex object has no str.replace(), and
        # escaped symbols are already valid regex literals there.
        self._log.debug('Replacing any escaped sequences in Pattern')
        self.pattern = self.pattern.replace('\\*', '*')
        self.pattern = self.pattern.replace('\\#', '#')
        self.pattern = self.pattern.replace('\\_', '_')
        # TODO: This needs revisiting
        self.pattern = self.pattern.replace('\\(*)', '(*)')
        self.pattern = self.pattern.replace('\\(#)', '(#)')
        self.pattern = self.pattern.replace('\\(_)', '(_)')
constant[
Parse the trigger pattern
:param element: The XML Element object
:type element: etree._Element
]
call[name[self]._log.info, parameter[binary_operation[constant[Parsing Trigger Pattern: ] + name[element].text]]]
<ast.Tuple object at 0x7da1b1457c40> assign[=] call[name[self].count_words, parameter[name[element].text]]
call[name[self]._log.debug, parameter[call[constant[Pattern contains {wc} words with a total length of {cc}].format, parameter[]]]]
variable[regex] assign[=] call[name[bool_attribute], parameter[name[self]._element, constant[regex], constant[False]]]
if name[regex] begin[:]
call[name[self]._log.info, parameter[constant[Attempting to compile trigger as a raw regex]]]
<ast.Try object at 0x7da1b1457460>
return[None]
name[self].pattern assign[=] call[name[normalize], parameter[name[element].text, constant[True]]]
call[name[self]._log.debug, parameter[binary_operation[constant[Normalizing pattern: ] + name[self].pattern]]]
variable[compile_as_regex] assign[=] constant[False]
variable[captured_wildcard] assign[=] call[name[re].compile, parameter[constant[(?<!\\)\(\*\)]]]
variable[wildcard] assign[=] call[name[re].compile, parameter[constant[(?<!\\)\*]]]
variable[capt_wild_numeric] assign[=] call[name[re].compile, parameter[constant[(?<!\\)\(#\)]]]
variable[wild_numeric] assign[=] call[name[re].compile, parameter[constant[(?<!\\)#]]]
variable[capt_wild_alpha] assign[=] call[name[re].compile, parameter[constant[(?<!\\)\(_\)]]]
variable[wild_alpha] assign[=] call[name[re].compile, parameter[constant[(?<!\\)_]]]
variable[wildcard_replacements] assign[=] list[[<ast.Tuple object at 0x7da1b1456320>, <ast.Tuple object at 0x7da1b1456290>, <ast.Tuple object at 0x7da1b1456200>, <ast.Tuple object at 0x7da1b1456170>, <ast.Tuple object at 0x7da1b14560e0>, <ast.Tuple object at 0x7da1b1456050>]]
for taget[tuple[[<ast.Name object at 0x7da1b1455f60>, <ast.Name object at 0x7da1b1455f30>]]] in starred[name[wildcard_replacements]] begin[:]
<ast.Tuple object at 0x7da1b1455ea0> assign[=] call[name[self].replace_wildcards, parameter[name[self].pattern, name[wildcard], name[replacement]]]
variable[compile_as_regex] assign[=] <ast.BoolOp object at 0x7da1b1437f70>
variable[req_choice] assign[=] call[name[re].compile, parameter[constant[\(([\w\s\|]+)\)]]]
variable[opt_choice] assign[=] call[name[re].compile, parameter[constant[\s?\[([\w\s\|]+)\]\s?]]]
if call[name[req_choice].search, parameter[name[self].pattern]] begin[:]
def function[sub_required, parameter[pattern]]:
variable[patterns] assign[=] call[call[name[pattern].group, parameter[constant[1]]].split, parameter[constant[|]]]
return[call[constant[(\b{options})\b].format, parameter[]]]
name[self].pattern assign[=] call[name[req_choice].sub, parameter[name[sub_required], name[self].pattern]]
call[name[self]._log.debug, parameter[binary_operation[constant[Parsing Pattern required choices: ] + name[self].pattern]]]
variable[compile_as_regex] assign[=] constant[True]
if call[name[opt_choice].search, parameter[name[self].pattern]] begin[:]
def function[sub_optional, parameter[pattern]]:
variable[patterns] assign[=] call[call[name[pattern].group, parameter[constant[1]]].split, parameter[constant[|]]]
return[call[constant[\s?(?:\b(?:{options})\b)?\s?].format, parameter[]]]
name[self].pattern assign[=] call[name[opt_choice].sub, parameter[name[sub_optional], name[self].pattern]]
call[name[self]._log.debug, parameter[binary_operation[constant[Parsing Pattern optional choices: ] + name[self].pattern]]]
variable[compile_as_regex] assign[=] constant[True]
if name[compile_as_regex] begin[:]
call[name[self]._log.debug, parameter[constant[Compiling Pattern as regex]]]
name[self].pattern assign[=] call[name[re].compile, parameter[call[constant[^{pattern}$].format, parameter[]], name[re].IGNORECASE]] | keyword[def] identifier[_parse_pattern] ( identifier[self] , identifier[element] ):
literal[string]
identifier[self] . identifier[_log] . identifier[info] ( literal[string] + identifier[element] . identifier[text] )
identifier[self] . identifier[pattern_words] , identifier[self] . identifier[pattern_len] = identifier[self] . identifier[count_words] ( identifier[element] . identifier[text] )
identifier[self] . identifier[_log] . identifier[debug] ( literal[string]
. identifier[format] ( identifier[wc] = identifier[self] . identifier[pattern_words] , identifier[cc] = identifier[self] . identifier[pattern_len] ))
identifier[regex] = identifier[bool_attribute] ( identifier[self] . identifier[_element] , literal[string] , keyword[False] )
keyword[if] identifier[regex] :
identifier[self] . identifier[_log] . identifier[info] ( literal[string] )
keyword[try] :
identifier[self] . identifier[pattern] = identifier[re] . identifier[compile] ( identifier[element] . identifier[text] )
keyword[except] identifier[sre_constants] . identifier[error] :
identifier[self] . identifier[_log] . identifier[warn] ( literal[string]
. identifier[format] ( identifier[path] = identifier[self] . identifier[file_path] , identifier[regex] = identifier[element] . identifier[text] ))
keyword[raise] identifier[AgentMLSyntaxError]
keyword[return]
identifier[self] . identifier[pattern] = identifier[normalize] ( identifier[element] . identifier[text] , keyword[True] )
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] + identifier[self] . identifier[pattern] )
identifier[compile_as_regex] = keyword[False]
identifier[captured_wildcard] = identifier[re] . identifier[compile] ( literal[string] )
identifier[wildcard] = identifier[re] . identifier[compile] ( literal[string] )
identifier[capt_wild_numeric] = identifier[re] . identifier[compile] ( literal[string] )
identifier[wild_numeric] = identifier[re] . identifier[compile] ( literal[string] )
identifier[capt_wild_alpha] = identifier[re] . identifier[compile] ( literal[string] )
identifier[wild_alpha] = identifier[re] . identifier[compile] ( literal[string] )
identifier[wildcard_replacements] =[
( identifier[captured_wildcard] , literal[string] ),
( identifier[wildcard] , literal[string] ),
( identifier[capt_wild_numeric] , literal[string] ),
( identifier[wild_numeric] , literal[string] ),
( identifier[capt_wild_alpha] , literal[string] ),
( identifier[wild_alpha] , literal[string] ),
]
keyword[for] identifier[wildcard] , identifier[replacement] keyword[in] identifier[wildcard_replacements] :
identifier[self] . identifier[pattern] , identifier[match] = identifier[self] . identifier[replace_wildcards] ( identifier[self] . identifier[pattern] , identifier[wildcard] , identifier[replacement] )
identifier[compile_as_regex] = identifier[bool] ( identifier[match] ) keyword[or] identifier[compile_as_regex]
identifier[req_choice] = identifier[re] . identifier[compile] ( literal[string] )
identifier[opt_choice] = identifier[re] . identifier[compile] ( literal[string] )
keyword[if] identifier[req_choice] . identifier[search] ( identifier[self] . identifier[pattern] ):
keyword[def] identifier[sub_required] ( identifier[pattern] ):
identifier[patterns] = identifier[pattern] . identifier[group] ( literal[int] ). identifier[split] ( literal[string] )
keyword[return] literal[string] . identifier[format] ( identifier[options] = literal[string] . identifier[join] ( identifier[patterns] ))
identifier[self] . identifier[pattern] = identifier[req_choice] . identifier[sub] ( identifier[sub_required] , identifier[self] . identifier[pattern] )
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] + identifier[self] . identifier[pattern] )
identifier[compile_as_regex] = keyword[True]
keyword[if] identifier[opt_choice] . identifier[search] ( identifier[self] . identifier[pattern] ):
keyword[def] identifier[sub_optional] ( identifier[pattern] ):
identifier[patterns] = identifier[pattern] . identifier[group] ( literal[int] ). identifier[split] ( literal[string] )
keyword[return] literal[string] . identifier[format] ( identifier[options] = literal[string] . identifier[join] ( identifier[patterns] ))
identifier[self] . identifier[pattern] = identifier[opt_choice] . identifier[sub] ( identifier[sub_optional] , identifier[self] . identifier[pattern] )
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] + identifier[self] . identifier[pattern] )
identifier[compile_as_regex] = keyword[True]
keyword[if] identifier[compile_as_regex] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] . identifier[format] ( identifier[pattern] = identifier[self] . identifier[pattern] ), identifier[re] . identifier[IGNORECASE] )
keyword[else] :
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[pattern_is_atomic] = keyword[True]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[pattern] = identifier[self] . identifier[pattern] . identifier[replace] ( literal[string] , literal[string] )
identifier[self] . identifier[pattern] = identifier[self] . identifier[pattern] . identifier[replace] ( literal[string] , literal[string] )
identifier[self] . identifier[pattern] = identifier[self] . identifier[pattern] . identifier[replace] ( literal[string] , literal[string] )
identifier[self] . identifier[pattern] = identifier[self] . identifier[pattern] . identifier[replace] ( literal[string] , literal[string] )
identifier[self] . identifier[pattern] = identifier[self] . identifier[pattern] . identifier[replace] ( literal[string] , literal[string] )
identifier[self] . identifier[pattern] = identifier[self] . identifier[pattern] . identifier[replace] ( literal[string] , literal[string] ) | def _parse_pattern(self, element):
"""
Parse the trigger pattern
:param element: The XML Element object
:type element: etree._Element
"""
# If this is a raw regular expression, compile it and immediately return
self._log.info('Parsing Trigger Pattern: ' + element.text)
(self.pattern_words, self.pattern_len) = self.count_words(element.text)
self._log.debug('Pattern contains {wc} words with a total length of {cc}'.format(wc=self.pattern_words, cc=self.pattern_len))
regex = bool_attribute(self._element, 'regex', False)
if regex:
self._log.info('Attempting to compile trigger as a raw regex')
try:
self.pattern = re.compile(element.text) # depends on [control=['try'], data=[]]
except sre_constants.error:
self._log.warn('Attempted to compile an invalid regular expression in {path} ; {regex}'.format(path=self.file_path, regex=element.text))
raise AgentMLSyntaxError # depends on [control=['except'], data=[]]
return # depends on [control=['if'], data=[]]
self.pattern = normalize(element.text, True)
self._log.debug('Normalizing pattern: ' + self.pattern)
compile_as_regex = False
# Wildcard patterns and replacements
captured_wildcard = re.compile('(?<!\\\\)\\(\\*\\)')
wildcard = re.compile('(?<!\\\\)\\*')
capt_wild_numeric = re.compile('(?<!\\\\)\\(#\\)')
wild_numeric = re.compile('(?<!\\\\)#')
capt_wild_alpha = re.compile('(?<!\\\\)\\(_\\)')
wild_alpha = re.compile('(?<!\\\\)_')
wildcard_replacements = [(captured_wildcard, '(.+)'), (wildcard, '(?:.+)'), (capt_wild_numeric, '(\\d+)'), (wild_numeric, '(?:\\d+)'), (capt_wild_alpha, '([a-zA-Z]+)'), (wild_alpha, '(?:[a-zA-Z]+)')]
for (wildcard, replacement) in wildcard_replacements:
(self.pattern, match) = self.replace_wildcards(self.pattern, wildcard, replacement)
compile_as_regex = bool(match) or compile_as_regex # depends on [control=['for'], data=[]]
# Required and optional choices
req_choice = re.compile('\\(([\\w\\s\\|]+)\\)')
opt_choice = re.compile('\\s?\\[([\\w\\s\\|]+)\\]\\s?')
if req_choice.search(self.pattern):
def sub_required(pattern):
patterns = pattern.group(1).split('|')
return '(\\b{options})\\b'.format(options='|'.join(patterns))
self.pattern = req_choice.sub(sub_required, self.pattern)
self._log.debug('Parsing Pattern required choices: ' + self.pattern)
compile_as_regex = True # depends on [control=['if'], data=[]]
if opt_choice.search(self.pattern):
def sub_optional(pattern):
patterns = pattern.group(1).split('|')
return '\\s?(?:\\b(?:{options})\\b)?\\s?'.format(options='|'.join(patterns))
self.pattern = opt_choice.sub(sub_optional, self.pattern)
self._log.debug('Parsing Pattern optional choices: ' + self.pattern)
compile_as_regex = True # depends on [control=['if'], data=[]]
if compile_as_regex:
self._log.debug('Compiling Pattern as regex')
self.pattern = re.compile('^{pattern}$'.format(pattern=self.pattern), re.IGNORECASE) # depends on [control=['if'], data=[]]
else:
self._log.debug('Pattern is atomic')
self.pattern_is_atomic = True
# Replace any escaped wildcard symbols
self._log.debug('Replacing any escaped sequences in Pattern')
self.pattern = self.pattern.replace('\\*', '*')
self.pattern = self.pattern.replace('\\#', '#')
self.pattern = self.pattern.replace('\\_', '_')
# TODO: This needs revisiting
self.pattern = self.pattern.replace('\\(*)', '(*)')
self.pattern = self.pattern.replace('\\(#)', '(#)')
self.pattern = self.pattern.replace('\\(_)', '(_)') |
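
The wildcard pass in _parse_pattern is easiest to see in isolation: each unescaped wildcard symbol is rewritten into a regex fragment, while backslash-escaped symbols survive thanks to the (?<!\\) look-behind. A standalone sketch of two of the substitutions:

import re

captured_wildcard = re.compile(r'(?<!\\)\(\*\)')  # (*) -> capturing group
wild_numeric = re.compile(r'(?<!\\)#')            # #   -> non-capturing digits

pattern = 'order (*) of \\# item #'
pattern = captured_wildcard.sub(lambda m: r'(.+)', pattern)
pattern = wild_numeric.sub(lambda m: r'(?:\d+)', pattern)
print(pattern)  # order (.+) of \# item (?:\d+)
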
def _ascii2(value):
"""
A variant of the `ascii()` built-in function known from Python 3 that:
(1) ensures ASCII-only output, and
(2) produces a nicer formatting for use in exception and warning messages
and other human consumption.
This function calls `ascii()` and post-processes its output as follows:
* For unicode strings, a leading 'u' is stripped (u'xxx' becomes 'xxx'),
if present.
* For byte strings, a leading 'b' is stripped (b'xxx' becomes 'xxx'),
if present.
* For unicode strings, non-ASCII Unicode characters in the range U+0000 to
U+00FF are represented as '/u00hh' instead of the confusing '/xhh'
('/' being a backslash, 'hh' being a 2-digit hex number).
This function correctly handles values of collection types such as list,
tuple, dict, and set, by producing the usual Python representation string
for them. If the type is not the standard Python type (i.e. OrderedDict
instead of dict), the type name is also shown in the result.
Returns:
str: ASCII string
"""
if isinstance(value, Mapping):
# NocaseDict in current impl. is not a Mapping; it uses
# its own repr() implementation (via ascii(), called further down)
items = [_ascii2(k) + ": " + _ascii2(v)
for k, v in six.iteritems(value)]
item_str = "{" + ", ".join(items) + "}"
if value.__class__.__name__ == 'dict':
return item_str
return "{0}({1})".format(value.__class__.__name__, item_str)
if isinstance(value, Set):
items = [_ascii2(v) for v in value]
item_str = "{" + ", ".join(items) + "}"
if value.__class__.__name__ == 'set':
return item_str
return "{0}({1})".format(value.__class__.__name__, item_str)
if isinstance(value, MutableSequence):
items = [_ascii2(v) for v in value]
item_str = "[" + ", ".join(items) + "]"
if value.__class__.__name__ == 'list':
return item_str
return "{0}({1})".format(value.__class__.__name__, item_str)
if isinstance(value, Sequence) and \
not isinstance(value, (six.text_type, six.binary_type)):
items = [_ascii2(v) for v in value]
if len(items) == 1:
item_str = "(" + ", ".join(items) + ",)"
else:
item_str = "(" + ", ".join(items) + ")"
if value.__class__.__name__ == 'tuple':
return item_str
return "{0}({1})".format(value.__class__.__name__, item_str)
if isinstance(value, six.text_type):
ret = ascii(value) # returns type str in py2 and py3
if ret.startswith('u'):
ret = ret[1:]
# Convert /xhh into /u00hh.
# The two look-behind patterns address at least some of the cases that
# should not be converted: Up to 5 backslashes in repr() result are
# handled correctly. The failure that happens starting with 6
# backslashes and even numbers of backslashes above that is not
# dramatic: The /xhh is converted to /u00hh even though it shouldn't.
ret = re.sub(r'(?<![^\\]\\)(?<![^\\]\\\\\\)\\x([0-9a-fA-F]{2})',
r'\\u00\1', ret)
elif isinstance(value, six.binary_type):
ret = ascii(value) # returns type str in py2 and py3
if ret.startswith('b'):
ret = ret[1:]
elif isinstance(value, (six.integer_types, float)):
# str() on Python containers calls repr() on the items. PEP 3140
# that attempted to fix that, has been rejected. See
# https://www.python.org/dev/peps/pep-3140/.
# We don't want to make that same mistake, and because ascii() calls
# repr(), we call str() on the items explicitly. This makes a
# difference for example for all pywbem.CIMInt values.
ret = str(value)
else:
ret = ascii(value) # returns type str in py2 and py3
return ret | def function[_ascii2, parameter[value]]:
constant[
A variant of the `ascii()` built-in function known from Python 3 that:
(1) ensures ASCII-only output, and
(2) produces a nicer formatting for use in exception and warning messages
and other human consumption.
This function calls `ascii()` and post-processes its output as follows:
* For unicode strings, a leading 'u' is stripped (u'xxx' becomes 'xxx'),
if present.
* For byte strings, a leading 'b' is stripped (b'xxx' becomes 'xxx'),
if present.
* For unicode strings, non-ASCII Unicode characters in the range U+0000 to
U+00FF are represented as '/u00hh' instead of the confusing '/xhh'
('/' being a backslash, 'hh' being a 2-digit hex number).
This function correctly handles values of collection types such as list,
tuple, dict, and set, by producing the usual Python representation string
for them. If the type is not the standard Python type (i.e. OrderedDict
instead of dict), the type name is also shown in the result.
Returns:
str: ASCII string
]
if call[name[isinstance], parameter[name[value], name[Mapping]]] begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da2054a56c0>
variable[item_str] assign[=] binary_operation[binary_operation[constant[{] + call[constant[, ].join, parameter[name[items]]]] + constant[}]]
if compare[name[value].__class__.__name__ equal[==] constant[dict]] begin[:]
return[name[item_str]]
return[call[constant[{0}({1})].format, parameter[name[value].__class__.__name__, name[item_str]]]]
if call[name[isinstance], parameter[name[value], name[Set]]] begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da1b0b11840>
variable[item_str] assign[=] binary_operation[binary_operation[constant[{] + call[constant[, ].join, parameter[name[items]]]] + constant[}]]
if compare[name[value].__class__.__name__ equal[==] constant[set]] begin[:]
return[name[item_str]]
return[call[constant[{0}({1})].format, parameter[name[value].__class__.__name__, name[item_str]]]]
if call[name[isinstance], parameter[name[value], name[MutableSequence]]] begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da20c7941f0>
variable[item_str] assign[=] binary_operation[binary_operation[constant[[] + call[constant[, ].join, parameter[name[items]]]] + constant[]]]
if compare[name[value].__class__.__name__ equal[==] constant[list]] begin[:]
return[name[item_str]]
return[call[constant[{0}({1})].format, parameter[name[value].__class__.__name__, name[item_str]]]]
if <ast.BoolOp object at 0x7da20c794bb0> begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da20c794370>
if compare[call[name[len], parameter[name[items]]] equal[==] constant[1]] begin[:]
variable[item_str] assign[=] binary_operation[binary_operation[constant[(] + call[constant[, ].join, parameter[name[items]]]] + constant[,)]]
if compare[name[value].__class__.__name__ equal[==] constant[tuple]] begin[:]
return[name[item_str]]
return[call[constant[{0}({1})].format, parameter[name[value].__class__.__name__, name[item_str]]]]
if call[name[isinstance], parameter[name[value], name[six].text_type]] begin[:]
variable[ret] assign[=] call[name[ascii], parameter[name[value]]]
if call[name[ret].startswith, parameter[constant[u]]] begin[:]
variable[ret] assign[=] call[name[ret]][<ast.Slice object at 0x7da20c7950c0>]
variable[ret] assign[=] call[name[re].sub, parameter[constant[(?<![^\\]\\)(?<![^\\]\\\\\\)\\x([0-9a-fA-F]{2})], constant[\\u00\1], name[ret]]]
return[name[ret]] | keyword[def] identifier[_ascii2] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[Mapping] ):
identifier[items] =[ identifier[_ascii2] ( identifier[k] )+ literal[string] + identifier[_ascii2] ( identifier[v] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[value] )]
identifier[item_str] = literal[string] + literal[string] . identifier[join] ( identifier[items] )+ literal[string]
keyword[if] identifier[value] . identifier[__class__] . identifier[__name__] == literal[string] :
keyword[return] identifier[item_str]
keyword[return] literal[string] . identifier[format] ( identifier[value] . identifier[__class__] . identifier[__name__] , identifier[item_str] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[Set] ):
identifier[items] =[ identifier[_ascii2] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ]
identifier[item_str] = literal[string] + literal[string] . identifier[join] ( identifier[items] )+ literal[string]
keyword[if] identifier[value] . identifier[__class__] . identifier[__name__] == literal[string] :
keyword[return] identifier[item_str]
keyword[return] literal[string] . identifier[format] ( identifier[value] . identifier[__class__] . identifier[__name__] , identifier[item_str] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[MutableSequence] ):
identifier[items] =[ identifier[_ascii2] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ]
identifier[item_str] = literal[string] + literal[string] . identifier[join] ( identifier[items] )+ literal[string]
keyword[if] identifier[value] . identifier[__class__] . identifier[__name__] == literal[string] :
keyword[return] identifier[item_str]
keyword[return] literal[string] . identifier[format] ( identifier[value] . identifier[__class__] . identifier[__name__] , identifier[item_str] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[Sequence] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[six] . identifier[text_type] , identifier[six] . identifier[binary_type] )):
identifier[items] =[ identifier[_ascii2] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ]
keyword[if] identifier[len] ( identifier[items] )== literal[int] :
identifier[item_str] = literal[string] + literal[string] . identifier[join] ( identifier[items] )+ literal[string]
keyword[else] :
identifier[item_str] = literal[string] + literal[string] . identifier[join] ( identifier[items] )+ literal[string]
keyword[if] identifier[value] . identifier[__class__] . identifier[__name__] == literal[string] :
keyword[return] identifier[item_str]
keyword[return] literal[string] . identifier[format] ( identifier[value] . identifier[__class__] . identifier[__name__] , identifier[item_str] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[text_type] ):
identifier[ret] = identifier[ascii] ( identifier[value] )
keyword[if] identifier[ret] . identifier[startswith] ( literal[string] ):
identifier[ret] = identifier[ret] [ literal[int] :]
identifier[ret] = identifier[re] . identifier[sub] ( literal[string] ,
literal[string] , identifier[ret] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[binary_type] ):
identifier[ret] = identifier[ascii] ( identifier[value] )
keyword[if] identifier[ret] . identifier[startswith] ( literal[string] ):
identifier[ret] = identifier[ret] [ literal[int] :]
keyword[elif] identifier[isinstance] ( identifier[value] ,( identifier[six] . identifier[integer_types] , identifier[float] )):
identifier[ret] = identifier[str] ( identifier[value] )
keyword[else] :
identifier[ret] = identifier[ascii] ( identifier[value] )
keyword[return] identifier[ret] | def _ascii2(value):
"""
A variant of the `ascii()` built-in function known from Python 3 that:
(1) ensures ASCII-only output, and
(2) produces a nicer formatting for use in exception and warning messages
and other human consumption.
This function calls `ascii()` and post-processes its output as follows:
* For unicode strings, a leading 'u' is stripped (u'xxx' becomes 'xxx'),
if present.
* For byte strings, a leading 'b' is stripped (b'xxx' becomes 'xxx'),
if present.
* For unicode strings, non-ASCII Unicode characters in the range U+0000 to
U+00FF are represented as '/u00hh' instead of the confusing '/xhh'
('/' being a backslash, 'hh' being a 2-digit hex number).
This function correctly handles values of collection types such as list,
tuple, dict, and set, by producing the usual Python representation string
for them. If the type is not the standard Python type (i.e. OrderedDict
instead of dict), the type name is also shown in the result.
Returns:
str: ASCII string
"""
if isinstance(value, Mapping):
# NocaseDict in current impl. is not a Mapping; it uses
# its own repr() implementation (via ascii(), called further down)
items = [_ascii2(k) + ': ' + _ascii2(v) for (k, v) in six.iteritems(value)]
item_str = '{' + ', '.join(items) + '}'
if value.__class__.__name__ == 'dict':
return item_str # depends on [control=['if'], data=[]]
return '{0}({1})'.format(value.__class__.__name__, item_str) # depends on [control=['if'], data=[]]
if isinstance(value, Set):
items = [_ascii2(v) for v in value]
item_str = '{' + ', '.join(items) + '}'
if value.__class__.__name__ == 'set':
return item_str # depends on [control=['if'], data=[]]
return '{0}({1})'.format(value.__class__.__name__, item_str) # depends on [control=['if'], data=[]]
if isinstance(value, MutableSequence):
items = [_ascii2(v) for v in value]
item_str = '[' + ', '.join(items) + ']'
if value.__class__.__name__ == 'list':
return item_str # depends on [control=['if'], data=[]]
return '{0}({1})'.format(value.__class__.__name__, item_str) # depends on [control=['if'], data=[]]
if isinstance(value, Sequence) and (not isinstance(value, (six.text_type, six.binary_type))):
items = [_ascii2(v) for v in value]
if len(items) == 1:
item_str = '(' + ', '.join(items) + ',)' # depends on [control=['if'], data=[]]
else:
item_str = '(' + ', '.join(items) + ')'
if value.__class__.__name__ == 'tuple':
return item_str # depends on [control=['if'], data=[]]
return '{0}({1})'.format(value.__class__.__name__, item_str) # depends on [control=['if'], data=[]]
if isinstance(value, six.text_type):
ret = ascii(value) # returns type str in py2 and py3
if ret.startswith('u'):
ret = ret[1:] # depends on [control=['if'], data=[]]
# Convert /xhh into /u00hh.
# The two look-behind patterns address at least some of the cases that
# should not be converted: Up to 5 backslashes in repr() result are
# handled correctly. The failure that happens starting with 6
# backslashes and even numbers of backslashes above that is not
# dramatic: The /xhh is converted to /u00hh even though it shouldn't.
ret = re.sub('(?<![^\\\\]\\\\)(?<![^\\\\]\\\\\\\\\\\\)\\\\x([0-9a-fA-F]{2})', '\\\\u00\\1', ret) # depends on [control=['if'], data=[]]
elif isinstance(value, six.binary_type):
ret = ascii(value) # returns type str in py2 and py3
if ret.startswith('b'):
ret = ret[1:] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(value, (six.integer_types, float)):
# str() on Python containers calls repr() on the items. PEP 3140
# that attempted to fix that, has been rejected. See
# https://www.python.org/dev/peps/pep-3140/.
# We don't want to make that same mistake, and because ascii() calls
# repr(), we call str() on the items explicitly. This makes a
# difference for example for all pywbem.CIMInt values.
ret = str(value) # depends on [control=['if'], data=[]]
else:
ret = ascii(value) # returns type str in py2 and py3
return ret |
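
Assuming _ascii2 above is importable, a few calls illustrate the intended post-processing of ascii() output on Python 3:

print(_ascii2(u'caf\u00e9'))  # 'caf\u00e9'  (u-prefix stripped, \xe9 -> \u00e9)
print(_ascii2(b'bytes'))      # 'bytes'      (b-prefix stripped)
print(_ascii2([1, 'a']))      # [1, 'a']     (str() for numbers, ascii() for items)
print(_ascii2(('x',)))        # ('x',)       (1-tuple keeps its trailing comma)
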
def _normalize_params(params):
"""
Returns a normalized query string sorted first by key, then by value
excluding the ``realm`` and ``oauth_signature`` parameters as specified
here: http://oauth.net/core/1.0a/#rfc.section.9.1.1.
:param params:
:class:`dict` or :class:`list` of tuples.
"""
if isinstance(params, dict):
params = list(params.items())
# remove "realm" and "oauth_signature"
params = sorted([
(k, v) for k, v in params
if k not in ('oauth_signature', 'realm')
])
# sort
# convert to query string
qs = parse.urlencode(params)
# replace "+" to "%20"
qs = qs.replace('+', '%20')
# replace "%7E" to "%20"
qs = qs.replace('%7E', '~')
return qs | def function[_normalize_params, parameter[params]]:
constant[
Returns a normalized query string sorted first by key, then by value
excluding the ``realm`` and ``oauth_signature`` parameters as specified
here: http://oauth.net/core/1.0a/#rfc.section.9.1.1.
:param params:
:class:`dict` or :class:`list` of tuples.
]
if call[name[isinstance], parameter[name[params], name[dict]]] begin[:]
variable[params] assign[=] call[name[list], parameter[call[name[params].items, parameter[]]]]
variable[params] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b03b9b70>]]
variable[qs] assign[=] call[name[parse].urlencode, parameter[name[params]]]
variable[qs] assign[=] call[name[qs].replace, parameter[constant[+], constant[%20]]]
variable[qs] assign[=] call[name[qs].replace, parameter[constant[%7E], constant[~]]]
return[name[qs]] | keyword[def] identifier[_normalize_params] ( identifier[params] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[params] , identifier[dict] ):
identifier[params] = identifier[list] ( identifier[params] . identifier[items] ())
identifier[params] = identifier[sorted] ([
( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params]
keyword[if] identifier[k] keyword[not] keyword[in] ( literal[string] , literal[string] )
])
identifier[qs] = identifier[parse] . identifier[urlencode] ( identifier[params] )
identifier[qs] = identifier[qs] . identifier[replace] ( literal[string] , literal[string] )
identifier[qs] = identifier[qs] . identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[qs] | def _normalize_params(params):
"""
Returns a normalized query string sorted first by key, then by value
excluding the ``realm`` and ``oauth_signature`` parameters as specified
here: http://oauth.net/core/1.0a/#rfc.section.9.1.1.
:param params:
:class:`dict` or :class:`list` of tuples.
"""
if isinstance(params, dict):
params = list(params.items()) # depends on [control=['if'], data=[]]
# remove "realm" and "oauth_signature"
params = sorted([(k, v) for (k, v) in params if k not in ('oauth_signature', 'realm')])
# sort
# convert to query string
qs = parse.urlencode(params)
# replace "+" to "%20"
qs = qs.replace('+', '%20')
# replace "%7E" to "%20"
qs = qs.replace('%7E', '~')
return qs |
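
A quick check of the normalization rules, assuming _normalize_params above is in scope (parse refers to urllib.parse on Python 3):

params = {
    'realm': 'Example',        # dropped
    'oauth_signature': 'sig',  # dropped
    'b': '2 2',
    'a': '1~',
}
print(_normalize_params(params))  # a=1~&b=2%202
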
def joint_shape(*args) -> tuple:
"""Given a set of arrays, return the joint shape.
Parameters
----------
args : array-likes
Returns
-------
tuple of int
Joint shape.
"""
if len(args) == 0:
return ()
shape = []
shapes = [a.shape for a in args]
ndim = args[0].ndim
for i in range(ndim):
shape.append(max([s[i] for s in shapes]))
return tuple(shape) | def function[joint_shape, parameter[]]:
constant[Given a set of arrays, return the joint shape.
Parameters
----------
args : array-likes
Returns
-------
tuple of int
Joint shape.
]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:]
return[tuple[[]]]
variable[shape] assign[=] list[[]]
variable[shapes] assign[=] <ast.ListComp object at 0x7da1b0b9ea40>
variable[ndim] assign[=] call[name[args]][constant[0]].ndim
for taget[name[i]] in starred[call[name[range], parameter[name[ndim]]]] begin[:]
call[name[shape].append, parameter[call[name[max], parameter[<ast.ListComp object at 0x7da1b0b9ec80>]]]]
return[call[name[tuple], parameter[name[shape]]]] | keyword[def] identifier[joint_shape] (* identifier[args] )-> identifier[tuple] :
literal[string]
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
keyword[return] ()
identifier[shape] =[]
identifier[shapes] =[ identifier[a] . identifier[shape] keyword[for] identifier[a] keyword[in] identifier[args] ]
identifier[ndim] = identifier[args] [ literal[int] ]. identifier[ndim]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[ndim] ):
identifier[shape] . identifier[append] ( identifier[max] ([ identifier[s] [ identifier[i] ] keyword[for] identifier[s] keyword[in] identifier[shapes] ]))
keyword[return] identifier[tuple] ( identifier[shape] ) | def joint_shape(*args) -> tuple:
"""Given a set of arrays, return the joint shape.
Parameters
----------
args : array-likes
Returns
-------
tuple of int
Joint shape.
"""
if len(args) == 0:
return () # depends on [control=['if'], data=[]]
shape = []
shapes = [a.shape for a in args]
ndim = args[0].ndim
for i in range(ndim):
shape.append(max([s[i] for s in shapes])) # depends on [control=['for'], data=['i']]
return tuple(shape) |
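
Assuming joint_shape above is importable, any objects exposing .shape and .ndim work; with NumPy arrays:

import numpy as np

a = np.zeros((2, 5))
b = np.zeros((4, 3))
print(joint_shape(a, b))  # (4, 5) -- element-wise maximum of the shapes
print(joint_shape())      # ()
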
def update(self, other):
"""Merges other item with this object
Args:
other: Object containing items to merge into this object
Must be a dictionary or NdMapping type
"""
if isinstance(other, NdMapping):
dims = [d for d in other.kdims if d not in self.kdims]
if len(dims) == other.ndims:
raise KeyError("Cannot update with NdMapping that has"
" a different set of key dimensions.")
elif dims:
other = other.drop_dimension(dims)
other = other.data
for key, data in other.items():
self._add_item(key, data, sort=False)
if self.sort:
self._resort() | def function[update, parameter[self, other]]:
constant[Merges other item with this object
Args:
other: Object containing items to merge into this object
Must be a dictionary or NdMapping type
]
if call[name[isinstance], parameter[name[other], name[NdMapping]]] begin[:]
variable[dims] assign[=] <ast.ListComp object at 0x7da204623d60>
if compare[call[name[len], parameter[name[dims]]] equal[==] name[other].ndims] begin[:]
<ast.Raise object at 0x7da2046225c0>
variable[other] assign[=] name[other].data
for taget[tuple[[<ast.Name object at 0x7da204623250>, <ast.Name object at 0x7da204622800>]]] in starred[call[name[other].items, parameter[]]] begin[:]
call[name[self]._add_item, parameter[name[key], name[data]]]
if name[self].sort begin[:]
call[name[self]._resort, parameter[]] | keyword[def] identifier[update] ( identifier[self] , identifier[other] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[other] , identifier[NdMapping] ):
identifier[dims] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[other] . identifier[kdims] keyword[if] identifier[d] keyword[not] keyword[in] identifier[self] . identifier[kdims] ]
keyword[if] identifier[len] ( identifier[dims] )== identifier[other] . identifier[ndims] :
keyword[raise] identifier[KeyError] ( literal[string]
literal[string] )
keyword[elif] identifier[dims] :
identifier[other] = identifier[other] . identifier[drop_dimension] ( identifier[dims] )
identifier[other] = identifier[other] . identifier[data]
keyword[for] identifier[key] , identifier[data] keyword[in] identifier[other] . identifier[items] ():
identifier[self] . identifier[_add_item] ( identifier[key] , identifier[data] , identifier[sort] = keyword[False] )
keyword[if] identifier[self] . identifier[sort] :
identifier[self] . identifier[_resort] () | def update(self, other):
"""Merges other item with this object
Args:
other: Object containing items to merge into this object
Must be a dictionary or NdMapping type
"""
if isinstance(other, NdMapping):
dims = [d for d in other.kdims if d not in self.kdims]
if len(dims) == other.ndims:
raise KeyError('Cannot update with NdMapping that has a different set of key dimensions.') # depends on [control=['if'], data=[]]
elif dims:
other = other.drop_dimension(dims) # depends on [control=['if'], data=[]]
other = other.data # depends on [control=['if'], data=[]]
for (key, data) in other.items():
self._add_item(key, data, sort=False) # depends on [control=['for'], data=[]]
if self.sort:
self._resort() # depends on [control=['if'], data=[]] |
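
The design choice in update above -- inserting every item with sort=False and re-sorting once at the end -- avoids re-sorting the store on each insertion. A toy sketch of the same pattern with a plain dict standing in for the NdMapping data store (all names here are hypothetical):

class SortedStore:
    def __init__(self):
        self.data = {}
        self.sort = True

    def _add_item(self, key, value, sort=True):
        self.data[key] = value
        if sort:
            self._resort()

    def _resort(self):
        # rebuild the mapping in key order
        self.data = dict(sorted(self.data.items()))

    def update(self, other):
        # batch-insert without per-item sorting, then sort once
        for key, value in other.items():
            self._add_item(key, value, sort=False)
        if self.sort:
            self._resort()

store = SortedStore()
store.update({'b': 2, 'a': 1})
print(store.data)  # {'a': 1, 'b': 2}
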
def find_all_matches(finder, ireq, pre=False):
    # type: (PackageFinder, InstallRequirement, bool) -> Set[InstallationCandidate]
"""Find all matching dependencies using the supplied finder and the
given ireq.
:param finder: A package finder for discovering matching candidates.
:type finder: :class:`~pip._internal.index.PackageFinder`
:param ireq: An install requirement.
:type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :param pre: Whether pre-release candidates are allowed.
    :type pre: bool
    :return: The set of matching candidates.
    :rtype: set[:class:`~pip._internal.index.InstallationCandidate`]
"""
candidates = clean_requires_python(finder.find_all_candidates(ireq.name))
versions = {candidate.version for candidate in candidates}
allowed_versions = _get_filtered_versions(ireq, versions, pre)
if not pre and not allowed_versions:
allowed_versions = _get_filtered_versions(ireq, versions, True)
candidates = {c for c in candidates if c.version in allowed_versions}
return candidates | def function[find_all_matches, parameter[finder, ireq, pre]]:
constant[Find all matching dependencies using the supplied finder and the
given ireq.
:param finder: A package finder for discovering matching candidates.
:type finder: :class:`~pip._internal.index.PackageFinder`
:param ireq: An install requirement.
:type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
:return: A list of matching candidates.
:rtype: list[:class:`~pip._internal.index.InstallationCandidate`]
]
variable[candidates] assign[=] call[name[clean_requires_python], parameter[call[name[finder].find_all_candidates, parameter[name[ireq].name]]]]
variable[versions] assign[=] <ast.SetComp object at 0x7da2041da7a0>
variable[allowed_versions] assign[=] call[name[_get_filtered_versions], parameter[name[ireq], name[versions], name[pre]]]
if <ast.BoolOp object at 0x7da2041da530> begin[:]
variable[allowed_versions] assign[=] call[name[_get_filtered_versions], parameter[name[ireq], name[versions], constant[True]]]
variable[candidates] assign[=] <ast.SetComp object at 0x7da20c6c4fd0>
return[name[candidates]] | keyword[def] identifier[find_all_matches] ( identifier[finder] , identifier[ireq] , identifier[pre] = keyword[False] ):
literal[string]
identifier[candidates] = identifier[clean_requires_python] ( identifier[finder] . identifier[find_all_candidates] ( identifier[ireq] . identifier[name] ))
identifier[versions] ={ identifier[candidate] . identifier[version] keyword[for] identifier[candidate] keyword[in] identifier[candidates] }
identifier[allowed_versions] = identifier[_get_filtered_versions] ( identifier[ireq] , identifier[versions] , identifier[pre] )
keyword[if] keyword[not] identifier[pre] keyword[and] keyword[not] identifier[allowed_versions] :
identifier[allowed_versions] = identifier[_get_filtered_versions] ( identifier[ireq] , identifier[versions] , keyword[True] )
identifier[candidates] ={ identifier[c] keyword[for] identifier[c] keyword[in] identifier[candidates] keyword[if] identifier[c] . identifier[version] keyword[in] identifier[allowed_versions] }
keyword[return] identifier[candidates] | def find_all_matches(finder, ireq, pre=False):
# type: (PackageFinder, InstallRequirement, bool) -> List[InstallationCandidate]
'Find all matching dependencies using the supplied finder and the\n given ireq.\n\n :param finder: A package finder for discovering matching candidates.\n :type finder: :class:`~pip._internal.index.PackageFinder`\n :param ireq: An install requirement.\n :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`\n :return: A list of matching candidates.\n :rtype: list[:class:`~pip._internal.index.InstallationCandidate`]\n '
candidates = clean_requires_python(finder.find_all_candidates(ireq.name))
versions = {candidate.version for candidate in candidates}
allowed_versions = _get_filtered_versions(ireq, versions, pre)
if not pre and (not allowed_versions):
allowed_versions = _get_filtered_versions(ireq, versions, True) # depends on [control=['if'], data=[]]
candidates = {c for c in candidates if c.version in allowed_versions}
return candidates |
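
The pre-release fallback that find_all_matches relies on (via the _get_filtered_versions helper, not shown) can be reproduced with the packaging library; this sketches the concept rather than pip's internal code:

from packaging.specifiers import SpecifierSet

versions = ['1.0', '2.0a1']
spec = SpecifierSet('>=1.5')

allowed = list(spec.filter(versions, prereleases=False))
if not allowed:
    # same fallback as above: retry with pre-releases allowed
    allowed = list(spec.filter(versions, prereleases=True))
print(allowed)  # ['2.0a1']
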
def add_file(self,
asset_data,
label=None,
asset_type=None,
asset_content_type=None,
asset_content_record_types=None,
asset_name='',
asset_description=''):
"""stub"""
if asset_data is None:
raise NullArgument('asset_data cannot be None')
if not isinstance(asset_data, DataInputStream):
raise InvalidArgument('asset_data must be instance of DataInputStream')
if asset_type is not None and not isinstance(asset_type, Type):
raise InvalidArgument('asset_type must be an instance of Type')
if asset_content_type is not None and not isinstance(asset_content_type, Type):
raise InvalidArgument('asset_content_type must be an instance of Type')
if asset_content_record_types is not None and not isinstance(asset_content_record_types, list):
raise InvalidArgument('asset_content_record_types must be an instance of list')
if asset_content_record_types is not None:
for record_type in asset_content_record_types:
if not isinstance(record_type, Type):
raise InvalidArgument('non-Type present in asset_content_record_types')
if label is None:
label = self._label_metadata['default_string_values'][0]
else:
if not self.my_osid_object_form._is_valid_string(
label, self.get_label_metadata()) or '.' in label:
raise InvalidArgument('label')
asset_id, asset_content_id = self.create_asset(asset_data=asset_data,
asset_type=asset_type,
asset_content_type=asset_content_type,
asset_content_record_types=asset_content_record_types,
display_name=asset_name,
description=asset_description)
self.add_asset(asset_id,
asset_content_id,
label,
asset_content_type) | def function[add_file, parameter[self, asset_data, label, asset_type, asset_content_type, asset_content_record_types, asset_name, asset_description]]:
constant[stub]
if compare[name[asset_data] is constant[None]] begin[:]
<ast.Raise object at 0x7da204620940>
if <ast.UnaryOp object at 0x7da204622470> begin[:]
<ast.Raise object at 0x7da204620850>
if <ast.BoolOp object at 0x7da204622410> begin[:]
<ast.Raise object at 0x7da204620550>
if <ast.BoolOp object at 0x7da204621b10> begin[:]
<ast.Raise object at 0x7da2046215d0>
if <ast.BoolOp object at 0x7da204620eb0> begin[:]
<ast.Raise object at 0x7da204620e80>
if compare[name[asset_content_record_types] is_not constant[None]] begin[:]
for taget[name[record_type]] in starred[name[asset_content_record_types]] begin[:]
if <ast.UnaryOp object at 0x7da204623d30> begin[:]
<ast.Raise object at 0x7da204620ca0>
if compare[name[label] is constant[None]] begin[:]
variable[label] assign[=] call[call[name[self]._label_metadata][constant[default_string_values]]][constant[0]]
<ast.Tuple object at 0x7da204623700> assign[=] call[name[self].create_asset, parameter[]]
call[name[self].add_asset, parameter[name[asset_id], name[asset_content_id], name[label], name[asset_content_type]]] | keyword[def] identifier[add_file] ( identifier[self] ,
identifier[asset_data] ,
identifier[label] = keyword[None] ,
identifier[asset_type] = keyword[None] ,
identifier[asset_content_type] = keyword[None] ,
identifier[asset_content_record_types] = keyword[None] ,
identifier[asset_name] = literal[string] ,
identifier[asset_description] = literal[string] ):
literal[string]
keyword[if] identifier[asset_data] keyword[is] keyword[None] :
keyword[raise] identifier[NullArgument] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[asset_data] , identifier[DataInputStream] ):
keyword[raise] identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[asset_type] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[asset_type] , identifier[Type] ):
keyword[raise] identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[asset_content_type] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[asset_content_type] , identifier[Type] ):
keyword[raise] identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[asset_content_record_types] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[asset_content_record_types] , identifier[list] ):
keyword[raise] identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[asset_content_record_types] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[record_type] keyword[in] identifier[asset_content_record_types] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[record_type] , identifier[Type] ):
keyword[raise] identifier[InvalidArgument] ( literal[string] )
keyword[if] identifier[label] keyword[is] keyword[None] :
identifier[label] = identifier[self] . identifier[_label_metadata] [ literal[string] ][ literal[int] ]
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[my_osid_object_form] . identifier[_is_valid_string] (
identifier[label] , identifier[self] . identifier[get_label_metadata] ()) keyword[or] literal[string] keyword[in] identifier[label] :
keyword[raise] identifier[InvalidArgument] ( literal[string] )
identifier[asset_id] , identifier[asset_content_id] = identifier[self] . identifier[create_asset] ( identifier[asset_data] = identifier[asset_data] ,
identifier[asset_type] = identifier[asset_type] ,
identifier[asset_content_type] = identifier[asset_content_type] ,
identifier[asset_content_record_types] = identifier[asset_content_record_types] ,
identifier[display_name] = identifier[asset_name] ,
identifier[description] = identifier[asset_description] )
identifier[self] . identifier[add_asset] ( identifier[asset_id] ,
identifier[asset_content_id] ,
identifier[label] ,
identifier[asset_content_type] ) | def add_file(self, asset_data, label=None, asset_type=None, asset_content_type=None, asset_content_record_types=None, asset_name='', asset_description=''):
"""stub"""
if asset_data is None:
raise NullArgument('asset_data cannot be None') # depends on [control=['if'], data=[]]
if not isinstance(asset_data, DataInputStream):
raise InvalidArgument('asset_data must be instance of DataInputStream') # depends on [control=['if'], data=[]]
if asset_type is not None and (not isinstance(asset_type, Type)):
raise InvalidArgument('asset_type must be an instance of Type') # depends on [control=['if'], data=[]]
if asset_content_type is not None and (not isinstance(asset_content_type, Type)):
raise InvalidArgument('asset_content_type must be an instance of Type') # depends on [control=['if'], data=[]]
if asset_content_record_types is not None and (not isinstance(asset_content_record_types, list)):
raise InvalidArgument('asset_content_record_types must be an instance of list') # depends on [control=['if'], data=[]]
if asset_content_record_types is not None:
for record_type in asset_content_record_types:
if not isinstance(record_type, Type):
raise InvalidArgument('non-Type present in asset_content_record_types') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['record_type']] # depends on [control=['if'], data=['asset_content_record_types']]
if label is None:
label = self._label_metadata['default_string_values'][0] # depends on [control=['if'], data=['label']]
elif not self.my_osid_object_form._is_valid_string(label, self.get_label_metadata()) or '.' in label:
raise InvalidArgument('label') # depends on [control=['if'], data=[]]
(asset_id, asset_content_id) = self.create_asset(asset_data=asset_data, asset_type=asset_type, asset_content_type=asset_content_type, asset_content_record_types=asset_content_record_types, display_name=asset_name, description=asset_description)
self.add_asset(asset_id, asset_content_id, label, asset_content_type) |
def _restart_on_unavailable(restart):
"""Restart iteration after :exc:`.ServiceUnavailable`.
:type restart: callable
:param restart: curried function returning iterator
"""
resume_token = b""
item_buffer = []
iterator = restart()
while True:
try:
for item in iterator:
item_buffer.append(item)
if item.resume_token:
resume_token = item.resume_token
break
except ServiceUnavailable:
del item_buffer[:]
iterator = restart(resume_token=resume_token)
continue
if len(item_buffer) == 0:
break
for item in item_buffer:
yield item
del item_buffer[:] | def function[_restart_on_unavailable, parameter[restart]]:
constant[Restart iteration after :exc:`.ServiceUnavailable`.
:type restart: callable
:param restart: curried function returning iterator
]
variable[resume_token] assign[=] constant[b'']
variable[item_buffer] assign[=] list[[]]
variable[iterator] assign[=] call[name[restart], parameter[]]
while constant[True] begin[:]
<ast.Try object at 0x7da18f00c640>
if compare[call[name[len], parameter[name[item_buffer]]] equal[==] constant[0]] begin[:]
break
for taget[name[item]] in starred[name[item_buffer]] begin[:]
<ast.Yield object at 0x7da20c6a9a50>
<ast.Delete object at 0x7da20c6ab8e0> | keyword[def] identifier[_restart_on_unavailable] ( identifier[restart] ):
literal[string]
identifier[resume_token] = literal[string]
identifier[item_buffer] =[]
identifier[iterator] = identifier[restart] ()
keyword[while] keyword[True] :
keyword[try] :
keyword[for] identifier[item] keyword[in] identifier[iterator] :
identifier[item_buffer] . identifier[append] ( identifier[item] )
keyword[if] identifier[item] . identifier[resume_token] :
identifier[resume_token] = identifier[item] . identifier[resume_token]
keyword[break]
keyword[except] identifier[ServiceUnavailable] :
keyword[del] identifier[item_buffer] [:]
identifier[iterator] = identifier[restart] ( identifier[resume_token] = identifier[resume_token] )
keyword[continue]
keyword[if] identifier[len] ( identifier[item_buffer] )== literal[int] :
keyword[break]
keyword[for] identifier[item] keyword[in] identifier[item_buffer] :
keyword[yield] identifier[item]
keyword[del] identifier[item_buffer] [:] | def _restart_on_unavailable(restart):
"""Restart iteration after :exc:`.ServiceUnavailable`.
:type restart: callable
:param restart: curried function returning iterator
"""
resume_token = b''
item_buffer = []
iterator = restart()
while True:
try:
for item in iterator:
item_buffer.append(item)
if item.resume_token:
resume_token = item.resume_token
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['try'], data=[]]
except ServiceUnavailable:
del item_buffer[:]
iterator = restart(resume_token=resume_token)
continue # depends on [control=['except'], data=[]]
if len(item_buffer) == 0:
break # depends on [control=['if'], data=[]]
for item in item_buffer:
yield item # depends on [control=['for'], data=['item']]
del item_buffer[:] # depends on [control=['while'], data=[]] |
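
The happy path of _restart_on_unavailable can be exercised with a fake restartable stream; the retry branch additionally needs the real google.api_core ServiceUnavailable exception, so it is not triggered here. The names below are stand-ins:

from collections import namedtuple

Item = namedtuple('Item', ['value', 'resume_token'])

def fake_stream(resume_token=b''):
    # stand-in for a Spanner streaming call that can resume after a token
    items = [Item(1, b''), Item(2, b't2'), Item(3, b''), Item(4, b't4')]
    start = {b'': 0, b't2': 2, b't4': 4}[resume_token]
    return iter(items[start:])

print([item.value for item in _restart_on_unavailable(fake_stream)])
# [1, 2, 3, 4] -- yielded in chunks delimited by the resume tokens
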
def configureWhere(self, where):
"""Configure the working directory or directories for the test run.
"""
from nose.importer import add_path
self.workingDir = None
where = tolist(where)
warned = False
for path in where:
if not self.workingDir:
abs_path = absdir(path)
if abs_path is None:
raise ValueError("Working directory %s not found, or "
"not a directory" % path)
log.info("Set working dir to %s", abs_path)
self.workingDir = abs_path
if self.addPaths and \
os.path.exists(os.path.join(abs_path, '__init__.py')):
log.info("Working directory %s is a package; "
"adding to sys.path" % abs_path)
add_path(abs_path)
continue
if not warned:
warn("Use of multiple -w arguments is deprecated and "
"support may be removed in a future release. You can "
"get the same behavior by passing directories without "
"the -w argument on the command line, or by using the "
"--tests argument in a configuration file.",
DeprecationWarning)
self.testNames.append(path) | def function[configureWhere, parameter[self, where]]:
constant[Configure the working directory or directories for the test run.
]
from relative_module[nose.importer] import module[add_path]
name[self].workingDir assign[=] constant[None]
variable[where] assign[=] call[name[tolist], parameter[name[where]]]
variable[warned] assign[=] constant[False]
for taget[name[path]] in starred[name[where]] begin[:]
if <ast.UnaryOp object at 0x7da18ede6920> begin[:]
variable[abs_path] assign[=] call[name[absdir], parameter[name[path]]]
if compare[name[abs_path] is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc04670>
call[name[log].info, parameter[constant[Set working dir to %s], name[abs_path]]]
name[self].workingDir assign[=] name[abs_path]
if <ast.BoolOp object at 0x7da18dc07af0> begin[:]
call[name[log].info, parameter[binary_operation[constant[Working directory %s is a package; adding to sys.path] <ast.Mod object at 0x7da2590d6920> name[abs_path]]]]
call[name[add_path], parameter[name[abs_path]]]
continue
if <ast.UnaryOp object at 0x7da18dc04d00> begin[:]
call[name[warn], parameter[constant[Use of multiple -w arguments is deprecated and support may be removed in a future release. You can get the same behavior by passing directories without the -w argument on the command line, or by using the --tests argument in a configuration file.], name[DeprecationWarning]]]
call[name[self].testNames.append, parameter[name[path]]] | keyword[def] identifier[configureWhere] ( identifier[self] , identifier[where] ):
literal[string]
keyword[from] identifier[nose] . identifier[importer] keyword[import] identifier[add_path]
identifier[self] . identifier[workingDir] = keyword[None]
identifier[where] = identifier[tolist] ( identifier[where] )
identifier[warned] = keyword[False]
keyword[for] identifier[path] keyword[in] identifier[where] :
keyword[if] keyword[not] identifier[self] . identifier[workingDir] :
identifier[abs_path] = identifier[absdir] ( identifier[path] )
keyword[if] identifier[abs_path] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[path] )
identifier[log] . identifier[info] ( literal[string] , identifier[abs_path] )
identifier[self] . identifier[workingDir] = identifier[abs_path]
keyword[if] identifier[self] . identifier[addPaths] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[abs_path] , literal[string] )):
identifier[log] . identifier[info] ( literal[string]
literal[string] % identifier[abs_path] )
identifier[add_path] ( identifier[abs_path] )
keyword[continue]
keyword[if] keyword[not] identifier[warned] :
identifier[warn] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ,
identifier[DeprecationWarning] )
identifier[self] . identifier[testNames] . identifier[append] ( identifier[path] ) | def configureWhere(self, where):
"""Configure the working directory or directories for the test run.
"""
from nose.importer import add_path
self.workingDir = None
where = tolist(where)
warned = False
for path in where:
if not self.workingDir:
abs_path = absdir(path)
if abs_path is None:
raise ValueError('Working directory %s not found, or not a directory' % path) # depends on [control=['if'], data=[]]
log.info('Set working dir to %s', abs_path)
self.workingDir = abs_path
if self.addPaths and os.path.exists(os.path.join(abs_path, '__init__.py')):
log.info('Working directory %s is a package; adding to sys.path' % abs_path)
add_path(abs_path) # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]]
if not warned:
warn('Use of multiple -w arguments is deprecated and support may be removed in a future release. You can get the same behavior by passing directories without the -w argument on the command line, or by using the --tests argument in a configuration file.', DeprecationWarning) # depends on [control=['if'], data=[]]
self.testNames.append(path) # depends on [control=['for'], data=['path']] |
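
absdir and add_path come from nose's own utilities; assuming absdir behaves roughly as its name suggests, a simplified stand-in looks like this (not nose's actual implementation):

import os

def absdir(path):
    # absolute path when the argument names an existing directory, else None
    path = os.path.abspath(os.path.expanduser(path))
    return path if os.path.isdir(path) else None

print(absdir('.') is not None)  # True
print(absdir('no/such/dir'))    # None
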
def raw(self) -> str:
"""
Return signed raw format string of the Membership instance
:return:
"""
return """Version: {0}
Type: Membership
Currency: {1}
Issuer: {2}
Block: {3}
Membership: {4}
UserID: {5}
CertTS: {6}
""".format(self.version,
self.currency,
self.issuer,
self.membership_ts,
self.membership_type,
self.uid,
self.identity_ts) | def function[raw, parameter[self]]:
constant[
Return signed raw format string of the Membership instance
:return:
]
return[call[constant[Version: {0}
Type: Membership
Currency: {1}
Issuer: {2}
Block: {3}
Membership: {4}
UserID: {5}
CertTS: {6}
].format, parameter[name[self].version, name[self].currency, name[self].issuer, name[self].membership_ts, name[self].membership_type, name[self].uid, name[self].identity_ts]]] | keyword[def] identifier[raw] ( identifier[self] )-> identifier[str] :
literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[version] ,
identifier[self] . identifier[currency] ,
identifier[self] . identifier[issuer] ,
identifier[self] . identifier[membership_ts] ,
identifier[self] . identifier[membership_type] ,
identifier[self] . identifier[uid] ,
identifier[self] . identifier[identity_ts] ) | def raw(self) -> str:
"""
Return signed raw format string of the Membership instance
:return:
"""
return 'Version: {0}\nType: Membership\nCurrency: {1}\nIssuer: {2}\nBlock: {3}\nMembership: {4}\nUserID: {5}\nCertTS: {6}\n'.format(self.version, self.currency, self.issuer, self.membership_ts, self.membership_type, self.uid, self.identity_ts) |
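
For reference, a minimal standalone sketch of the string `raw()` builds; every value below is a made-up placeholder standing in for the instance attributes, not data from a real Membership document:

# Minimal sketch: reproduce the raw() template with invented values.
template = ("Version: {0}\n"
            "Type: Membership\n"
            "Currency: {1}\n"
            "Issuer: {2}\n"
            "Block: {3}\n"
            "Membership: {4}\n"
            "UserID: {5}\n"
            "CertTS: {6}\n")
print(template.format(10, "g1", "<issuer pubkey>", "0-<block hash>",
                      "IN", "alice", "0-<cert hash>"))
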
def index(index, length):
"""Generates an index.
:param index: The index, can be positive or negative.
:param length: The length of the sequence to index.
:raises: IndexError
Negative indices are typically used to index a sequence in reverse order.
But to use them, the indexed object must convert them to the correct,
positive index. This function can be used to do this.
"""
if index < 0:
index += length
if 0 <= index < length:
return index
raise IndexError() | def function[index, parameter[index, length]]:
constant[Generates an index.
:param index: The index, can be positive or negative.
:param length: The length of the sequence to index.
:raises: IndexError
Negative indices are typically used to index a sequence in reverse order.
But to use them, the indexed object must convert them to the correct,
positive index. This function can be used to do this.
]
if compare[name[index] less[<] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b0b118d0>
if compare[constant[0] less_or_equal[<=] name[index]] begin[:]
return[name[index]]
<ast.Raise object at 0x7da1b0b11570> | keyword[def] identifier[index] ( identifier[index] , identifier[length] ):
literal[string]
keyword[if] identifier[index] < literal[int] :
identifier[index] += identifier[length]
keyword[if] literal[int] <= identifier[index] < identifier[length] :
keyword[return] identifier[index]
keyword[raise] identifier[IndexError] () | def index(index, length):
"""Generates an index.
:param index: The index, can be positive or negative.
:param length: The length of the sequence to index.
:raises: IndexError
Negative indices are typically used to index a sequence in reverse order.
But to use them, the indexed object must convert them to the correct,
positive index. This function can be used to do this.
"""
if index < 0:
index += length # depends on [control=['if'], data=['index']]
if 0 <= index < length:
return index # depends on [control=['if'], data=['index']]
raise IndexError() |
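
A quick usage sketch, assuming the `index` function above is in scope: negative indices are shifted by the sequence length, and anything still out of range raises.

length = 5
assert index(2, length) == 2
assert index(-1, length) == 4   # -1 + 5 -> last element
assert index(-5, length) == 0   # most negative valid index
try:
    index(5, length)            # one past the end -> IndexError
except IndexError:
    print('out of range, as expected')
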
def next_page(self):
"""
Fetches next result set.
:return: Collection object.
"""
for link in self.links:
if link.rel.lower() == 'next':
return self._load(link.href)
raise PaginationError('No more entries.') | def function[next_page, parameter[self]]:
constant[
Fetches next result set.
:return: Collection object.
]
for taget[name[link]] in starred[name[self].links] begin[:]
if compare[call[name[link].rel.lower, parameter[]] equal[==] constant[next]] begin[:]
return[call[name[self]._load, parameter[name[link].href]]]
<ast.Raise object at 0x7da1b0591fc0> | keyword[def] identifier[next_page] ( identifier[self] ):
literal[string]
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[links] :
keyword[if] identifier[link] . identifier[rel] . identifier[lower] ()== literal[string] :
keyword[return] identifier[self] . identifier[_load] ( identifier[link] . identifier[href] )
keyword[raise] identifier[PaginationError] ( literal[string] ) | def next_page(self):
"""
Fetches next result set.
:return: Collection object.
"""
for link in self.links:
if link.rel.lower() == 'next':
return self._load(link.href) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['link']]
raise PaginationError('No more entries.') |
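
A runnable sketch of the drain-all-pages pattern this method supports. `FakePage` is a stub standing in for the real Collection; only the raise-on-exhaustion contract of `next_page()` is reproduced.

class PaginationError(Exception):
    pass

class FakePage:
    # Stub collection: a list of items plus an optional successor page.
    def __init__(self, items, nxt=None):
        self.items, self._next = items, nxt
    def next_page(self):
        if self._next is None:
            raise PaginationError('No more entries.')
        return self._next

page = FakePage([1, 2], nxt=FakePage([3, 4]))
seen = []
while True:
    seen.extend(page.items)
    try:
        page = page.next_page()
    except PaginationError:
        break
print(seen)  # [1, 2, 3, 4]
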
def topic(self):
"""
| Comment: The id of the topic that the post belongs to
"""
if self.api and self.topic_id:
return self.api._get_topic(self.topic_id) | def function[topic, parameter[self]]:
constant[
| Comment: The id of the topic that the post belongs to
]
if <ast.BoolOp object at 0x7da20c7c8b80> begin[:]
return[call[name[self].api._get_topic, parameter[name[self].topic_id]]] | keyword[def] identifier[topic] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[api] keyword[and] identifier[self] . identifier[topic_id] :
keyword[return] identifier[self] . identifier[api] . identifier[_get_topic] ( identifier[self] . identifier[topic_id] ) | def topic(self):
"""
| Comment: The id of the topic that the post belongs to
"""
if self.api and self.topic_id:
return self.api._get_topic(self.topic_id) # depends on [control=['if'], data=[]] |
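
A self-contained sketch of the guarded lazy-lookup pattern the property uses; `FakeApi` is a stub, not the real client, and returning `None` when either guard fails mirrors the implicit return above.

class FakeApi:
    # Stub: pretend the topic lives in a server-side store.
    def _get_topic(self, topic_id):
        return {'id': topic_id, 'title': 'Welcome'}

class Post:
    def __init__(self, api, topic_id):
        self.api, self.topic_id = api, topic_id
    @property
    def topic(self):
        if self.api and self.topic_id:
            return self.api._get_topic(self.topic_id)

print(Post(FakeApi(), 7).topic)  # {'id': 7, 'title': 'Welcome'}
print(Post(None, 7).topic)       # None: no api bound, guard fails
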
def extractdata(pattern, text=None, filepath=None):
"""
Read through an entire file or body of text one line at a time. Parse each line that matches the supplied
pattern string and ignore the rest.
If *text* is supplied, it will be parsed according to the *pattern* string.
If *text* is not supplied, the file at *filepath* will be opened and parsed.
"""
y = []
if text is None:
textsource = open(filepath, 'r')
else:
textsource = text.splitlines()
for line in textsource:
match = scanf(pattern, line)
if match:
if len(y) == 0:
y = [[s] for s in match]
else:
for i, ydata in enumerate(y):
ydata.append(match[i])
if text is None:
textsource.close()
return y | def function[extractdata, parameter[pattern, text, filepath]]:
constant[
Read through an entire file or body of text one line at a time. Parse each line that matches the supplied
pattern string and ignore the rest.
If *text* is supplied, it will be parsed according to the *pattern* string.
If *text* is not supplied, the file at *filepath* will be opened and parsed.
]
variable[y] assign[=] list[[]]
if compare[name[text] is constant[None]] begin[:]
variable[textsource] assign[=] call[name[open], parameter[name[filepath], constant[r]]]
for taget[name[line]] in starred[name[textsource]] begin[:]
variable[match] assign[=] call[name[scanf], parameter[name[pattern], name[line]]]
if name[match] begin[:]
if compare[call[name[len], parameter[name[y]]] equal[==] constant[0]] begin[:]
variable[y] assign[=] <ast.ListComp object at 0x7da20c992740>
if compare[name[text] is constant[None]] begin[:]
call[name[textsource].close, parameter[]]
return[name[y]] | keyword[def] identifier[extractdata] ( identifier[pattern] , identifier[text] = keyword[None] , identifier[filepath] = keyword[None] ):
literal[string]
identifier[y] =[]
keyword[if] identifier[text] keyword[is] keyword[None] :
identifier[textsource] = identifier[open] ( identifier[filepath] , literal[string] )
keyword[else] :
identifier[textsource] = identifier[text] . identifier[splitlines] ()
keyword[for] identifier[line] keyword[in] identifier[textsource] :
identifier[match] = identifier[scanf] ( identifier[pattern] , identifier[line] )
keyword[if] identifier[match] :
keyword[if] identifier[len] ( identifier[y] )== literal[int] :
identifier[y] =[[ identifier[s] ] keyword[for] identifier[s] keyword[in] identifier[match] ]
keyword[else] :
keyword[for] identifier[i] , identifier[ydata] keyword[in] identifier[enumerate] ( identifier[y] ):
identifier[ydata] . identifier[append] ( identifier[match] [ identifier[i] ])
keyword[if] identifier[text] keyword[is] keyword[None] :
identifier[textsource] . identifier[close] ()
keyword[return] identifier[y] | def extractdata(pattern, text=None, filepath=None):
"""
Read through an entire file or body of text one line at a time. Parse each line that matches the supplied
pattern string and ignore the rest.
If *text* is supplied, it will be parsed according to the *pattern* string.
If *text* is not supplied, the file at *filepath* will be opened and parsed.
"""
y = []
if text is None:
textsource = open(filepath, 'r') # depends on [control=['if'], data=[]]
else:
textsource = text.splitlines()
for line in textsource:
match = scanf(pattern, line)
if match:
if len(y) == 0:
y = [[s] for s in match] # depends on [control=['if'], data=[]]
else:
for (i, ydata) in enumerate(y):
ydata.append(match[i]) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
if text is None:
textsource.close() # depends on [control=['if'], data=[]]
return y |
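
The external `scanf` helper is not shown in this row, so here is a self-contained analogue of the column-accumulation logic using `re` capture groups instead of a scanf pattern:

import re

def extract_columns(regex, text):
    # Same accumulation scheme as extractdata(): one list per captured
    # group, grown a row at a time as matching lines are found.
    y = []
    for line in text.splitlines():
        m = re.match(regex, line)
        if m:
            row = m.groups()
            if not y:
                y = [[v] for v in row]
            else:
                for i, col in enumerate(y):
                    col.append(row[i])
    return y

text = "x=1 y=2\nskip me\nx=3 y=4"
print(extract_columns(r"x=(\d+) y=(\d+)", text))  # [['1', '3'], ['2', '4']]
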
def create_model(self):
"""Return :class:`NativeModel` fully loaded into memory."""
properties = {
'name': self.name,
'biomass': self.biomass_reaction,
'extracellular': self.extracellular_compartment,
'default_compartment': self.default_compartment,
'default_flux_limit': self.default_flux_limit
}
if self.context is not None:
git_version = util.git_try_describe(self.context.basepath)
properties['version_string'] = git_version
model = NativeModel(properties)
# Load compartments into model
compartment_iter, boundaries = self.parse_compartments()
for compartment in compartment_iter:
model.compartments.add_entry(compartment)
model.compartment_boundaries.update(boundaries)
# Load compounds into model
for compound in self.parse_compounds():
if compound.id in model.compounds:
existing_entry = model.compounds[compound.id]
common_props = set(compound.properties).intersection(
existing_entry.properties).difference({'id'})
if len(common_props) > 0:
logger.warning(
'Compound entry {} at {} overrides already defined'
' properties: {}'.format(
compound.id, compound.filemark, common_props))
properties = dict(compound.properties)
properties.update(existing_entry.properties)
compound = CompoundEntry(
properties, filemark=compound.filemark)
model.compounds.add_entry(compound)
# Load reactions into model
for reaction in self.parse_reactions():
if reaction.id in model.reactions:
existing_entry = model.reactions[reaction.id]
common_props = set(reaction.properties).intersection(
existing_entry.properties).difference({'id'})
if len(common_props) > 0:
logger.warning(
'Reaction entry {} at {} overrides already defined'
' properties: {}'.format(
reaction.id, reaction.filemark, common_props))
properties = dict(reaction.properties)
properties.update(existing_entry.properties)
reaction = ReactionEntry(
properties, filemark=reaction.filemark)
model.reactions.add_entry(reaction)
for exchange_def in self.parse_exchange():
model.exchange[exchange_def[0]] = exchange_def
for limit in self.parse_limits():
model.limits[limit[0]] = limit
if self.has_model_definition():
for model_reaction in self.parse_model():
model.model[model_reaction] = None
else:
for reaction in model.reactions:
model.model[reaction.id] = None
return model | def function[create_model, parameter[self]]:
constant[Return :class:`NativeModel` fully loaded into memory.]
variable[properties] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5420>, <ast.Constant object at 0x7da20c6c4e50>, <ast.Constant object at 0x7da20c6c4e80>, <ast.Constant object at 0x7da20c6c7b20>, <ast.Constant object at 0x7da20c6c6d70>], [<ast.Attribute object at 0x7da20c6c6920>, <ast.Attribute object at 0x7da20c6c7790>, <ast.Attribute object at 0x7da20c6c6830>, <ast.Attribute object at 0x7da20c6c6dd0>, <ast.Attribute object at 0x7da20c6c5000>]]
if compare[name[self].context is_not constant[None]] begin[:]
variable[git_version] assign[=] call[name[util].git_try_describe, parameter[name[self].context.basepath]]
call[name[properties]][constant[version_string]] assign[=] name[git_version]
variable[model] assign[=] call[name[NativeModel], parameter[name[properties]]]
<ast.Tuple object at 0x7da20c6c7670> assign[=] call[name[self].parse_compartments, parameter[]]
for taget[name[compartment]] in starred[name[compartment_iter]] begin[:]
call[name[model].compartments.add_entry, parameter[name[compartment]]]
call[name[model].compartment_boundaries.update, parameter[name[boundaries]]]
for taget[name[compound]] in starred[call[name[self].parse_compounds, parameter[]]] begin[:]
if compare[name[compound].id in name[model].compounds] begin[:]
variable[existing_entry] assign[=] call[name[model].compounds][name[compound].id]
variable[common_props] assign[=] call[call[call[name[set], parameter[name[compound].properties]].intersection, parameter[name[existing_entry].properties]].difference, parameter[<ast.Set object at 0x7da20c6c7040>]]
if compare[call[name[len], parameter[name[common_props]]] greater[>] constant[0]] begin[:]
call[name[logger].warning, parameter[call[constant[Compound entry {} at {} overrides already defined properties: {}].format, parameter[name[compound].id, name[compound].filemark, name[common_props]]]]]
variable[properties] assign[=] call[name[dict], parameter[name[compound].properties]]
call[name[properties].update, parameter[name[existing_entry].properties]]
variable[compound] assign[=] call[name[CompoundEntry], parameter[name[properties]]]
call[name[model].compounds.add_entry, parameter[name[compound]]]
for taget[name[reaction]] in starred[call[name[self].parse_reactions, parameter[]]] begin[:]
if compare[name[reaction].id in name[model].reactions] begin[:]
variable[existing_entry] assign[=] call[name[model].reactions][name[reaction].id]
variable[common_props] assign[=] call[call[call[name[set], parameter[name[reaction].properties]].intersection, parameter[name[existing_entry].properties]].difference, parameter[<ast.Set object at 0x7da20c6c6050>]]
if compare[call[name[len], parameter[name[common_props]]] greater[>] constant[0]] begin[:]
call[name[logger].warning, parameter[call[constant[Reaction entry {} at {} overrides already defined properties: {}].format, parameter[name[reaction].id, name[reaction].filemark, name[common_props]]]]]
variable[properties] assign[=] call[name[dict], parameter[name[reaction].properties]]
call[name[properties].update, parameter[name[existing_entry].properties]]
variable[reaction] assign[=] call[name[ReactionEntry], parameter[name[properties]]]
call[name[model].reactions.add_entry, parameter[name[reaction]]]
for taget[name[exchange_def]] in starred[call[name[self].parse_exchange, parameter[]]] begin[:]
call[name[model].exchange][call[name[exchange_def]][constant[0]]] assign[=] name[exchange_def]
for taget[name[limit]] in starred[call[name[self].parse_limits, parameter[]]] begin[:]
call[name[model].limits][call[name[limit]][constant[0]]] assign[=] name[limit]
if call[name[self].has_model_definition, parameter[]] begin[:]
for taget[name[model_reaction]] in starred[call[name[self].parse_model, parameter[]]] begin[:]
call[name[model].model][name[model_reaction]] assign[=] constant[None]
return[name[model]] | keyword[def] identifier[create_model] ( identifier[self] ):
literal[string]
identifier[properties] ={
literal[string] : identifier[self] . identifier[name] ,
literal[string] : identifier[self] . identifier[biomass_reaction] ,
literal[string] : identifier[self] . identifier[extracellular_compartment] ,
literal[string] : identifier[self] . identifier[default_compartment] ,
literal[string] : identifier[self] . identifier[default_flux_limit]
}
keyword[if] identifier[self] . identifier[context] keyword[is] keyword[not] keyword[None] :
identifier[git_version] = identifier[util] . identifier[git_try_describe] ( identifier[self] . identifier[context] . identifier[basepath] )
identifier[properties] [ literal[string] ]= identifier[git_version]
identifier[model] = identifier[NativeModel] ( identifier[properties] )
identifier[compartment_iter] , identifier[boundaries] = identifier[self] . identifier[parse_compartments] ()
keyword[for] identifier[compartment] keyword[in] identifier[compartment_iter] :
identifier[model] . identifier[compartments] . identifier[add_entry] ( identifier[compartment] )
identifier[model] . identifier[compartment_boundaries] . identifier[update] ( identifier[boundaries] )
keyword[for] identifier[compound] keyword[in] identifier[self] . identifier[parse_compounds] ():
keyword[if] identifier[compound] . identifier[id] keyword[in] identifier[model] . identifier[compounds] :
identifier[existing_entry] = identifier[model] . identifier[compounds] [ identifier[compound] . identifier[id] ]
identifier[common_props] = identifier[set] ( identifier[compound] . identifier[properties] ). identifier[intersection] (
identifier[existing_entry] . identifier[properties] ). identifier[difference] ({ literal[string] })
keyword[if] identifier[len] ( identifier[common_props] )> literal[int] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string] . identifier[format] (
identifier[compound] . identifier[id] , identifier[compound] . identifier[filemark] , identifier[common_props] ))
identifier[properties] = identifier[dict] ( identifier[compound] . identifier[properties] )
identifier[properties] . identifier[update] ( identifier[existing_entry] . identifier[properties] )
identifier[compound] = identifier[CompoundEntry] (
identifier[properties] , identifier[filemark] = identifier[compound] . identifier[filemark] )
identifier[model] . identifier[compounds] . identifier[add_entry] ( identifier[compound] )
keyword[for] identifier[reaction] keyword[in] identifier[self] . identifier[parse_reactions] ():
keyword[if] identifier[reaction] . identifier[id] keyword[in] identifier[model] . identifier[reactions] :
identifier[existing_entry] = identifier[model] . identifier[reactions] [ identifier[reaction] . identifier[id] ]
identifier[common_props] = identifier[set] ( identifier[reaction] . identifier[properties] ). identifier[intersection] (
identifier[existing_entry] . identifier[properties] ). identifier[difference] ({ literal[string] })
keyword[if] identifier[len] ( identifier[common_props] )> literal[int] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string] . identifier[format] (
identifier[reaction] . identifier[id] , identifier[reaction] . identifier[filemark] , identifier[common_props] ))
identifier[properties] = identifier[dict] ( identifier[reaction] . identifier[properties] )
identifier[properties] . identifier[update] ( identifier[existing_entry] . identifier[properties] )
identifier[reaction] = identifier[ReactionEntry] (
identifier[properties] , identifier[filemark] = identifier[reaction] . identifier[filemark] )
identifier[model] . identifier[reactions] . identifier[add_entry] ( identifier[reaction] )
keyword[for] identifier[exchange_def] keyword[in] identifier[self] . identifier[parse_exchange] ():
identifier[model] . identifier[exchange] [ identifier[exchange_def] [ literal[int] ]]= identifier[exchange_def]
keyword[for] identifier[limit] keyword[in] identifier[self] . identifier[parse_limits] ():
identifier[model] . identifier[limits] [ identifier[limit] [ literal[int] ]]= identifier[limit]
keyword[if] identifier[self] . identifier[has_model_definition] ():
keyword[for] identifier[model_reaction] keyword[in] identifier[self] . identifier[parse_model] ():
identifier[model] . identifier[model] [ identifier[model_reaction] ]= keyword[None]
keyword[else] :
keyword[for] identifier[reaction] keyword[in] identifier[model] . identifier[reactions] :
identifier[model] . identifier[model] [ identifier[reaction] . identifier[id] ]= keyword[None]
keyword[return] identifier[model] | def create_model(self):
"""Return :class:`NativeModel` fully loaded into memory."""
properties = {'name': self.name, 'biomass': self.biomass_reaction, 'extracellular': self.extracellular_compartment, 'default_compartment': self.default_compartment, 'default_flux_limit': self.default_flux_limit}
if self.context is not None:
git_version = util.git_try_describe(self.context.basepath)
properties['version_string'] = git_version # depends on [control=['if'], data=[]]
model = NativeModel(properties)
# Load compartments into model
(compartment_iter, boundaries) = self.parse_compartments()
for compartment in compartment_iter:
model.compartments.add_entry(compartment) # depends on [control=['for'], data=['compartment']]
model.compartment_boundaries.update(boundaries)
# Load compounds into model
for compound in self.parse_compounds():
if compound.id in model.compounds:
existing_entry = model.compounds[compound.id]
common_props = set(compound.properties).intersection(existing_entry.properties).difference({'id'})
if len(common_props) > 0:
logger.warning('Compound entry {} at {} overrides already defined properties: {}'.format(compound.id, compound.filemark, common_props)) # depends on [control=['if'], data=[]]
properties = dict(compound.properties)
properties.update(existing_entry.properties)
compound = CompoundEntry(properties, filemark=compound.filemark) # depends on [control=['if'], data=[]]
model.compounds.add_entry(compound) # depends on [control=['for'], data=['compound']]
# Load reactions into model
for reaction in self.parse_reactions():
if reaction.id in model.reactions:
existing_entry = model.reactions[reaction.id]
common_props = set(reaction.properties).intersection(existing_entry.properties).difference({'id'})
if len(common_props) > 0:
logger.warning('Reaction entry {} at {} overrides already defined properties: {}'.format(reaction.id, reaction.filemark, common_props)) # depends on [control=['if'], data=[]]
properties = dict(reaction.properties)
properties.update(existing_entry.properties)
reaction = ReactionEntry(properties, filemark=reaction.filemark) # depends on [control=['if'], data=[]]
model.reactions.add_entry(reaction) # depends on [control=['for'], data=['reaction']]
for exchange_def in self.parse_exchange():
model.exchange[exchange_def[0]] = exchange_def # depends on [control=['for'], data=['exchange_def']]
for limit in self.parse_limits():
model.limits[limit[0]] = limit # depends on [control=['for'], data=['limit']]
if self.has_model_definition():
for model_reaction in self.parse_model():
model.model[model_reaction] = None # depends on [control=['for'], data=['model_reaction']] # depends on [control=['if'], data=[]]
else:
for reaction in model.reactions:
model.model[reaction.id] = None # depends on [control=['for'], data=['reaction']]
return model |
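
The duplicate-entry merge order above is easy to misread: the dict is seeded from the new entry and then updated from the existing one, so the entry registered first wins on shared keys. A tiny self-contained demonstration:

# Earlier entry's properties override the later duplicate's on shared keys.
existing = {'id': 'cpd1', 'name': 'Water', 'charge': 0}
duplicate = {'id': 'cpd1', 'name': 'H2O', 'formula': 'H2O'}

merged = dict(duplicate)   # start from the new entry...
merged.update(existing)    # ...then let the existing one override
print(merged)              # {'id': 'cpd1', 'name': 'Water', 'formula': 'H2O', 'charge': 0}
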
def dimensions(self, selection='all', label=False):
"""Lists the available dimensions on the object
Provides convenient access to Dimensions on nested Dimensioned
objects. Dimensions can be selected by their type, i.e. 'key'
or 'value' dimensions. By default 'all' dimensions are
returned.
Args:
selection: Type of dimensions to return
The type of dimension, i.e. one of 'key', 'value',
'constant' or 'all'.
label: Whether to return the name, label or Dimension
Whether to return the Dimension objects (False),
the Dimension names (True/'name') or labels ('label').
Returns:
List of Dimension objects or their names or labels
"""
if label in ['name', True]:
label = 'short'
elif label == 'label':
label = 'long'
elif label:
raise ValueError("label needs to be one of True, False, 'name' or 'label'")
lambdas = {'k': (lambda x: x.kdims, {'full_breadth': False}),
'v': (lambda x: x.vdims, {}),
'c': (lambda x: x.cdims, {})}
aliases = {'key': 'k', 'value': 'v', 'constant': 'c'}
if selection in ['all', 'ranges']:
groups = [d for d in self._dim_groups if d != 'cdims']
dims = [dim for group in groups
for dim in getattr(self, group)]
elif isinstance(selection, list):
dims = [dim for group in selection
for dim in getattr(self, '%sdims' % aliases.get(group))]
elif aliases.get(selection) in lambdas:
selection = aliases.get(selection, selection)
lmbd, kwargs = lambdas[selection]
key_traversal = self.traverse(lmbd, **kwargs)
dims = [dim for keydims in key_traversal for dim in keydims]
else:
raise KeyError("Invalid selection %r, valid selections include"
"'all', 'value' and 'key' dimensions" % repr(selection))
return [(dim.label if label == 'long' else dim.name)
if label else dim for dim in dims] | def function[dimensions, parameter[self, selection, label]]:
constant[Lists the available dimensions on the object
Provides convenient access to Dimensions on nested Dimensioned
objects. Dimensions can be selected by their type, i.e. 'key'
or 'value' dimensions. By default 'all' dimensions are
returned.
Args:
selection: Type of dimensions to return
The type of dimension, i.e. one of 'key', 'value',
'constant' or 'all'.
label: Whether to return the name, label or Dimension
Whether to return the Dimension objects (False),
the Dimension names (True/'name') or labels ('label').
Returns:
List of Dimension objects or their names or labels
]
if compare[name[label] in list[[<ast.Constant object at 0x7da1b1ca3c70>, <ast.Constant object at 0x7da1b1ca3ca0>]]] begin[:]
variable[label] assign[=] constant[short]
variable[lambdas] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ca0550>, <ast.Constant object at 0x7da1b1ca04f0>, <ast.Constant object at 0x7da1b1ca04c0>], [<ast.Tuple object at 0x7da1b1ca0520>, <ast.Tuple object at 0x7da1b1ca08b0>, <ast.Tuple object at 0x7da1b1ca0a00>]]
variable[aliases] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ca0430>, <ast.Constant object at 0x7da1b1ca0400>, <ast.Constant object at 0x7da1b1ca0460>], [<ast.Constant object at 0x7da1b1ca03a0>, <ast.Constant object at 0x7da1b1ca0340>, <ast.Constant object at 0x7da1b1ca0310>]]
if compare[name[selection] in list[[<ast.Constant object at 0x7da1b1ca00a0>, <ast.Constant object at 0x7da1b1ca01c0>]]] begin[:]
variable[groups] assign[=] <ast.ListComp object at 0x7da1b1ca00d0>
variable[dims] assign[=] <ast.ListComp object at 0x7da1b1ca0dc0>
return[<ast.ListComp object at 0x7da1b1ca1840>] | keyword[def] identifier[dimensions] ( identifier[self] , identifier[selection] = literal[string] , identifier[label] = keyword[False] ):
literal[string]
keyword[if] identifier[label] keyword[in] [ literal[string] , keyword[True] ]:
identifier[label] = literal[string]
keyword[elif] identifier[label] == literal[string] :
identifier[label] = literal[string]
keyword[elif] identifier[label] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[lambdas] ={ literal[string] :( keyword[lambda] identifier[x] : identifier[x] . identifier[kdims] ,{ literal[string] : keyword[False] }),
literal[string] :( keyword[lambda] identifier[x] : identifier[x] . identifier[vdims] ,{}),
literal[string] :( keyword[lambda] identifier[x] : identifier[x] . identifier[cdims] ,{})}
identifier[aliases] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
keyword[if] identifier[selection] keyword[in] [ literal[string] , literal[string] ]:
identifier[groups] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[self] . identifier[_dim_groups] keyword[if] identifier[d] != literal[string] ]
identifier[dims] =[ identifier[dim] keyword[for] identifier[group] keyword[in] identifier[groups]
keyword[for] identifier[dim] keyword[in] identifier[getattr] ( identifier[self] , identifier[group] )]
keyword[elif] identifier[isinstance] ( identifier[selection] , identifier[list] ):
identifier[dims] =[ identifier[dim] keyword[for] identifier[group] keyword[in] identifier[selection]
keyword[for] identifier[dim] keyword[in] identifier[getattr] ( identifier[self] , literal[string] % identifier[aliases] . identifier[get] ( identifier[group] ))]
keyword[elif] identifier[aliases] . identifier[get] ( identifier[selection] ) keyword[in] identifier[lambdas] :
identifier[selection] = identifier[aliases] . identifier[get] ( identifier[selection] , identifier[selection] )
identifier[lmbd] , identifier[kwargs] = identifier[lambdas] [ identifier[selection] ]
identifier[key_traversal] = identifier[self] . identifier[traverse] ( identifier[lmbd] ,** identifier[kwargs] )
identifier[dims] =[ identifier[dim] keyword[for] identifier[keydims] keyword[in] identifier[key_traversal] keyword[for] identifier[dim] keyword[in] identifier[keydims] ]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string]
literal[string] % identifier[repr] ( identifier[selection] ))
keyword[return] [( identifier[dim] . identifier[label] keyword[if] identifier[label] == literal[string] keyword[else] identifier[dim] . identifier[name] )
keyword[if] identifier[label] keyword[else] identifier[dim] keyword[for] identifier[dim] keyword[in] identifier[dims] ] | def dimensions(self, selection='all', label=False):
"""Lists the available dimensions on the object
Provides convenient access to Dimensions on nested Dimensioned
objects. Dimensions can be selected by their type, i.e. 'key'
or 'value' dimensions. By default 'all' dimensions are
returned.
Args:
selection: Type of dimensions to return
The type of dimension, i.e. one of 'key', 'value',
'constant' or 'all'.
label: Whether to return the name, label or Dimension
Whether to return the Dimension objects (False),
the Dimension names (True/'name') or labels ('label').
Returns:
List of Dimension objects or their names or labels
"""
if label in ['name', True]:
label = 'short' # depends on [control=['if'], data=['label']]
elif label == 'label':
label = 'long' # depends on [control=['if'], data=['label']]
elif label:
raise ValueError("label needs to be one of True, False, 'name' or 'label'") # depends on [control=['if'], data=[]]
lambdas = {'k': (lambda x: x.kdims, {'full_breadth': False}), 'v': (lambda x: x.vdims, {}), 'c': (lambda x: x.cdims, {})}
aliases = {'key': 'k', 'value': 'v', 'constant': 'c'}
if selection in ['all', 'ranges']:
groups = [d for d in self._dim_groups if d != 'cdims']
dims = [dim for group in groups for dim in getattr(self, group)] # depends on [control=['if'], data=[]]
elif isinstance(selection, list):
dims = [dim for group in selection for dim in getattr(self, '%sdims' % aliases.get(group))] # depends on [control=['if'], data=[]]
elif aliases.get(selection) in lambdas:
selection = aliases.get(selection, selection)
(lmbd, kwargs) = lambdas[selection]
key_traversal = self.traverse(lmbd, **kwargs)
dims = [dim for keydims in key_traversal for dim in keydims] # depends on [control=['if'], data=['lambdas']]
else:
raise KeyError("Invalid selection %r, valid selections include'all', 'value' and 'key' dimensions" % repr(selection))
return [(dim.label if label == 'long' else dim.name) if label else dim for dim in dims] |
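
A self-contained sketch of the selection-and-label dispatch with dummy objects standing in for Dimension instances; the traversal and 'ranges' handling of the real method are deliberately omitted.

class Dim:
    # Dummy stand-in for a Dimension: a short name plus a long label.
    def __init__(self, name, label):
        self.name, self.label = name, label

kdims = [Dim('x', 'X position')]
vdims = [Dim('y', 'Y amplitude')]

def dims_of(selection='all', label=False):
    groups = {'key': kdims, 'value': vdims, 'all': kdims + vdims}
    dims = groups[selection]
    if label in ('name', True):
        return [d.name for d in dims]
    if label == 'label':
        return [d.label for d in dims]
    return dims

print(dims_of('all', label=True))       # ['x', 'y']
print(dims_of('value', label='label'))  # ['Y amplitude']
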
def _capture_snapshot(a_snapshot: Snapshot, resolved_kwargs: Mapping[str, Any]) -> Any:
"""
Capture the snapshot from the keyword arguments resolved before the function call (including the default values).
:param a_snapshot: snapshot to be captured
:param resolved_kwargs: resolved keyword arguments (including the default values)
:return: captured value
"""
if a_snapshot.arg is not None:
if a_snapshot.arg not in resolved_kwargs:
raise TypeError(("The argument of the snapshot has not been set: {}. "
"Does the original function define it? Did you supply it in the call?").format(
a_snapshot.arg))
value = a_snapshot.capture(**{a_snapshot.arg: resolved_kwargs[a_snapshot.arg]})
else:
value = a_snapshot.capture()
return value | def function[_capture_snapshot, parameter[a_snapshot, resolved_kwargs]]:
constant[
Capture the snapshot from the keyword arguments resolved before the function call (including the default values).
:param a_snapshot: snapshot to be captured
:param resolved_kwargs: resolved keyword arguments (including the default values)
:return: captured value
]
if compare[name[a_snapshot].arg is_not constant[None]] begin[:]
if compare[name[a_snapshot].arg <ast.NotIn object at 0x7da2590d7190> name[resolved_kwargs]] begin[:]
<ast.Raise object at 0x7da1b1024250>
variable[value] assign[=] call[name[a_snapshot].capture, parameter[]]
return[name[value]] | keyword[def] identifier[_capture_snapshot] ( identifier[a_snapshot] : identifier[Snapshot] , identifier[resolved_kwargs] : identifier[Mapping] [ identifier[str] , identifier[Any] ])-> identifier[Any] :
literal[string]
keyword[if] identifier[a_snapshot] . identifier[arg] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[a_snapshot] . identifier[arg] keyword[not] keyword[in] identifier[resolved_kwargs] :
keyword[raise] identifier[TypeError] (( literal[string]
literal[string] ). identifier[format] (
identifier[a_snapshot] . identifier[arg] ))
identifier[value] = identifier[a_snapshot] . identifier[capture] (**{ identifier[a_snapshot] . identifier[arg] : identifier[resolved_kwargs] [ identifier[a_snapshot] . identifier[arg] ]})
keyword[else] :
identifier[value] = identifier[a_snapshot] . identifier[capture] ()
keyword[return] identifier[value] | def _capture_snapshot(a_snapshot: Snapshot, resolved_kwargs: Mapping[str, Any]) -> Any:
"""
Capture the snapshot from the keyword arguments resolved before the function call (including the default values).
:param a_snapshot: snapshot to be captured
:param resolved_kwargs: resolved keyword arguments (including the default values)
:return: captured value
"""
if a_snapshot.arg is not None:
if a_snapshot.arg not in resolved_kwargs:
raise TypeError('The argument of the snapshot has not been set: {}. Does the original function define it? Did you supply it in the call?'.format(a_snapshot.arg)) # depends on [control=['if'], data=[]]
value = a_snapshot.capture(**{a_snapshot.arg: resolved_kwargs[a_snapshot.arg]}) # depends on [control=['if'], data=[]]
else:
value = a_snapshot.capture()
return value |
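
A runnable sketch, assuming `_capture_snapshot` above is importable; `FakeSnapshot` is a stub exposing only the two members the function touches (`arg` and `capture`).

class FakeSnapshot:
    # Stub exposing only the members _capture_snapshot reads.
    def __init__(self, capture, arg=None):
        self.capture, self.arg = capture, arg

resolved_kwargs = {'lst': [1, 2, 3]}

snap = FakeSnapshot(capture=lambda lst: len(lst), arg='lst')
print(_capture_snapshot(snap, resolved_kwargs))   # 3: capture saw lst=[1, 2, 3]

snap0 = FakeSnapshot(capture=lambda: 'before-call')
print(_capture_snapshot(snap0, resolved_kwargs))  # 'before-call': no-arg capture
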
def _reduce_helper(input_shape,
output_shape,
input_tensor_layout,
reduction_fn_string="SUM"):
"""Returns slicewise function and reduced mesh dimensions.
Args:
input_shape: a Shape
output_shape: a Shape
input_tensor_layout: a TensorLayout
reduction_fn_string: "SUM" or "MAX"
Returns:
reduce_slice_fn: a function from tf.Tensor to tf.Tensor
reduced_mesh_axes: a list of integers
"""
reduce_dims_indices = [
i for i, d in enumerate(input_shape.dims) if d not in output_shape.dims]
reduced_input_shape = Shape([
d for d in input_shape.dims if d in output_shape.dims])
perm = [reduced_input_shape.dims.index(d) for d in output_shape.dims]
def reduce_slice_fn(xslice):
ret = xslice
if reduce_dims_indices:
ret = reduction_fn(reduction_fn_string)(xslice, reduce_dims_indices)
if perm != list(xrange(len(perm))):
ret = tf.transpose(ret, perm)
return ret
reduced_mesh_axes = []
for i in reduce_dims_indices:
mesh_axis = input_tensor_layout[i]
if mesh_axis is not None:
reduced_mesh_axes.append(mesh_axis)
return reduce_slice_fn, reduced_mesh_axes | def function[_reduce_helper, parameter[input_shape, output_shape, input_tensor_layout, reduction_fn_string]]:
constant[Returns slicewise function and reduced mesh dimensions.
Args:
input_shape: a Shape
output_shape: a Shape
input_tensor_layout: a TensorLayout
reduction_fn_string: "SUM" or "MAX"
Returns:
reduce_slice_fn: a function from tf.Tensor to tf.Tensor
reduced_mesh_axes: a list of integers
]
variable[reduce_dims_indices] assign[=] <ast.ListComp object at 0x7da20c9906a0>
variable[reduced_input_shape] assign[=] call[name[Shape], parameter[<ast.ListComp object at 0x7da204567d00>]]
variable[perm] assign[=] <ast.ListComp object at 0x7da204566a70>
def function[reduce_slice_fn, parameter[xslice]]:
variable[ret] assign[=] name[xslice]
if name[reduce_dims_indices] begin[:]
variable[ret] assign[=] call[call[name[reduction_fn], parameter[name[reduction_fn_string]]], parameter[name[xslice], name[reduce_dims_indices]]]
if compare[name[perm] not_equal[!=] call[name[list], parameter[call[name[xrange], parameter[call[name[len], parameter[name[perm]]]]]]]] begin[:]
variable[ret] assign[=] call[name[tf].transpose, parameter[name[ret], name[perm]]]
return[name[ret]]
variable[reduced_mesh_axes] assign[=] list[[]]
for taget[name[i]] in starred[name[reduce_dims_indices]] begin[:]
variable[mesh_axis] assign[=] call[name[input_tensor_layout]][name[i]]
if compare[name[mesh_axis] is_not constant[None]] begin[:]
call[name[reduced_mesh_axes].append, parameter[name[mesh_axis]]]
return[tuple[[<ast.Name object at 0x7da204566440>, <ast.Name object at 0x7da204567eb0>]]] | keyword[def] identifier[_reduce_helper] ( identifier[input_shape] ,
identifier[output_shape] ,
identifier[input_tensor_layout] ,
identifier[reduction_fn_string] = literal[string] ):
literal[string]
identifier[reduce_dims_indices] =[
identifier[i] keyword[for] identifier[i] , identifier[d] keyword[in] identifier[enumerate] ( identifier[input_shape] . identifier[dims] ) keyword[if] identifier[d] keyword[not] keyword[in] identifier[output_shape] . identifier[dims] ]
identifier[reduced_input_shape] = identifier[Shape] ([
identifier[d] keyword[for] identifier[d] keyword[in] identifier[input_shape] . identifier[dims] keyword[if] identifier[d] keyword[in] identifier[output_shape] . identifier[dims] ])
identifier[perm] =[ identifier[reduced_input_shape] . identifier[dims] . identifier[index] ( identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[output_shape] . identifier[dims] ]
keyword[def] identifier[reduce_slice_fn] ( identifier[xslice] ):
identifier[ret] = identifier[xslice]
keyword[if] identifier[reduce_dims_indices] :
identifier[ret] = identifier[reduction_fn] ( identifier[reduction_fn_string] )( identifier[xslice] , identifier[reduce_dims_indices] )
keyword[if] identifier[perm] != identifier[list] ( identifier[xrange] ( identifier[len] ( identifier[perm] ))):
identifier[ret] = identifier[tf] . identifier[transpose] ( identifier[ret] , identifier[perm] )
keyword[return] identifier[ret]
identifier[reduced_mesh_axes] =[]
keyword[for] identifier[i] keyword[in] identifier[reduce_dims_indices] :
identifier[mesh_axis] = identifier[input_tensor_layout] [ identifier[i] ]
keyword[if] identifier[mesh_axis] keyword[is] keyword[not] keyword[None] :
identifier[reduced_mesh_axes] . identifier[append] ( identifier[mesh_axis] )
keyword[return] identifier[reduce_slice_fn] , identifier[reduced_mesh_axes] | def _reduce_helper(input_shape, output_shape, input_tensor_layout, reduction_fn_string='SUM'):
"""Returns slicewise function and reduced mesh dimensions.
Args:
input_shape: a Shape
output_shape: a Shape
input_tensor_layout: a TensorLayout
reduction_fn_string: "SUM" or "MAX"
Returns:
reduce_slice_fn: a function from tf.Tensor to tf.Tensor
reduced_mesh_axes: a list of integers
"""
reduce_dims_indices = [i for (i, d) in enumerate(input_shape.dims) if d not in output_shape.dims]
reduced_input_shape = Shape([d for d in input_shape.dims if d in output_shape.dims])
perm = [reduced_input_shape.dims.index(d) for d in output_shape.dims]
def reduce_slice_fn(xslice):
ret = xslice
if reduce_dims_indices:
ret = reduction_fn(reduction_fn_string)(xslice, reduce_dims_indices) # depends on [control=['if'], data=[]]
if perm != list(xrange(len(perm))):
ret = tf.transpose(ret, perm) # depends on [control=['if'], data=['perm']]
return ret
reduced_mesh_axes = []
for i in reduce_dims_indices:
mesh_axis = input_tensor_layout[i]
if mesh_axis is not None:
reduced_mesh_axes.append(mesh_axis) # depends on [control=['if'], data=['mesh_axis']] # depends on [control=['for'], data=['i']]
return (reduce_slice_fn, reduced_mesh_axes) |
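
The index bookkeeping is the subtle part, so here is a self-contained trace of `reduce_dims_indices` and `perm` using plain strings as stand-ins for Dimension objects:

# Stand-in dims as plain strings: sum out 'heads', reorder the survivors.
input_dims = ['batch', 'heads', 'length']
output_dims = ['length', 'batch']

reduce_dims_indices = [i for i, d in enumerate(input_dims)
                       if d not in output_dims]
reduced_input = [d for d in input_dims if d in output_dims]
perm = [reduced_input.index(d) for d in output_dims]

print(reduce_dims_indices)  # [1]: axis 1 ('heads') gets reduced away
print(perm)                 # [1, 0]: transpose ['batch', 'length'] -> ['length', 'batch']
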
def splitarg(args):
'''
This function will split arguments separated by spaces or commas
to be backwards compatible with the original ArcGet command line tool
'''
if not args:
return args
split = list()
for arg in args:
if ',' in arg:
split.extend([x for x in arg.split(',') if x])
elif arg:
split.append(arg)
return split | def function[splitarg, parameter[args]]:
constant[
This function will split arguments separated by spaces or commas
to be backwards compatible with the original ArcGet command line tool
]
if <ast.UnaryOp object at 0x7da18fe915a0> begin[:]
return[name[args]]
variable[split] assign[=] call[name[list], parameter[]]
for taget[name[arg]] in starred[name[args]] begin[:]
if compare[constant[,] in name[arg]] begin[:]
call[name[split].extend, parameter[<ast.ListComp object at 0x7da18fe92470>]]
return[name[split]] | keyword[def] identifier[splitarg] ( identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[args] :
keyword[return] identifier[args]
identifier[split] = identifier[list] ()
keyword[for] identifier[arg] keyword[in] identifier[args] :
keyword[if] literal[string] keyword[in] identifier[arg] :
identifier[split] . identifier[extend] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[arg] . identifier[split] ( literal[string] ) keyword[if] identifier[x] ])
keyword[elif] identifier[arg] :
identifier[split] . identifier[append] ( identifier[arg] )
keyword[return] identifier[split] | def splitarg(args):
"""
This function will split arguments separated by spaces or commas
to be backwards compatible with the original ArcGet command line tool
"""
if not args:
return args # depends on [control=['if'], data=[]]
split = list()
for arg in args:
if ',' in arg:
split.extend([x for x in arg.split(',') if x]) # depends on [control=['if'], data=['arg']]
elif arg:
split.append(arg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
return split |
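
Usage sketch, assuming `splitarg` above is in scope: mixed comma-separated and plain arguments flatten into one list, and empty fragments are dropped.

print(splitarg(['a,b', 'c', '', 'd,,e']))  # ['a', 'b', 'c', 'd', 'e']
print(splitarg([]))                        # []  (falsy input returned as-is)
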
def _get_data_files(data_specs, existing):
"""Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
"""
# Extract the existing data files into a staging object.
file_data = defaultdict(list)
for (path, files) in existing or []:
file_data[path] = files
# Extract the files and assign them to the proper data
# files path.
for (path, dname, pattern) in data_specs or []:
dname = dname.replace(os.sep, '/')
offset = len(dname) + 1
files = _get_files(pjoin(dname, pattern))
for fname in files:
# Normalize the path.
root = os.path.dirname(fname)
full_path = '/'.join([path, root[offset:]])
if full_path.endswith('/'):
full_path = full_path[:-1]
file_data[full_path].append(fname)
# Construct the data files spec.
data_files = []
for (path, files) in file_data.items():
data_files.append((path, files))
return data_files | def function[_get_data_files, parameter[data_specs, existing]]:
constant[Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
]
variable[file_data] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da20c990d00>, <ast.Name object at 0x7da20c992560>]]] in starred[<ast.BoolOp object at 0x7da20c9908b0>] begin[:]
call[name[file_data]][name[path]] assign[=] name[files]
for taget[tuple[[<ast.Name object at 0x7da20c990fd0>, <ast.Name object at 0x7da20c991c30>, <ast.Name object at 0x7da20c993610>]]] in starred[<ast.BoolOp object at 0x7da20c992500>] begin[:]
variable[dname] assign[=] call[name[dname].replace, parameter[name[os].sep, constant[/]]]
variable[offset] assign[=] binary_operation[call[name[len], parameter[name[dname]]] + constant[1]]
variable[files] assign[=] call[name[_get_files], parameter[call[name[pjoin], parameter[name[dname], name[pattern]]]]]
for taget[name[fname]] in starred[name[files]] begin[:]
variable[root] assign[=] call[name[os].path.dirname, parameter[name[fname]]]
variable[full_path] assign[=] call[constant[/].join, parameter[list[[<ast.Name object at 0x7da18c4cc190>, <ast.Subscript object at 0x7da18c4cd720>]]]]
if call[name[full_path].endswith, parameter[constant[/]]] begin[:]
variable[full_path] assign[=] call[name[full_path]][<ast.Slice object at 0x7da18c4cd030>]
call[call[name[file_data]][name[full_path]].append, parameter[name[fname]]]
variable[data_files] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18c4cf940>, <ast.Name object at 0x7da18c4ce6e0>]]] in starred[call[name[file_data].items, parameter[]]] begin[:]
call[name[data_files].append, parameter[tuple[[<ast.Name object at 0x7da18c4cfa90>, <ast.Name object at 0x7da18c4ced10>]]]]
return[name[data_files]] | keyword[def] identifier[_get_data_files] ( identifier[data_specs] , identifier[existing] ):
literal[string]
identifier[file_data] = identifier[defaultdict] ( identifier[list] )
keyword[for] ( identifier[path] , identifier[files] ) keyword[in] identifier[existing] keyword[or] []:
identifier[file_data] [ identifier[path] ]= identifier[files]
keyword[for] ( identifier[path] , identifier[dname] , identifier[pattern] ) keyword[in] identifier[data_specs] keyword[or] []:
identifier[dname] = identifier[dname] . identifier[replace] ( identifier[os] . identifier[sep] , literal[string] )
identifier[offset] = identifier[len] ( identifier[dname] )+ literal[int]
identifier[files] = identifier[_get_files] ( identifier[pjoin] ( identifier[dname] , identifier[pattern] ))
keyword[for] identifier[fname] keyword[in] identifier[files] :
identifier[root] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[fname] )
identifier[full_path] = literal[string] . identifier[join] ([ identifier[path] , identifier[root] [ identifier[offset] :]])
keyword[if] identifier[full_path] . identifier[endswith] ( literal[string] ):
identifier[full_path] = identifier[full_path] [:- literal[int] ]
identifier[file_data] [ identifier[full_path] ]. identifier[append] ( identifier[fname] )
identifier[data_files] =[]
keyword[for] ( identifier[path] , identifier[files] ) keyword[in] identifier[file_data] . identifier[items] ():
identifier[data_files] . identifier[append] (( identifier[path] , identifier[files] ))
keyword[return] identifier[data_files] | def _get_data_files(data_specs, existing):
"""Expand data file specs into valid data files metadata.
Parameters
----------
data_specs: list of tuples
See [createcmdclass] for description.
existing: list of tuples
The existing distribution data_files metadata.
Returns
-------
A valid list of data_files items.
"""
# Extract the existing data files into a staging object.
file_data = defaultdict(list)
for (path, files) in existing or []:
file_data[path] = files # depends on [control=['for'], data=[]]
# Extract the files and assign them to the proper data
# files path.
for (path, dname, pattern) in data_specs or []:
dname = dname.replace(os.sep, '/')
offset = len(dname) + 1
files = _get_files(pjoin(dname, pattern))
for fname in files:
# Normalize the path.
root = os.path.dirname(fname)
full_path = '/'.join([path, root[offset:]])
if full_path.endswith('/'):
full_path = full_path[:-1] # depends on [control=['if'], data=[]]
file_data[full_path].append(fname) # depends on [control=['for'], data=['fname']] # depends on [control=['for'], data=[]]
# Construct the data files spec.
data_files = []
for (path, files) in file_data.items():
data_files.append((path, files)) # depends on [control=['for'], data=[]]
return data_files |
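
A self-contained trace of the path normalization step; the file list below replaces the `_get_files`/`pjoin` helpers, which are assumptions for illustration only.

import os
from collections import defaultdict

# Pretend _get_files() matched these files under dname='share/stuff'.
dname, path = 'share/stuff', 'etc/jupyter'
matched = ['share/stuff/a.json', 'share/stuff/sub/b.json']

file_data = defaultdict(list)
offset = len(dname) + 1
for fname in matched:
    root = os.path.dirname(fname)
    full_path = '/'.join([path, root[offset:]])
    if full_path.endswith('/'):
        full_path = full_path[:-1]
    file_data[full_path].append(fname)

print(dict(file_data))
# {'etc/jupyter': ['share/stuff/a.json'],
#  'etc/jupyter/sub': ['share/stuff/sub/b.json']}
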
def client_receives_message(self, *parameters):
"""Receive a message with template defined using `New Message` and
validate field values.
Message template has to be defined with `New Message` before calling
this.
Optional parameters:
- `name` the client name (default is the latest used) example: `name=Client 1`
- `timeout` for receiving message. example: `timeout=0.1`
- `latest` if set to True, get latest message from buffer instead first. Default is False. Example: `latest=True`
- message field values for validation separated with colon. example: `some_field:0xaf05`
Examples:
| ${msg} = | Client receives message |
| ${msg} = | Client receives message | name=Client1 | timeout=5 |
| ${msg} = | Client receives message | message_field:(0|1) |
"""
with self._receive(self._clients, *parameters) as (msg, message_fields, header_fields):
self._validate_message(msg, message_fields, header_fields)
return msg | def function[client_receives_message, parameter[self]]:
constant[Receive a message with template defined using `New Message` and
validate field values.
Message template has to be defined with `New Message` before calling
this.
Optional parameters:
- `name` the client name (default is the latest used) example: `name=Client 1`
- `timeout` for receiving message. example: `timeout=0.1`
- `latest` if set to True, get latest message from buffer instead first. Default is False. Example: `latest=True`
- message field values for validation separated with colon. example: `some_field:0xaf05`
Examples:
| ${msg} = | Client receives message |
| ${msg} = | Client receives message | name=Client1 | timeout=5 |
| ${msg} = | Client receives message | message_field:(0|1) |
]
with call[name[self]._receive, parameter[name[self]._clients, <ast.Starred object at 0x7da20c6e5780>]] begin[:]
call[name[self]._validate_message, parameter[name[msg], name[message_fields], name[header_fields]]]
return[name[msg]] | keyword[def] identifier[client_receives_message] ( identifier[self] ,* identifier[parameters] ):
literal[string]
keyword[with] identifier[self] . identifier[_receive] ( identifier[self] . identifier[_clients] ,* identifier[parameters] ) keyword[as] ( identifier[msg] , identifier[message_fields] , identifier[header_fields] ):
identifier[self] . identifier[_validate_message] ( identifier[msg] , identifier[message_fields] , identifier[header_fields] )
keyword[return] identifier[msg] | def client_receives_message(self, *parameters):
"""Receive a message with template defined using `New Message` and
validate field values.
Message template has to be defined with `New Message` before calling
this.
Optional parameters:
- `name` the client name (default is the latest used) example: `name=Client 1`
- `timeout` for receiving message. example: `timeout=0.1`
- `latest` if set to True, get latest message from buffer instead first. Default is False. Example: `latest=True`
- message field values for validation separated with colon. example: `some_field:0xaf05`
Examples:
| ${msg} = | Client receives message |
| ${msg} = | Client receives message | name=Client1 | timeout=5 |
| ${msg} = | Client receives message | message_field:(0|1) |
"""
with self._receive(self._clients, *parameters) as (msg, message_fields, header_fields):
self._validate_message(msg, message_fields, header_fields)
return msg # depends on [control=['with'], data=[]] |
def wait_until_first_element_is_found(self, elements, timeout=None):
"""Search list of elements and wait until one of them is found
:param elements: list of PageElements or element locators as a tuple (locator_type, locator_value) to be found
sequentially
:param timeout: max time to wait
:returns: first element found
:rtype: toolium.pageelements.PageElement or tuple
:raises TimeoutException: If no element in the list is found after the timeout
"""
try:
return self._wait_until(self._expected_condition_find_first_element, elements, timeout)
except TimeoutException as exception:
msg = 'None of the page elements has been found after %s seconds'
timeout = timeout if timeout else self.get_explicitly_wait()
self.logger.error(msg, timeout)
exception.msg += "\n {}".format(msg % timeout)
raise exception | def function[wait_until_first_element_is_found, parameter[self, elements, timeout]]:
constant[Search list of elements and wait until one of them is found
:param elements: list of PageElements or element locators as a tuple (locator_type, locator_value) to be found
sequentially
:param timeout: max time to wait
:returns: first element found
:rtype: toolium.pageelements.PageElement or tuple
:raises TimeoutException: If no element in the list is found after the timeout
]
<ast.Try object at 0x7da18dc048e0> | keyword[def] identifier[wait_until_first_element_is_found] ( identifier[self] , identifier[elements] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_wait_until] ( identifier[self] . identifier[_expected_condition_find_first_element] , identifier[elements] , identifier[timeout] )
keyword[except] identifier[TimeoutException] keyword[as] identifier[exception] :
identifier[msg] = literal[string]
identifier[timeout] = identifier[timeout] keyword[if] identifier[timeout] keyword[else] identifier[self] . identifier[get_explicitly_wait] ()
identifier[self] . identifier[logger] . identifier[error] ( identifier[msg] , identifier[timeout] )
identifier[exception] . identifier[msg] += literal[string] . identifier[format] ( identifier[msg] % identifier[timeout] )
keyword[raise] identifier[exception] | def wait_until_first_element_is_found(self, elements, timeout=None):
"""Search list of elements and wait until one of them is found
:param elements: list of PageElements or element locators as a tuple (locator_type, locator_value) to be found
sequentially
:param timeout: max time to wait
:returns: first element found
:rtype: toolium.pageelements.PageElement or tuple
:raises TimeoutException: If no element in the list is found after the timeout
"""
try:
return self._wait_until(self._expected_condition_find_first_element, elements, timeout) # depends on [control=['try'], data=[]]
except TimeoutException as exception:
msg = 'None of the page elements has been found after %s seconds'
timeout = timeout if timeout else self.get_explicitly_wait()
self.logger.error(msg, timeout)
exception.msg += '\n {}'.format(msg % timeout)
raise exception # depends on [control=['except'], data=['exception']] |
def emit_event(self, event_name, event_body):
"""
Publishes an event of type ``event_name`` to all subscribers, having the body
``event_body``. The event is pushed through all available event transports.
The event body must be a Python object that can be represented as a JSON.
:param event_name: a ``str`` representing the event type
:param event_body: a Python object that can be represented as JSON.
.. versionadded:: 0.5.0
.. versionchanged:: 0.10.0
Added parameter broadcast
"""
for transport in self.event_transports:
transport.emit_event(event_name, event_body) | def function[emit_event, parameter[self, event_name, event_body]]:
constant[
Publishes an event of type ``event_name`` to all subscribers, having the body
``event_body``. The event is pushed through all available event transports.
The event body must be a Python object that can be represented as a JSON.
:param event_name: a ``str`` representing the event type
:param event_body: a Python object that can be represented as JSON.
.. versionadded:: 0.5.0
.. versionchanged:: 0.10.0
Added parameter broadcast
]
for taget[name[transport]] in starred[name[self].event_transports] begin[:]
call[name[transport].emit_event, parameter[name[event_name], name[event_body]]] | keyword[def] identifier[emit_event] ( identifier[self] , identifier[event_name] , identifier[event_body] ):
literal[string]
keyword[for] identifier[transport] keyword[in] identifier[self] . identifier[event_transports] :
identifier[transport] . identifier[emit_event] ( identifier[event_name] , identifier[event_body] ) | def emit_event(self, event_name, event_body):
"""
Publishes an event of type ``event_name`` to all subscribers, having the body
``event_body``. The event is pushed through all available event transports.
The event body must be a Python object that can be represented as a JSON.
:param event_name: a ``str`` representing the event type
:param event_body: a Python object that can be represented as JSON.
.. versionadded:: 0.5.0
.. versionchanged:: 0.10.0
Added parameter broadcast
"""
for transport in self.event_transports:
transport.emit_event(event_name, event_body) # depends on [control=['for'], data=['transport']] |
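
A runnable fan-out sketch with stub transports; `Hub` restates the same loop inline because the surrounding class is not shown in this row.

class PrintTransport:
    # Stub transport: just prints what it would publish.
    def __init__(self, name):
        self.name = name
    def emit_event(self, event_name, event_body):
        print('%s -> %s %r' % (self.name, event_name, event_body))

class Hub:
    # Minimal host object; emit_event repeats the fan-out loop above.
    def __init__(self, transports):
        self.event_transports = transports
    def emit_event(self, event_name, event_body):
        for transport in self.event_transports:
            transport.emit_event(event_name, event_body)

hub = Hub([PrintTransport('redis'), PrintTransport('amqp')])
hub.emit_event('user.created', {'id': 42})
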
def validate(self):
"""
        Validate whether the values in the config file are correct.
"""
spec = self._create_specs()
# support in future
functions = {}
validator = validate.Validator(functions=functions)
self.config.configspec = spec
result = self.config.validate(validator, preserve_errors=True)
if self._parse_result(result):
return True | def function[validate, parameter[self]]:
constant[
    Validate whether the values in the config file are correct.
]
variable[spec] assign[=] call[name[self]._create_specs, parameter[]]
variable[functions] assign[=] dictionary[[], []]
variable[validator] assign[=] call[name[validate].Validator, parameter[]]
name[self].config.configspec assign[=] name[spec]
variable[result] assign[=] call[name[self].config.validate, parameter[name[validator]]]
if call[name[self]._parse_result, parameter[name[result]]] begin[:]
return[constant[True]] | keyword[def] identifier[validate] ( identifier[self] ):
literal[string]
identifier[spec] = identifier[self] . identifier[_create_specs] ()
identifier[functions] ={}
identifier[validator] = identifier[validate] . identifier[Validator] ( identifier[functions] = identifier[functions] )
identifier[self] . identifier[config] . identifier[configspec] = identifier[spec]
identifier[result] = identifier[self] . identifier[config] . identifier[validate] ( identifier[validator] , identifier[preserve_errors] = keyword[True] )
keyword[if] identifier[self] . identifier[_parse_result] ( identifier[result] ):
keyword[return] keyword[True] | def validate(self):
"""
        Validate whether the values in the config file are correct.
"""
spec = self._create_specs()
# support in future
functions = {}
validator = validate.Validator(functions=functions)
self.config.configspec = spec
result = self.config.validate(validator, preserve_errors=True)
if self._parse_result(result):
return True # depends on [control=['if'], data=[]] |
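
A hedged sketch of the underlying configobj/validate machinery this method wraps; the spec and ini content below are invented for illustration and passed as lists of lines.

from configobj import ConfigObj
from validate import Validator

spec_lines = ['port = integer(min=1, max=65535)',
              'debug = boolean(default=False)']
ini_lines = ['port = 8080']

config = ConfigObj(ini_lines, configspec=spec_lines)
result = config.validate(Validator(), preserve_errors=True)
print(result)          # True when every value passes its check
print(config['port'])  # 8080, coerced to int by the integer() check
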
def get_details(self, obj):
""" returns uri of API image resource """
args = {
'slug': obj.node.slug,
'pk': obj.pk
}
return reverse('api_node_image_detail',
kwargs=args,
request=self.context.get('request', None)) | def function[get_details, parameter[self, obj]]:
constant[ returns uri of API image resource ]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e7dc0>, <ast.Constant object at 0x7da20c6e4370>], [<ast.Attribute object at 0x7da1b26afe20>, <ast.Attribute object at 0x7da1b26aff10>]]
return[call[name[reverse], parameter[constant[api_node_image_detail]]]] | keyword[def] identifier[get_details] ( identifier[self] , identifier[obj] ):
literal[string]
identifier[args] ={
literal[string] : identifier[obj] . identifier[node] . identifier[slug] ,
literal[string] : identifier[obj] . identifier[pk]
}
keyword[return] identifier[reverse] ( literal[string] ,
identifier[kwargs] = identifier[args] ,
identifier[request] = identifier[self] . identifier[context] . identifier[get] ( literal[string] , keyword[None] )) | def get_details(self, obj):
""" returns uri of API image resource """
args = {'slug': obj.node.slug, 'pk': obj.pk}
return reverse('api_node_image_detail', kwargs=args, request=self.context.get('request', None)) |
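# Hedged sketch of how a method like get_details is typically wired into a
# Django REST Framework serializer; the serializer name is illustrative and
# DRF must be installed for the import to resolve.
from rest_framework import serializers

class NodeImageSerializer(serializers.Serializer):
    # DRF resolves a SerializerMethodField named 'details' by calling
    # self.get_details(obj), i.e. the method shown above.
    details = serializers.SerializerMethodField()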
def getservers(self, vhost = None):
'''
        Return the current servers.
        :param vhost: if specified, return only the servers of that vhost;
                      '' returns only the default servers, None returns all servers.
'''
if vhost is not None:
return [s for s in self.connections if s.protocol.vhost == vhost]
else:
return list(self.connections) | def function[getservers, parameter[self, vhost]]:
constant[
Return current servers
:param vhost: return only servers of vhost if specified. '' to return only default servers.
None for all servers.
]
if compare[name[vhost] is_not constant[None]] begin[:]
return[<ast.ListComp object at 0x7da20c992920>] | keyword[def] identifier[getservers] ( identifier[self] , identifier[vhost] = keyword[None] ):
literal[string]
keyword[if] identifier[vhost] keyword[is] keyword[not] keyword[None] :
keyword[return] [ identifier[s] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[connections] keyword[if] identifier[s] . identifier[protocol] . identifier[vhost] == identifier[vhost] ]
keyword[else] :
keyword[return] identifier[list] ( identifier[self] . identifier[connections] ) | def getservers(self, vhost=None):
"""
Return current servers
:param vhost: return only servers of vhost if specified. '' to return only default servers.
None for all servers.
"""
if vhost is not None:
return [s for s in self.connections if s.protocol.vhost == vhost] # depends on [control=['if'], data=['vhost']]
else:
return list(self.connections) |
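# Illustrative use of the vhost filter above with stand-in connection
# objects; the real protocol/vhost attributes come from the surrounding
# server classes, so Pool and its fixtures here are hypothetical.
from types import SimpleNamespace

class Pool:
    def __init__(self, connections):
        self.connections = connections

    def getservers(self, vhost=None):
        if vhost is not None:
            return [s for s in self.connections if s.protocol.vhost == vhost]
        return list(self.connections)

a = SimpleNamespace(protocol=SimpleNamespace(vhost=''))
b = SimpleNamespace(protocol=SimpleNamespace(vhost='api'))
pool = Pool([a, b])
assert pool.getservers('') == [a]   # default servers only
assert pool.getservers() == [a, b]  # all servers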
def _get_base_rates(self, base_params):
'''
        Defines the base moment rate that should be assigned to places of
        zero strain (i.e. intraplate regions). In Bird et al. (2010) this is
        taken as the basic rate of intraplate events in the GCMT catalogue
        above the threshold magnitude.
:param dict base_params:
Parameters needed for calculating the base rate. Requires:
'CMT_EVENTS': The number of CMT events
'area': Total area (km ^ 2) of the region class
'CMT_duration': Duration of reference catalogue
'CMT_moment': Moment rate from CMT catalogue
'corner_mag': Corner magnitude of Tapered G-R for region
'beta': Beta value of tapered G-R for distribution
'''
base_ipl_rate = base_params['CMT_EVENTS'] / (
base_params['area'] * base_params['CMT_duration'])
base_rate = np.zeros(self.number_magnitudes, dtype=float)
for iloc in range(0, self.number_magnitudes):
base_rate[iloc] = base_ipl_rate * calculate_taper_function(
base_params['CMT_moment'],
self.threshold_moment[iloc],
moment_function(base_params['corner_mag']),
base_params['beta'])
return base_rate | def function[_get_base_rates, parameter[self, base_params]]:
constant[
Defines the base moment rate that should be assigned to places of
zero strain (i.e. Intraplate regions). In Bird et al (2010) this is
taken as basic rate of Intraplate events in GCMT catalogue above the
threshold magnitude
:param dict base_params:
Parameters needed for calculating the base rate. Requires:
'CMT_EVENTS': The number of CMT events
'area': Total area (km ^ 2) of the region class
'CMT_duration': Duration of reference catalogue
'CMT_moment': Moment rate from CMT catalogue
'corner_mag': Corner magnitude of Tapered G-R for region
'beta': Beta value of tapered G-R for distribution
]
variable[base_ipl_rate] assign[=] binary_operation[call[name[base_params]][constant[CMT_EVENTS]] / binary_operation[call[name[base_params]][constant[area]] * call[name[base_params]][constant[CMT_duration]]]]
variable[base_rate] assign[=] call[name[np].zeros, parameter[name[self].number_magnitudes]]
for taget[name[iloc]] in starred[call[name[range], parameter[constant[0], name[self].number_magnitudes]]] begin[:]
call[name[base_rate]][name[iloc]] assign[=] binary_operation[name[base_ipl_rate] * call[name[calculate_taper_function], parameter[call[name[base_params]][constant[CMT_moment]], call[name[self].threshold_moment][name[iloc]], call[name[moment_function], parameter[call[name[base_params]][constant[corner_mag]]]], call[name[base_params]][constant[beta]]]]]
return[name[base_rate]] | keyword[def] identifier[_get_base_rates] ( identifier[self] , identifier[base_params] ):
literal[string]
identifier[base_ipl_rate] = identifier[base_params] [ literal[string] ]/(
identifier[base_params] [ literal[string] ]* identifier[base_params] [ literal[string] ])
identifier[base_rate] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[number_magnitudes] , identifier[dtype] = identifier[float] )
keyword[for] identifier[iloc] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[number_magnitudes] ):
identifier[base_rate] [ identifier[iloc] ]= identifier[base_ipl_rate] * identifier[calculate_taper_function] (
identifier[base_params] [ literal[string] ],
identifier[self] . identifier[threshold_moment] [ identifier[iloc] ],
identifier[moment_function] ( identifier[base_params] [ literal[string] ]),
identifier[base_params] [ literal[string] ])
keyword[return] identifier[base_rate] | def _get_base_rates(self, base_params):
"""
Defines the base moment rate that should be assigned to places of
zero strain (i.e. Intraplate regions). In Bird et al (2010) this is
taken as basic rate of Intraplate events in GCMT catalogue above the
threshold magnitude
:param dict base_params:
Parameters needed for calculating the base rate. Requires:
'CMT_EVENTS': The number of CMT events
'area': Total area (km ^ 2) of the region class
'CMT_duration': Duration of reference catalogue
'CMT_moment': Moment rate from CMT catalogue
'corner_mag': Corner magnitude of Tapered G-R for region
'beta': Beta value of tapered G-R for distribution
"""
base_ipl_rate = base_params['CMT_EVENTS'] / (base_params['area'] * base_params['CMT_duration'])
base_rate = np.zeros(self.number_magnitudes, dtype=float)
for iloc in range(0, self.number_magnitudes):
base_rate[iloc] = base_ipl_rate * calculate_taper_function(base_params['CMT_moment'], self.threshold_moment[iloc], moment_function(base_params['corner_mag']), base_params['beta']) # depends on [control=['for'], data=['iloc']]
return base_rate |
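# Hedged sketch of the tapered Gutenberg-Richter weighting implied above
# (Bird & Kagan style). These are stand-ins for the calculate_taper_function
# and moment_function helpers used in the method, not necessarily their
# exact library implementations.
import numpy as np

def moment_function(magnitude):
    # Hanks & Kanamori (1979): seismic moment in N-m from moment magnitude.
    return 10.0 ** (1.5 * magnitude + 9.05)

def taper_function(obs_moment, threshold_moment, corner_moment, beta):
    # Relative survivor function of the tapered G-R distribution at the
    # selected threshold, given the observed (catalogue) threshold.
    return ((obs_moment / threshold_moment) ** beta *
            np.exp((obs_moment - threshold_moment) / corner_moment))

# 0.05 events/yr above M 5 scaled to an expected rate above M 6:
rate = 0.05 * taper_function(moment_function(5.0), moment_function(6.0),
                             moment_function(8.0), 0.65)
print(rate)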
def patch(name,
source=None,
source_hash=None,
source_hash_name=None,
skip_verify=False,
template=None,
context=None,
defaults=None,
options='',
reject_file=None,
strip=None,
saltenv=None,
**kwargs):
'''
Ensure that a patch has been applied to the specified file or directory
.. versionchanged:: 2019.2.0
The ``hash`` and ``dry_run_first`` options are now ignored, as the
logic which determines whether or not the patch has already been
applied no longer requires them. Additionally, this state now supports
patch files that modify more than one file. To use these sort of
patches, specify a directory (and, if necessary, the ``strip`` option)
instead of a file.
.. note::
A suitable ``patch`` executable must be available on the minion. Also,
keep in mind that the pre-check this state does to determine whether or
not changes need to be made will create a temp file and send all patch
output to that file. This means that, in the event that the patch would
not have applied cleanly, the comment included in the state results will
reference a temp file that will no longer exist once the state finishes
running.
name
The file or directory to which the patch should be applied
source
The patch file to apply
.. versionchanged:: 2019.2.0
The source can now be from any file source supported by Salt
(``salt://``, ``http://``, ``https://``, ``ftp://``, etc.).
Templating is also now supported.
source_hash
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`.
.. versionadded:: 2019.2.0
source_hash_name
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
skip_verify
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
template
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
context
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
defaults
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
options
Extra options to pass to patch. This should not be necessary in most
cases.
.. note::
For best results, short opts should be separate from one another.
            The ``-N``, ``-r``, and ``-o`` options are used internally by
this state and cannot be used here. Additionally, instead of using
``-pN`` or ``--strip=N``, use the ``strip`` option documented
below.
reject_file
If specified, any rejected hunks will be written to this file. If not
specified, then they will be written to a temp file which will be
deleted when the state finishes running.
.. important::
The parent directory must exist. Also, this will overwrite the file
if it is already present.
.. versionadded:: 2019.2.0
strip
Number of directories to strip from paths in the patch file. For
example, using the below SLS would instruct Salt to use ``-p1`` when
applying the patch:
.. code-block:: yaml
/etc/myfile.conf:
file.patch:
- source: salt://myfile.patch
- strip: 1
.. versionadded:: 2019.2.0
In previous versions, ``-p1`` would need to be passed as part of
the ``options`` value.
saltenv
Specify the environment from which to retrieve the patch file indicated
by the ``source`` parameter. If not provided, this defaults to the
environment from which the state is being executed.
.. note::
Ignored when the patch file is from a non-``salt://`` source.
**Usage:**
.. code-block:: yaml
# Equivalent to ``patch --forward /opt/myfile.txt myfile.patch``
/opt/myfile.txt:
file.patch:
- source: salt://myfile.patch
'''
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
if not salt.utils.path.which('patch'):
ret['comment'] = 'patch executable not found on minion'
return ret
# is_dir should be defined if we proceed past the if/else block below, but
# just in case, avoid a NameError.
is_dir = False
if not name:
ret['comment'] = 'A file/directory to be patched is required'
return ret
else:
try:
name = os.path.expanduser(name)
except Exception:
ret['comment'] = 'Invalid path \'{0}\''.format(name)
return ret
else:
if not os.path.isabs(name):
ret['comment'] = '{0} is not an absolute path'.format(name)
return ret
elif not os.path.exists(name):
ret['comment'] = '{0} does not exist'.format(name)
return ret
else:
is_dir = os.path.isdir(name)
for deprecated_arg in ('hash', 'dry_run_first'):
if deprecated_arg in kwargs:
ret.setdefault('warnings', []).append(
'The \'{0}\' argument is no longer used and has been '
'ignored.'.format(deprecated_arg)
)
if reject_file is not None:
try:
reject_file_parent = os.path.dirname(reject_file)
except Exception:
ret['comment'] = 'Invalid path \'{0}\' for reject_file'.format(
reject_file
)
return ret
else:
if not os.path.isabs(reject_file_parent):
ret['comment'] = '\'{0}\' is not an absolute path'.format(
reject_file
)
return ret
elif not os.path.isdir(reject_file_parent):
ret['comment'] = (
'Parent directory for reject_file \'{0}\' either does '
'not exist, or is not a directory'.format(reject_file)
)
return ret
sanitized_options = []
options = salt.utils.args.shlex_split(options)
index = 0
max_index = len(options) - 1
# Not using enumerate here because we may need to consume more than one
# option if --strip is used.
blacklisted_options = []
while index <= max_index:
option = options[index]
if not isinstance(option, six.string_types):
option = six.text_type(option)
for item in ('-N', '--forward', '-r', '--reject-file', '-o', '--output'):
if option.startswith(item):
blacklisted = option
break
else:
blacklisted = None
if blacklisted is not None:
blacklisted_options.append(blacklisted)
if option.startswith('-p'):
try:
strip = int(option[2:])
except Exception:
ret['comment'] = (
'Invalid format for \'-p\' CLI option. Consider using '
'the \'strip\' option for this state.'
)
return ret
elif option.startswith('--strip'):
if '=' in option:
# Assume --strip=N
try:
strip = int(option.rsplit('=', 1)[-1])
except Exception:
ret['comment'] = (
                        'Invalid format for \'--strip\' CLI option. Consider '
'using the \'strip\' option for this state.'
)
return ret
else:
# Assume --strip N and grab the next option in the list
try:
strip = int(options[index + 1])
except Exception:
ret['comment'] = (
                        'Invalid format for \'--strip\' CLI option. Consider '
'using the \'strip\' option for this state.'
)
return ret
else:
# We need to increment again because we grabbed the next
# option in the list.
index += 1
else:
sanitized_options.append(option)
# Increment the index
index += 1
if blacklisted_options:
ret['comment'] = (
'The following CLI options are not allowed: {0}'.format(
', '.join(blacklisted_options)
)
)
return ret
options = sanitized_options
try:
source_match = __salt__['file.source_list'](source,
source_hash,
__env__)[0]
except CommandExecutionError as exc:
ret['result'] = False
ret['comment'] = exc.strerror
return ret
else:
# Passing the saltenv to file.managed to pull down the patch file is
# not supported, because the saltenv is already being passed via the
# state compiler and this would result in two values for that argument
# (and a traceback). Therefore, we will add the saltenv to the source
# URL to ensure we pull the file from the correct environment.
if saltenv is not None:
source_match_url, source_match_saltenv = \
salt.utils.url.parse(source_match)
if source_match_url.startswith('salt://'):
if source_match_saltenv is not None \
and source_match_saltenv != saltenv:
ret.setdefault('warnings', []).append(
'Ignoring \'saltenv\' option in favor of saltenv '
'included in the source URL.'
)
else:
source_match += '?saltenv={0}'.format(saltenv)
cleanup = []
try:
patch_file = salt.utils.files.mkstemp()
cleanup.append(patch_file)
try:
orig_test = __opts__['test']
__opts__['test'] = False
sys.modules[__salt__['test.ping'].__module__].__opts__['test'] = False
result = managed(patch_file,
source=source_match,
source_hash=source_hash,
source_hash_name=source_hash_name,
skip_verify=skip_verify,
template=template,
context=context,
defaults=defaults)
except Exception as exc:
msg = 'Failed to cache patch file {0}: {1}'.format(
salt.utils.url.redact_http_basic_auth(source_match),
exc
)
log.exception(msg)
ret['comment'] = msg
return ret
else:
log.debug('file.managed: %s', result)
finally:
__opts__['test'] = orig_test
sys.modules[__salt__['test.ping'].__module__].__opts__['test'] = orig_test
if not result['result']:
log.debug(
'failed to download %s',
salt.utils.url.redact_http_basic_auth(source_match)
)
return result
def _patch(patch_file, options=None, dry_run=False):
patch_opts = copy.copy(sanitized_options)
if options is not None:
patch_opts.extend(options)
return __salt__['file.patch'](
name,
patch_file,
options=patch_opts,
dry_run=dry_run)
if reject_file is not None:
patch_rejects = reject_file
else:
# No rejects file specified, create a temp file
patch_rejects = salt.utils.files.mkstemp()
cleanup.append(patch_rejects)
patch_output = salt.utils.files.mkstemp()
cleanup.append(patch_output)
# Older patch releases can only write patch output to regular files,
# meaning that /dev/null can't be relied on. Also, if we ever want this
# to work on Windows with patch.exe, /dev/null is a non-starter.
# Therefore, redirect all patch output to a temp file, which we will
# then remove.
patch_opts = ['-N', '-r', patch_rejects, '-o', patch_output]
if is_dir and strip is not None:
patch_opts.append('-p{0}'.format(strip))
pre_check = _patch(patch_file, patch_opts)
if pre_check['retcode'] != 0:
# Try to reverse-apply hunks from rejects file using a dry-run.
# If this returns a retcode of 0, we know that the patch was
# already applied. Rejects are written from the base of the
# directory, so the strip option doesn't apply here.
reverse_pass = _patch(patch_rejects, ['-R', '-f'], dry_run=True)
already_applied = reverse_pass['retcode'] == 0
if already_applied:
ret['comment'] = 'Patch was already applied'
ret['result'] = True
return ret
else:
ret['comment'] = (
'Patch would not apply cleanly, no changes made. Results '
'of dry-run are below.'
)
if reject_file is None:
ret['comment'] += (
' Run state again using the reject_file option to '
'save rejects to a persistent file.'
)
opts = copy.copy(__opts__)
opts['color'] = False
ret['comment'] += '\n\n' + salt.output.out_format(
pre_check,
'nested',
opts,
nested_indent=14)
return ret
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'The patch would be applied'
ret['changes'] = pre_check
return ret
# If we've made it here, the patch should apply cleanly
patch_opts = []
if is_dir and strip is not None:
patch_opts.append('-p{0}'.format(strip))
ret['changes'] = _patch(patch_file, patch_opts)
if ret['changes']['retcode'] == 0:
ret['comment'] = 'Patch successfully applied'
ret['result'] = True
else:
ret['comment'] = 'Failed to apply patch'
return ret
finally:
# Clean up any temp files
for path in cleanup:
try:
os.remove(path)
except OSError as exc:
if exc.errno != os.errno.ENOENT:
log.error(
'file.patch: Failed to remove temp file %s: %s',
path, exc
) | def function[patch, parameter[name, source, source_hash, source_hash_name, skip_verify, template, context, defaults, options, reject_file, strip, saltenv]]:
constant[
Ensure that a patch has been applied to the specified file or directory
.. versionchanged:: 2019.2.0
The ``hash`` and ``dry_run_first`` options are now ignored, as the
logic which determines whether or not the patch has already been
applied no longer requires them. Additionally, this state now supports
patch files that modify more than one file. To use these sort of
patches, specify a directory (and, if necessary, the ``strip`` option)
instead of a file.
.. note::
A suitable ``patch`` executable must be available on the minion. Also,
keep in mind that the pre-check this state does to determine whether or
not changes need to be made will create a temp file and send all patch
output to that file. This means that, in the event that the patch would
not have applied cleanly, the comment included in the state results will
reference a temp file that will no longer exist once the state finishes
running.
name
The file or directory to which the patch should be applied
source
The patch file to apply
.. versionchanged:: 2019.2.0
The source can now be from any file source supported by Salt
(``salt://``, ``http://``, ``https://``, ``ftp://``, etc.).
Templating is also now supported.
source_hash
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`.
.. versionadded:: 2019.2.0
source_hash_name
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
skip_verify
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
template
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
context
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
defaults
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
options
Extra options to pass to patch. This should not be necessary in most
cases.
.. note::
For best results, short opts should be separate from one another.
The ``-N`` and ``-r``, and ``-o`` options are used internally by
this state and cannot be used here. Additionally, instead of using
``-pN`` or ``--strip=N``, use the ``strip`` option documented
below.
reject_file
If specified, any rejected hunks will be written to this file. If not
specified, then they will be written to a temp file which will be
deleted when the state finishes running.
.. important::
The parent directory must exist. Also, this will overwrite the file
if it is already present.
.. versionadded:: 2019.2.0
strip
Number of directories to strip from paths in the patch file. For
example, using the below SLS would instruct Salt to use ``-p1`` when
applying the patch:
.. code-block:: yaml
/etc/myfile.conf:
file.patch:
- source: salt://myfile.patch
- strip: 1
.. versionadded:: 2019.2.0
In previous versions, ``-p1`` would need to be passed as part of
the ``options`` value.
saltenv
Specify the environment from which to retrieve the patch file indicated
by the ``source`` parameter. If not provided, this defaults to the
environment from which the state is being executed.
.. note::
Ignored when the patch file is from a non-``salt://`` source.
**Usage:**
.. code-block:: yaml
# Equivalent to ``patch --forward /opt/myfile.txt myfile.patch``
/opt/myfile.txt:
file.patch:
- source: salt://myfile.patch
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1fd4ee0>, <ast.Constant object at 0x7da1b1fd4f40>, <ast.Constant object at 0x7da1b1fd5030>, <ast.Constant object at 0x7da1b1fd4fd0>], [<ast.Name object at 0x7da1b1fd5000>, <ast.Dict object at 0x7da1b1fd4f70>, <ast.Constant object at 0x7da1b1fd4fa0>, <ast.Constant object at 0x7da1b1fd5060>]]
if <ast.UnaryOp object at 0x7da1b1fd5120> begin[:]
call[name[ret]][constant[comment]] assign[=] constant[patch executable not found on minion]
return[name[ret]]
variable[is_dir] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b1fd5270> begin[:]
call[name[ret]][constant[comment]] assign[=] constant[A file/directory to be patched is required]
return[name[ret]]
for taget[name[deprecated_arg]] in starred[tuple[[<ast.Constant object at 0x7da1b1fd6230>, <ast.Constant object at 0x7da1b1fd6290>]]] begin[:]
if compare[name[deprecated_arg] in name[kwargs]] begin[:]
call[call[name[ret].setdefault, parameter[constant[warnings], list[[]]]].append, parameter[call[constant[The '{0}' argument is no longer used and has been ignored.].format, parameter[name[deprecated_arg]]]]]
if compare[name[reject_file] is_not constant[None]] begin[:]
<ast.Try object at 0x7da18f58e8c0>
variable[sanitized_options] assign[=] list[[]]
variable[options] assign[=] call[name[salt].utils.args.shlex_split, parameter[name[options]]]
variable[index] assign[=] constant[0]
variable[max_index] assign[=] binary_operation[call[name[len], parameter[name[options]]] - constant[1]]
variable[blacklisted_options] assign[=] list[[]]
while compare[name[index] less_or_equal[<=] name[max_index]] begin[:]
variable[option] assign[=] call[name[options]][name[index]]
if <ast.UnaryOp object at 0x7da1b20bb820> begin[:]
variable[option] assign[=] call[name[six].text_type, parameter[name[option]]]
for taget[name[item]] in starred[tuple[[<ast.Constant object at 0x7da1b20b88b0>, <ast.Constant object at 0x7da1b20b9b70>, <ast.Constant object at 0x7da1b20bb310>, <ast.Constant object at 0x7da1b20b8be0>, <ast.Constant object at 0x7da1b20b8370>, <ast.Constant object at 0x7da1b20ba4d0>]]] begin[:]
if call[name[option].startswith, parameter[name[item]]] begin[:]
variable[blacklisted] assign[=] name[option]
break
if compare[name[blacklisted] is_not constant[None]] begin[:]
call[name[blacklisted_options].append, parameter[name[blacklisted]]]
if call[name[option].startswith, parameter[constant[-p]]] begin[:]
<ast.Try object at 0x7da1b20ba8c0>
<ast.AugAssign object at 0x7da1b20b9720>
if name[blacklisted_options] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[The following CLI options are not allowed: {0}].format, parameter[call[constant[, ].join, parameter[name[blacklisted_options]]]]]
return[name[ret]]
variable[options] assign[=] name[sanitized_options]
<ast.Try object at 0x7da1b20b90c0>
variable[cleanup] assign[=] list[[]]
<ast.Try object at 0x7da1b20bba30> | keyword[def] identifier[patch] ( identifier[name] ,
identifier[source] = keyword[None] ,
identifier[source_hash] = keyword[None] ,
identifier[source_hash_name] = keyword[None] ,
identifier[skip_verify] = keyword[False] ,
identifier[template] = keyword[None] ,
identifier[context] = keyword[None] ,
identifier[defaults] = keyword[None] ,
identifier[options] = literal[string] ,
identifier[reject_file] = keyword[None] ,
identifier[strip] = keyword[None] ,
identifier[saltenv] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[False] , literal[string] : literal[string] }
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ):
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
identifier[is_dir] = keyword[False]
keyword[if] keyword[not] identifier[name] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[else] :
keyword[try] :
identifier[name] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[name] )
keyword[except] identifier[Exception] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[else] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[name] ):
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[elif] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ):
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[else] :
identifier[is_dir] = identifier[os] . identifier[path] . identifier[isdir] ( identifier[name] )
keyword[for] identifier[deprecated_arg] keyword[in] ( literal[string] , literal[string] ):
keyword[if] identifier[deprecated_arg] keyword[in] identifier[kwargs] :
identifier[ret] . identifier[setdefault] ( literal[string] ,[]). identifier[append] (
literal[string]
literal[string] . identifier[format] ( identifier[deprecated_arg] )
)
keyword[if] identifier[reject_file] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[reject_file_parent] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[reject_file] )
keyword[except] identifier[Exception] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] (
identifier[reject_file]
)
keyword[return] identifier[ret]
keyword[else] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[reject_file_parent] ):
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] (
identifier[reject_file]
)
keyword[return] identifier[ret]
keyword[elif] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[reject_file_parent] ):
identifier[ret] [ literal[string] ]=(
literal[string]
literal[string] . identifier[format] ( identifier[reject_file] )
)
keyword[return] identifier[ret]
identifier[sanitized_options] =[]
identifier[options] = identifier[salt] . identifier[utils] . identifier[args] . identifier[shlex_split] ( identifier[options] )
identifier[index] = literal[int]
identifier[max_index] = identifier[len] ( identifier[options] )- literal[int]
identifier[blacklisted_options] =[]
keyword[while] identifier[index] <= identifier[max_index] :
identifier[option] = identifier[options] [ identifier[index] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[option] , identifier[six] . identifier[string_types] ):
identifier[option] = identifier[six] . identifier[text_type] ( identifier[option] )
keyword[for] identifier[item] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[option] . identifier[startswith] ( identifier[item] ):
identifier[blacklisted] = identifier[option]
keyword[break]
keyword[else] :
identifier[blacklisted] = keyword[None]
keyword[if] identifier[blacklisted] keyword[is] keyword[not] keyword[None] :
identifier[blacklisted_options] . identifier[append] ( identifier[blacklisted] )
keyword[if] identifier[option] . identifier[startswith] ( literal[string] ):
keyword[try] :
identifier[strip] = identifier[int] ( identifier[option] [ literal[int] :])
keyword[except] identifier[Exception] :
identifier[ret] [ literal[string] ]=(
literal[string]
literal[string]
)
keyword[return] identifier[ret]
keyword[elif] identifier[option] . identifier[startswith] ( literal[string] ):
keyword[if] literal[string] keyword[in] identifier[option] :
keyword[try] :
identifier[strip] = identifier[int] ( identifier[option] . identifier[rsplit] ( literal[string] , literal[int] )[- literal[int] ])
keyword[except] identifier[Exception] :
identifier[ret] [ literal[string] ]=(
literal[string]
literal[string]
)
keyword[return] identifier[ret]
keyword[else] :
keyword[try] :
identifier[strip] = identifier[int] ( identifier[options] [ identifier[index] + literal[int] ])
keyword[except] identifier[Exception] :
identifier[ret] [ literal[string] ]=(
literal[string]
literal[string]
)
keyword[return] identifier[ret]
keyword[else] :
identifier[index] += literal[int]
keyword[else] :
identifier[sanitized_options] . identifier[append] ( identifier[option] )
identifier[index] += literal[int]
keyword[if] identifier[blacklisted_options] :
identifier[ret] [ literal[string] ]=(
literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[blacklisted_options] )
)
)
keyword[return] identifier[ret]
identifier[options] = identifier[sanitized_options]
keyword[try] :
identifier[source_match] = identifier[__salt__] [ literal[string] ]( identifier[source] ,
identifier[source_hash] ,
identifier[__env__] )[ literal[int] ]
keyword[except] identifier[CommandExecutionError] keyword[as] identifier[exc] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= identifier[exc] . identifier[strerror]
keyword[return] identifier[ret]
keyword[else] :
keyword[if] identifier[saltenv] keyword[is] keyword[not] keyword[None] :
identifier[source_match_url] , identifier[source_match_saltenv] = identifier[salt] . identifier[utils] . identifier[url] . identifier[parse] ( identifier[source_match] )
keyword[if] identifier[source_match_url] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[source_match_saltenv] keyword[is] keyword[not] keyword[None] keyword[and] identifier[source_match_saltenv] != identifier[saltenv] :
identifier[ret] . identifier[setdefault] ( literal[string] ,[]). identifier[append] (
literal[string]
literal[string]
)
keyword[else] :
identifier[source_match] += literal[string] . identifier[format] ( identifier[saltenv] )
identifier[cleanup] =[]
keyword[try] :
identifier[patch_file] = identifier[salt] . identifier[utils] . identifier[files] . identifier[mkstemp] ()
identifier[cleanup] . identifier[append] ( identifier[patch_file] )
keyword[try] :
identifier[orig_test] = identifier[__opts__] [ literal[string] ]
identifier[__opts__] [ literal[string] ]= keyword[False]
identifier[sys] . identifier[modules] [ identifier[__salt__] [ literal[string] ]. identifier[__module__] ]. identifier[__opts__] [ literal[string] ]= keyword[False]
identifier[result] = identifier[managed] ( identifier[patch_file] ,
identifier[source] = identifier[source_match] ,
identifier[source_hash] = identifier[source_hash] ,
identifier[source_hash_name] = identifier[source_hash_name] ,
identifier[skip_verify] = identifier[skip_verify] ,
identifier[template] = identifier[template] ,
identifier[context] = identifier[context] ,
identifier[defaults] = identifier[defaults] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[msg] = literal[string] . identifier[format] (
identifier[salt] . identifier[utils] . identifier[url] . identifier[redact_http_basic_auth] ( identifier[source_match] ),
identifier[exc]
)
identifier[log] . identifier[exception] ( identifier[msg] )
identifier[ret] [ literal[string] ]= identifier[msg]
keyword[return] identifier[ret]
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] , identifier[result] )
keyword[finally] :
identifier[__opts__] [ literal[string] ]= identifier[orig_test]
identifier[sys] . identifier[modules] [ identifier[__salt__] [ literal[string] ]. identifier[__module__] ]. identifier[__opts__] [ literal[string] ]= identifier[orig_test]
keyword[if] keyword[not] identifier[result] [ literal[string] ]:
identifier[log] . identifier[debug] (
literal[string] ,
identifier[salt] . identifier[utils] . identifier[url] . identifier[redact_http_basic_auth] ( identifier[source_match] )
)
keyword[return] identifier[result]
keyword[def] identifier[_patch] ( identifier[patch_file] , identifier[options] = keyword[None] , identifier[dry_run] = keyword[False] ):
identifier[patch_opts] = identifier[copy] . identifier[copy] ( identifier[sanitized_options] )
keyword[if] identifier[options] keyword[is] keyword[not] keyword[None] :
identifier[patch_opts] . identifier[extend] ( identifier[options] )
keyword[return] identifier[__salt__] [ literal[string] ](
identifier[name] ,
identifier[patch_file] ,
identifier[options] = identifier[patch_opts] ,
identifier[dry_run] = identifier[dry_run] )
keyword[if] identifier[reject_file] keyword[is] keyword[not] keyword[None] :
identifier[patch_rejects] = identifier[reject_file]
keyword[else] :
identifier[patch_rejects] = identifier[salt] . identifier[utils] . identifier[files] . identifier[mkstemp] ()
identifier[cleanup] . identifier[append] ( identifier[patch_rejects] )
identifier[patch_output] = identifier[salt] . identifier[utils] . identifier[files] . identifier[mkstemp] ()
identifier[cleanup] . identifier[append] ( identifier[patch_output] )
identifier[patch_opts] =[ literal[string] , literal[string] , identifier[patch_rejects] , literal[string] , identifier[patch_output] ]
keyword[if] identifier[is_dir] keyword[and] identifier[strip] keyword[is] keyword[not] keyword[None] :
identifier[patch_opts] . identifier[append] ( literal[string] . identifier[format] ( identifier[strip] ))
identifier[pre_check] = identifier[_patch] ( identifier[patch_file] , identifier[patch_opts] )
keyword[if] identifier[pre_check] [ literal[string] ]!= literal[int] :
identifier[reverse_pass] = identifier[_patch] ( identifier[patch_rejects] ,[ literal[string] , literal[string] ], identifier[dry_run] = keyword[True] )
identifier[already_applied] = identifier[reverse_pass] [ literal[string] ]== literal[int]
keyword[if] identifier[already_applied] :
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= keyword[True]
keyword[return] identifier[ret]
keyword[else] :
identifier[ret] [ literal[string] ]=(
literal[string]
literal[string]
)
keyword[if] identifier[reject_file] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]+=(
literal[string]
literal[string]
)
identifier[opts] = identifier[copy] . identifier[copy] ( identifier[__opts__] )
identifier[opts] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]+= literal[string] + identifier[salt] . identifier[output] . identifier[out_format] (
identifier[pre_check] ,
literal[string] ,
identifier[opts] ,
identifier[nested_indent] = literal[int] )
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= identifier[pre_check]
keyword[return] identifier[ret]
identifier[patch_opts] =[]
keyword[if] identifier[is_dir] keyword[and] identifier[strip] keyword[is] keyword[not] keyword[None] :
identifier[patch_opts] . identifier[append] ( literal[string] . identifier[format] ( identifier[strip] ))
identifier[ret] [ literal[string] ]= identifier[_patch] ( identifier[patch_file] , identifier[patch_opts] )
keyword[if] identifier[ret] [ literal[string] ][ literal[string] ]== literal[int] :
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[finally] :
keyword[for] identifier[path] keyword[in] identifier[cleanup] :
keyword[try] :
identifier[os] . identifier[remove] ( identifier[path] )
keyword[except] identifier[OSError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] != identifier[os] . identifier[errno] . identifier[ENOENT] :
identifier[log] . identifier[error] (
literal[string] ,
identifier[path] , identifier[exc]
) | def patch(name, source=None, source_hash=None, source_hash_name=None, skip_verify=False, template=None, context=None, defaults=None, options='', reject_file=None, strip=None, saltenv=None, **kwargs):
"""
Ensure that a patch has been applied to the specified file or directory
.. versionchanged:: 2019.2.0
The ``hash`` and ``dry_run_first`` options are now ignored, as the
logic which determines whether or not the patch has already been
applied no longer requires them. Additionally, this state now supports
patch files that modify more than one file. To use these sort of
patches, specify a directory (and, if necessary, the ``strip`` option)
instead of a file.
.. note::
A suitable ``patch`` executable must be available on the minion. Also,
keep in mind that the pre-check this state does to determine whether or
not changes need to be made will create a temp file and send all patch
output to that file. This means that, in the event that the patch would
not have applied cleanly, the comment included in the state results will
reference a temp file that will no longer exist once the state finishes
running.
name
The file or directory to which the patch should be applied
source
The patch file to apply
.. versionchanged:: 2019.2.0
The source can now be from any file source supported by Salt
(``salt://``, ``http://``, ``https://``, ``ftp://``, etc.).
Templating is also now supported.
source_hash
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`.
.. versionadded:: 2019.2.0
source_hash_name
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
skip_verify
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
template
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
context
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
defaults
Works the same way as in :py:func:`file.managed
<salt.states.file.managed>`
.. versionadded:: 2019.2.0
options
Extra options to pass to patch. This should not be necessary in most
cases.
.. note::
For best results, short opts should be separate from one another.
The ``-N`` and ``-r``, and ``-o`` options are used internally by
this state and cannot be used here. Additionally, instead of using
``-pN`` or ``--strip=N``, use the ``strip`` option documented
below.
reject_file
If specified, any rejected hunks will be written to this file. If not
specified, then they will be written to a temp file which will be
deleted when the state finishes running.
.. important::
The parent directory must exist. Also, this will overwrite the file
if it is already present.
.. versionadded:: 2019.2.0
strip
Number of directories to strip from paths in the patch file. For
example, using the below SLS would instruct Salt to use ``-p1`` when
applying the patch:
.. code-block:: yaml
/etc/myfile.conf:
file.patch:
- source: salt://myfile.patch
- strip: 1
.. versionadded:: 2019.2.0
In previous versions, ``-p1`` would need to be passed as part of
the ``options`` value.
saltenv
Specify the environment from which to retrieve the patch file indicated
by the ``source`` parameter. If not provided, this defaults to the
environment from which the state is being executed.
.. note::
Ignored when the patch file is from a non-``salt://`` source.
**Usage:**
.. code-block:: yaml
# Equivalent to ``patch --forward /opt/myfile.txt myfile.patch``
/opt/myfile.txt:
file.patch:
- source: salt://myfile.patch
"""
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
if not salt.utils.path.which('patch'):
ret['comment'] = 'patch executable not found on minion'
return ret # depends on [control=['if'], data=[]]
# is_dir should be defined if we proceed past the if/else block below, but
# just in case, avoid a NameError.
is_dir = False
if not name:
ret['comment'] = 'A file/directory to be patched is required'
return ret # depends on [control=['if'], data=[]]
else:
try:
name = os.path.expanduser(name) # depends on [control=['try'], data=[]]
except Exception:
ret['comment'] = "Invalid path '{0}'".format(name)
return ret # depends on [control=['except'], data=[]]
else:
if not os.path.isabs(name):
ret['comment'] = '{0} is not an absolute path'.format(name)
return ret # depends on [control=['if'], data=[]]
elif not os.path.exists(name):
ret['comment'] = '{0} does not exist'.format(name)
return ret # depends on [control=['if'], data=[]]
else:
is_dir = os.path.isdir(name)
for deprecated_arg in ('hash', 'dry_run_first'):
if deprecated_arg in kwargs:
ret.setdefault('warnings', []).append("The '{0}' argument is no longer used and has been ignored.".format(deprecated_arg)) # depends on [control=['if'], data=['deprecated_arg']] # depends on [control=['for'], data=['deprecated_arg']]
if reject_file is not None:
try:
reject_file_parent = os.path.dirname(reject_file) # depends on [control=['try'], data=[]]
except Exception:
ret['comment'] = "Invalid path '{0}' for reject_file".format(reject_file)
return ret # depends on [control=['except'], data=[]]
else:
if not os.path.isabs(reject_file_parent):
ret['comment'] = "'{0}' is not an absolute path".format(reject_file)
return ret # depends on [control=['if'], data=[]]
elif not os.path.isdir(reject_file_parent):
ret['comment'] = "Parent directory for reject_file '{0}' either does not exist, or is not a directory".format(reject_file)
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['reject_file']]
sanitized_options = []
options = salt.utils.args.shlex_split(options)
index = 0
max_index = len(options) - 1
# Not using enumerate here because we may need to consume more than one
# option if --strip is used.
blacklisted_options = []
while index <= max_index:
option = options[index]
if not isinstance(option, six.string_types):
option = six.text_type(option) # depends on [control=['if'], data=[]]
for item in ('-N', '--forward', '-r', '--reject-file', '-o', '--output'):
if option.startswith(item):
blacklisted = option
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
else:
blacklisted = None
if blacklisted is not None:
blacklisted_options.append(blacklisted) # depends on [control=['if'], data=['blacklisted']]
if option.startswith('-p'):
try:
strip = int(option[2:]) # depends on [control=['try'], data=[]]
except Exception:
ret['comment'] = "Invalid format for '-p' CLI option. Consider using the 'strip' option for this state."
return ret # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif option.startswith('--strip'):
if '=' in option:
# Assume --strip=N
try:
strip = int(option.rsplit('=', 1)[-1]) # depends on [control=['try'], data=[]]
except Exception:
ret['comment'] = "Invalid format for '-strip' CLI option. Consider using the 'strip' option for this state."
return ret # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['option']]
else:
# Assume --strip N and grab the next option in the list
try:
strip = int(options[index + 1]) # depends on [control=['try'], data=[]]
except Exception:
ret['comment'] = "Invalid format for '-strip' CLI option. Consider using the 'strip' option for this state."
return ret # depends on [control=['except'], data=[]]
else:
# We need to increment again because we grabbed the next
# option in the list.
index += 1 # depends on [control=['if'], data=[]]
else:
sanitized_options.append(option)
# Increment the index
index += 1 # depends on [control=['while'], data=['index']]
if blacklisted_options:
ret['comment'] = 'The following CLI options are not allowed: {0}'.format(', '.join(blacklisted_options))
return ret # depends on [control=['if'], data=[]]
options = sanitized_options
try:
source_match = __salt__['file.source_list'](source, source_hash, __env__)[0] # depends on [control=['try'], data=[]]
except CommandExecutionError as exc:
ret['result'] = False
ret['comment'] = exc.strerror
return ret # depends on [control=['except'], data=['exc']]
else:
# Passing the saltenv to file.managed to pull down the patch file is
# not supported, because the saltenv is already being passed via the
# state compiler and this would result in two values for that argument
# (and a traceback). Therefore, we will add the saltenv to the source
# URL to ensure we pull the file from the correct environment.
if saltenv is not None:
(source_match_url, source_match_saltenv) = salt.utils.url.parse(source_match)
if source_match_url.startswith('salt://'):
if source_match_saltenv is not None and source_match_saltenv != saltenv:
ret.setdefault('warnings', []).append("Ignoring 'saltenv' option in favor of saltenv included in the source URL.") # depends on [control=['if'], data=[]]
else:
source_match += '?saltenv={0}'.format(saltenv) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['saltenv']]
cleanup = []
try:
patch_file = salt.utils.files.mkstemp()
cleanup.append(patch_file)
try:
orig_test = __opts__['test']
__opts__['test'] = False
sys.modules[__salt__['test.ping'].__module__].__opts__['test'] = False
result = managed(patch_file, source=source_match, source_hash=source_hash, source_hash_name=source_hash_name, skip_verify=skip_verify, template=template, context=context, defaults=defaults) # depends on [control=['try'], data=[]]
except Exception as exc:
msg = 'Failed to cache patch file {0}: {1}'.format(salt.utils.url.redact_http_basic_auth(source_match), exc)
log.exception(msg)
ret['comment'] = msg
return ret # depends on [control=['except'], data=['exc']]
else:
log.debug('file.managed: %s', result)
finally:
__opts__['test'] = orig_test
sys.modules[__salt__['test.ping'].__module__].__opts__['test'] = orig_test
if not result['result']:
log.debug('failed to download %s', salt.utils.url.redact_http_basic_auth(source_match))
return result # depends on [control=['if'], data=[]]
def _patch(patch_file, options=None, dry_run=False):
patch_opts = copy.copy(sanitized_options)
if options is not None:
patch_opts.extend(options) # depends on [control=['if'], data=['options']]
return __salt__['file.patch'](name, patch_file, options=patch_opts, dry_run=dry_run)
if reject_file is not None:
patch_rejects = reject_file # depends on [control=['if'], data=['reject_file']]
else:
# No rejects file specified, create a temp file
patch_rejects = salt.utils.files.mkstemp()
cleanup.append(patch_rejects)
patch_output = salt.utils.files.mkstemp()
cleanup.append(patch_output)
# Older patch releases can only write patch output to regular files,
# meaning that /dev/null can't be relied on. Also, if we ever want this
# to work on Windows with patch.exe, /dev/null is a non-starter.
# Therefore, redirect all patch output to a temp file, which we will
# then remove.
patch_opts = ['-N', '-r', patch_rejects, '-o', patch_output]
if is_dir and strip is not None:
patch_opts.append('-p{0}'.format(strip)) # depends on [control=['if'], data=[]]
pre_check = _patch(patch_file, patch_opts)
if pre_check['retcode'] != 0:
# Try to reverse-apply hunks from rejects file using a dry-run.
# If this returns a retcode of 0, we know that the patch was
# already applied. Rejects are written from the base of the
# directory, so the strip option doesn't apply here.
reverse_pass = _patch(patch_rejects, ['-R', '-f'], dry_run=True)
already_applied = reverse_pass['retcode'] == 0
if already_applied:
ret['comment'] = 'Patch was already applied'
ret['result'] = True
return ret # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Patch would not apply cleanly, no changes made. Results of dry-run are below.'
if reject_file is None:
ret['comment'] += ' Run state again using the reject_file option to save rejects to a persistent file.' # depends on [control=['if'], data=[]]
opts = copy.copy(__opts__)
opts['color'] = False
ret['comment'] += '\n\n' + salt.output.out_format(pre_check, 'nested', opts, nested_indent=14)
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'The patch would be applied'
ret['changes'] = pre_check
return ret # depends on [control=['if'], data=[]]
# If we've made it here, the patch should apply cleanly
patch_opts = []
if is_dir and strip is not None:
patch_opts.append('-p{0}'.format(strip)) # depends on [control=['if'], data=[]]
ret['changes'] = _patch(patch_file, patch_opts)
if ret['changes']['retcode'] == 0:
ret['comment'] = 'Patch successfully applied'
ret['result'] = True # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Failed to apply patch'
return ret # depends on [control=['try'], data=[]]
finally:
# Clean up any temp files
for path in cleanup:
try:
os.remove(path) # depends on [control=['try'], data=[]]
except OSError as exc:
if exc.errno != os.errno.ENOENT:
log.error('file.patch: Failed to remove temp file %s: %s', path, exc) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exc']] # depends on [control=['for'], data=['path']] |
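# Minimal sketch of the "already applied?" pre-check idea from the state
# above, using plain subprocess calls to GNU patch rather than Salt's
# execution modules; the target directory and patch file are illustrative.
import subprocess

def patch_status(target_dir, patch_file):
    # --dry-run -N fails both when the patch conflicts and when it is
    # already applied, so a reverse dry-run disambiguates the two cases.
    forward = subprocess.run(
        ['patch', '--dry-run', '-N', '-d', target_dir, '-i', patch_file],
        capture_output=True)
    if forward.returncode == 0:
        return 'would apply'
    reverse = subprocess.run(
        ['patch', '--dry-run', '-R', '-f', '-d', target_dir, '-i', patch_file],
        capture_output=True)
    return 'already applied' if reverse.returncode == 0 else 'conflict'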
def main(self):
"""
Run the required methods in the appropriate order
"""
self.targets()
self.bait(k=49)
self.reversebait(maskmiddle='t', k=19)
self.subsample_reads() | def function[main, parameter[self]]:
constant[
Run the required methods in the appropriate order
]
call[name[self].targets, parameter[]]
call[name[self].bait, parameter[]]
call[name[self].reversebait, parameter[]]
call[name[self].subsample_reads, parameter[]] | keyword[def] identifier[main] ( identifier[self] ):
literal[string]
identifier[self] . identifier[targets] ()
identifier[self] . identifier[bait] ( identifier[k] = literal[int] )
identifier[self] . identifier[reversebait] ( identifier[maskmiddle] = literal[string] , identifier[k] = literal[int] )
identifier[self] . identifier[subsample_reads] () | def main(self):
"""
Run the required methods in the appropriate order
"""
self.targets()
self.bait(k=49)
self.reversebait(maskmiddle='t', k=19)
self.subsample_reads() |
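# Illustrative stand-ins showing the ordering contract of main(); the real
# targets/bait/reversebait/subsample_reads do the heavy lifting elsewhere
# in the pipeline, so the bodies below only trace the call order.
class PipelineSketch:
    def targets(self):
        print('1. collect target sequences')

    def bait(self, k):
        print('2. bait reads at k=%d' % k)

    def reversebait(self, maskmiddle, k):
        print('3. reverse-bait targets (maskmiddle=%s, k=%d)' % (maskmiddle, k))

    def subsample_reads(self):
        print('4. subsample the baited reads')

    def main(self):
        self.targets()
        self.bait(k=49)
        self.reversebait(maskmiddle='t', k=19)
        self.subsample_reads()

PipelineSketch().main()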
def strip_dbm_antsignal(self, idx):
"""strip(1 byte) radiotap.dbm.ant_signal
:idx: int
:return: int
idx
:return: int
"""
dbm_antsignal, = struct.unpack_from('<b', self._rtap, idx)
return idx + 1, dbm_antsignal | def function[strip_dbm_antsignal, parameter[self, idx]]:
constant[strip(1 byte) radiotap.dbm.ant_signal
:idx: int
:return: int
idx
:return: int
]
<ast.Tuple object at 0x7da1aff00370> assign[=] call[name[struct].unpack_from, parameter[constant[<b], name[self]._rtap, name[idx]]]
return[tuple[[<ast.BinOp object at 0x7da1aff01360>, <ast.Name object at 0x7da1aff01240>]]] | keyword[def] identifier[strip_dbm_antsignal] ( identifier[self] , identifier[idx] ):
literal[string]
identifier[dbm_antsignal] ,= identifier[struct] . identifier[unpack_from] ( literal[string] , identifier[self] . identifier[_rtap] , identifier[idx] )
keyword[return] identifier[idx] + literal[int] , identifier[dbm_antsignal] | def strip_dbm_antsignal(self, idx):
"""strip(1 byte) radiotap.dbm.ant_signal
:idx: int
:return: int
idx
:return: int
"""
(dbm_antsignal,) = struct.unpack_from('<b', self._rtap, idx)
return (idx + 1, dbm_antsignal) |
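# Standalone illustration of the '<b' unpack used above: one signed byte
# (the little-endian flag is irrelevant for a single byte but kept for
# parity with the method). The buffer contents here are made up.
import struct

buf = b'\x00\xc4\x00'   # 0xC4 reads as -60 when interpreted as signed (-60 dBm)
idx = 1
dbm, = struct.unpack_from('<b', buf, idx)
idx += 1
print(idx, dbm)         # -> 2 -60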
def start_tag(self, name, attrs=None):
"""Open an XML tag"""
if not attrs:
self._write('<%s>' % name)
else:
self._write('<' + name)
for (name, value) in sorted(attrs.items()):
self._write(
' %s=%s' % (name, quoteattr(scientificformat(value))))
self._write('>')
self.indentlevel += 1 | def function[start_tag, parameter[self, name, attrs]]:
constant[Open an XML tag]
if <ast.UnaryOp object at 0x7da204622c50> begin[:]
call[name[self]._write, parameter[binary_operation[constant[<%s>] <ast.Mod object at 0x7da2590d6920> name[name]]]]
<ast.AugAssign object at 0x7da204620430> | keyword[def] identifier[start_tag] ( identifier[self] , identifier[name] , identifier[attrs] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[attrs] :
identifier[self] . identifier[_write] ( literal[string] % identifier[name] )
keyword[else] :
identifier[self] . identifier[_write] ( literal[string] + identifier[name] )
keyword[for] ( identifier[name] , identifier[value] ) keyword[in] identifier[sorted] ( identifier[attrs] . identifier[items] ()):
identifier[self] . identifier[_write] (
literal[string] %( identifier[name] , identifier[quoteattr] ( identifier[scientificformat] ( identifier[value] ))))
identifier[self] . identifier[_write] ( literal[string] )
identifier[self] . identifier[indentlevel] += literal[int] | def start_tag(self, name, attrs=None):
"""Open an XML tag"""
if not attrs:
self._write('<%s>' % name) # depends on [control=['if'], data=[]]
else:
self._write('<' + name)
for (name, value) in sorted(attrs.items()):
self._write(' %s=%s' % (name, quoteattr(scientificformat(value)))) # depends on [control=['for'], data=[]]
self._write('>')
self.indentlevel += 1 |
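# Sketch of the writer contract assumed by start_tag above: _write appends
# text and indentlevel tracks nesting. quoteattr is the real stdlib helper;
# scientificformat is a stand-in for the source's numeric formatter.
from xml.sax.saxutils import quoteattr

def scientificformat(value):
    # Stand-in: the original formats numbers scientifically; just stringify.
    return str(value)

class TinyWriter:
    def __init__(self):
        self.out = []
        self.indentlevel = 0

    def _write(self, text):
        self.out.append(text)

    def start_tag(self, name, attrs=None):
        if not attrs:
            self._write('<%s>' % name)
        else:
            self._write('<' + name)
            for aname, value in sorted(attrs.items()):
                self._write(' %s=%s' % (aname, quoteattr(scientificformat(value))))
            self._write('>')
        self.indentlevel += 1

w = TinyWriter()
w.start_tag('node', {'lat': 1.5, 'lon': 0.25})
print(''.join(w.out))  # <node lat="1.5" lon="0.25">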
def from_dict(data, require=None):
"""Validates a dictionary containing Google service account data.
Creates and returns a :class:`google.auth.crypt.Signer` instance from the
private key specified in the data.
Args:
data (Mapping[str, str]): The service account data
require (Sequence[str]): List of keys required to be present in the
info.
Returns:
google.auth.crypt.Signer: A signer created from the private key in the
service account file.
Raises:
ValueError: if the data was in the wrong format, or if one of the
required keys is missing.
"""
keys_needed = set(require if require is not None else [])
missing = keys_needed.difference(six.iterkeys(data))
if missing:
raise ValueError(
'Service account info was not in the expected format, missing '
'fields {}.'.format(', '.join(missing)))
# Create a signer.
signer = crypt.RSASigner.from_service_account_info(data)
return signer | def function[from_dict, parameter[data, require]]:
constant[Validates a dictionary containing Google service account data.
Creates and returns a :class:`google.auth.crypt.Signer` instance from the
private key specified in the data.
Args:
data (Mapping[str, str]): The service account data
require (Sequence[str]): List of keys required to be present in the
info.
Returns:
google.auth.crypt.Signer: A signer created from the private key in the
service account file.
Raises:
ValueError: if the data was in the wrong format, or if one of the
required keys is missing.
]
variable[keys_needed] assign[=] call[name[set], parameter[<ast.IfExp object at 0x7da2041dab30>]]
variable[missing] assign[=] call[name[keys_needed].difference, parameter[call[name[six].iterkeys, parameter[name[data]]]]]
if name[missing] begin[:]
<ast.Raise object at 0x7da2041daa70>
variable[signer] assign[=] call[name[crypt].RSASigner.from_service_account_info, parameter[name[data]]]
return[name[signer]] | keyword[def] identifier[from_dict] ( identifier[data] , identifier[require] = keyword[None] ):
literal[string]
identifier[keys_needed] = identifier[set] ( identifier[require] keyword[if] identifier[require] keyword[is] keyword[not] keyword[None] keyword[else] [])
identifier[missing] = identifier[keys_needed] . identifier[difference] ( identifier[six] . identifier[iterkeys] ( identifier[data] ))
keyword[if] identifier[missing] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[missing] )))
identifier[signer] = identifier[crypt] . identifier[RSASigner] . identifier[from_service_account_info] ( identifier[data] )
keyword[return] identifier[signer] | def from_dict(data, require=None):
"""Validates a dictionary containing Google service account data.
Creates and returns a :class:`google.auth.crypt.Signer` instance from the
private key specified in the data.
Args:
data (Mapping[str, str]): The service account data
require (Sequence[str]): List of keys required to be present in the
info.
Returns:
google.auth.crypt.Signer: A signer created from the private key in the
service account file.
Raises:
ValueError: if the data was in the wrong format, or if one of the
required keys is missing.
"""
keys_needed = set(require if require is not None else [])
missing = keys_needed.difference(six.iterkeys(data))
if missing:
raise ValueError('Service account info was not in the expected format, missing fields {}.'.format(', '.join(missing))) # depends on [control=['if'], data=[]]
# Create a signer.
signer = crypt.RSASigner.from_service_account_info(data)
return signer |
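# Usage sketch for from_dict: the dict fields below are placeholders, not
# real credentials, and google-auth's crypt module must be importable.
# A missing required key raises ValueError before the private key is parsed.
info = {
    'client_email': 'svc-account@example.iam.gserviceaccount.com',
    'private_key': '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n',
}
signer = from_dict(info, require=['client_email', 'private_key'])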
def get_face_fun_on(self):
"""
        Determine whether the extended face format (FaceFunOn option) is enabled.
"""
command = const.CMD_OPTIONS_RRQ
command_string = b'FaceFunOn\x00'
response_size = 1024
cmd_response = self.__send_command(command, command_string, response_size)
if cmd_response.get('status'):
            response = self.__data.split(b'=', 1)[-1].split(b'\x00')[0]
            return safe_cast(response, int, 0) if response else 0
else:
self._clear_error(command_string)
return None | def function[get_face_fun_on, parameter[self]]:
constant[
        Determine whether the extended face format (FaceFunOn option) is enabled.
]
variable[command] assign[=] name[const].CMD_OPTIONS_RRQ
variable[command_string] assign[=] constant[b'FaceFunOn\x00']
variable[response_size] assign[=] constant[1024]
variable[cmd_response] assign[=] call[name[self].__send_command, parameter[name[command], name[command_string], name[response_size]]]
if call[name[cmd_response].get, parameter[constant[status]]] begin[:]
variable[response] assign[=] call[call[call[call[name[self].__data.split, parameter[constant[b'='], constant[1]]]][<ast.UnaryOp object at 0x7da1b1ecd3f0>].split, parameter[constant[b'\x00']]]][constant[0]]
return[<ast.IfExp object at 0x7da1b1ecc250>] | keyword[def] identifier[get_face_fun_on] ( identifier[self] ):
literal[string]
identifier[command] = identifier[const] . identifier[CMD_OPTIONS_RRQ]
identifier[command_string] = literal[string]
identifier[response_size] = literal[int]
identifier[cmd_response] = identifier[self] . identifier[__send_command] ( identifier[command] , identifier[command_string] , identifier[response_size] )
keyword[if] identifier[cmd_response] . identifier[get] ( literal[string] ):
identifier[response] =( identifier[self] . identifier[__data] . identifier[split] ( literal[string] , literal[int] )[- literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ])
keyword[return] identifier[safe_cast] ( identifier[response] , identifier[int] , literal[int] ) keyword[if] identifier[response] keyword[else] literal[int]
keyword[else] :
identifier[self] . identifier[_clear_error] ( identifier[command_string] )
keyword[return] keyword[None] | def get_face_fun_on(self):
"""
        Determine whether the extended face format (FaceFunOn option) is enabled.
"""
command = const.CMD_OPTIONS_RRQ
command_string = b'FaceFunOn\x00'
response_size = 1024
cmd_response = self.__send_command(command, command_string, response_size)
if cmd_response.get('status'):
response = self.__data.split(b'=', 1)[-1].split(b'\x00')[0]
return safe_cast(response, int, 0) if response else 0 # depends on [control=['if'], data=[]]
else:
self._clear_error(command_string)
return None |
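# Usage sketch (assumes pyzk-style bindings; the connection setup is an
# assumption, only get_face_fun_on itself is defined above): returns the
# FaceFunOn option as an int, 0 for an empty value, or None on error.
from zk import ZK
conn = ZK('192.168.1.201', port=4370).connect()
face_fun = conn.get_face_fun_on()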
def check_status(status):
""" Check the status of a mkl functions and raise a python exeption if
there is an error.
"""
if status:
msg = lib.DftiErrorMessage(status)
msg = ctypes.c_char_p(msg).value
raise RuntimeError(msg) | def function[check_status, parameter[status]]:
constant[ Check the status of an MKL function call and raise a Python exception if
    there is an error.
]
if name[status] begin[:]
variable[msg] assign[=] call[name[lib].DftiErrorMessage, parameter[name[status]]]
variable[msg] assign[=] call[name[ctypes].c_char_p, parameter[name[msg]]].value
<ast.Raise object at 0x7da20c6c6b30> | keyword[def] identifier[check_status] ( identifier[status] ):
literal[string]
keyword[if] identifier[status] :
identifier[msg] = identifier[lib] . identifier[DftiErrorMessage] ( identifier[status] )
identifier[msg] = identifier[ctypes] . identifier[c_char_p] ( identifier[msg] ). identifier[value]
keyword[raise] identifier[RuntimeError] ( identifier[msg] ) | def check_status(status):
""" Check the status of a mkl functions and raise a python exeption if
there is an error.
"""
if status:
msg = lib.DftiErrorMessage(status)
msg = ctypes.c_char_p(msg).value
raise RuntimeError(msg) # depends on [control=['if'], data=[]] |
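# Minimal sketch of the intended call pattern: wrap the integer status code
# returned by an MKL DFTI call. `lib` is the ctypes handle used above;
# DftiCommitDescriptor and `desc` are assumptions about the surrounding
# bindings, not verified signatures.
status = lib.DftiCommitDescriptor(desc)
check_status(status)   # raises RuntimeError with MKL's own message on failure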
def run(
self,
host: Optional[str] = None,
port: Optional[int] = None,
debug: bool = False,
ssl: Union[dict, SSLContext, None] = None,
sock: Optional[socket] = None,
workers: int = 1,
protocol: Type[Protocol] = None,
backlog: int = 100,
stop_event: Any = None,
register_sys_signals: bool = True,
access_log: Optional[bool] = None,
**kwargs: Any
) -> None:
"""Run the HTTP Server and listen until keyboard interrupt or term
signal. On termination, drain connections before closing.
:param host: Address to host on
:type host: str
:param port: Port to host on
:type port: int
:param debug: Enables debug output (slows server)
:type debug: bool
:param ssl: SSLContext, or location of certificate and key
for SSL encryption of worker(s)
        :type ssl: SSLContext or dict
        :param sock: Socket for the server to accept connections from
        :type sock: socket
        :param workers: Number of worker processes to spawn
:type workers: int
:param protocol: Subclass of asyncio Protocol class
:type protocol: type[Protocol]
:param backlog: a number of unaccepted connections that the system
will allow before refusing new connections
:type backlog: int
:param stop_event: event to be triggered
before stopping the app - deprecated
:type stop_event: None
:param register_sys_signals: Register SIG* events
:type register_sys_signals: bool
:param access_log: Enables writing access logs (slows server)
:type access_log: bool
:return: Nothing
"""
if "loop" in kwargs:
raise TypeError(
"loop is not a valid argument. To use an existing loop, "
"change to create_server().\nSee more: "
"https://sanic.readthedocs.io/en/latest/sanic/deploying.html"
"#asynchronous-support"
)
# Default auto_reload to false
auto_reload = False
# If debug is set, default it to true (unless on windows)
if debug and os.name == "posix":
auto_reload = True
# Allow for overriding either of the defaults
auto_reload = kwargs.get("auto_reload", auto_reload)
if sock is None:
host, port = host or "127.0.0.1", port or 8000
if protocol is None:
protocol = (
WebSocketProtocol if self.websocket_enabled else HttpProtocol
)
if stop_event is not None:
if debug:
warnings.simplefilter("default")
warnings.warn(
"stop_event will be removed from future versions.",
DeprecationWarning,
)
# if access_log is passed explicitly change config.ACCESS_LOG
if access_log is not None:
self.config.ACCESS_LOG = access_log
server_settings = self._helper(
host=host,
port=port,
debug=debug,
ssl=ssl,
sock=sock,
workers=workers,
protocol=protocol,
backlog=backlog,
register_sys_signals=register_sys_signals,
auto_reload=auto_reload,
)
try:
self.is_running = True
if workers == 1:
if auto_reload and os.name != "posix":
# This condition must be removed after implementing
# auto reloader for other operating systems.
raise NotImplementedError
if (
auto_reload
and os.environ.get("SANIC_SERVER_RUNNING") != "true"
):
reloader_helpers.watchdog(2)
else:
serve(**server_settings)
else:
serve_multiple(server_settings, workers)
except BaseException:
error_logger.exception(
"Experienced exception while trying to serve"
)
raise
finally:
self.is_running = False
logger.info("Server Stopped") | def function[run, parameter[self, host, port, debug, ssl, sock, workers, protocol, backlog, stop_event, register_sys_signals, access_log]]:
constant[Run the HTTP Server and listen until keyboard interrupt or term
signal. On termination, drain connections before closing.
:param host: Address to host on
:type host: str
:param port: Port to host on
:type port: int
:param debug: Enables debug output (slows server)
:type debug: bool
:param ssl: SSLContext, or location of certificate and key
for SSL encryption of worker(s)
        :type ssl: SSLContext or dict
        :param sock: Socket for the server to accept connections from
        :type sock: socket
        :param workers: Number of worker processes to spawn
:type workers: int
:param protocol: Subclass of asyncio Protocol class
:type protocol: type[Protocol]
:param backlog: a number of unaccepted connections that the system
will allow before refusing new connections
:type backlog: int
:param stop_event: event to be triggered
before stopping the app - deprecated
:type stop_event: None
:param register_sys_signals: Register SIG* events
:type register_sys_signals: bool
:param access_log: Enables writing access logs (slows server)
:type access_log: bool
:return: Nothing
]
if compare[constant[loop] in name[kwargs]] begin[:]
<ast.Raise object at 0x7da1b1f467a0>
variable[auto_reload] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b1f457e0> begin[:]
variable[auto_reload] assign[=] constant[True]
variable[auto_reload] assign[=] call[name[kwargs].get, parameter[constant[auto_reload], name[auto_reload]]]
if compare[name[sock] is constant[None]] begin[:]
<ast.Tuple object at 0x7da1b1f45330> assign[=] tuple[[<ast.BoolOp object at 0x7da1b1f46050>, <ast.BoolOp object at 0x7da1b1f45600>]]
if compare[name[protocol] is constant[None]] begin[:]
variable[protocol] assign[=] <ast.IfExp object at 0x7da1b1f466b0>
if compare[name[stop_event] is_not constant[None]] begin[:]
if name[debug] begin[:]
call[name[warnings].simplefilter, parameter[constant[default]]]
call[name[warnings].warn, parameter[constant[stop_event will be removed from future versions.], name[DeprecationWarning]]]
if compare[name[access_log] is_not constant[None]] begin[:]
name[self].config.ACCESS_LOG assign[=] name[access_log]
variable[server_settings] assign[=] call[name[self]._helper, parameter[]]
<ast.Try object at 0x7da1b1fdfa00>
call[name[logger].info, parameter[constant[Server Stopped]]] | keyword[def] identifier[run] (
identifier[self] ,
identifier[host] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[port] : identifier[Optional] [ identifier[int] ]= keyword[None] ,
identifier[debug] : identifier[bool] = keyword[False] ,
identifier[ssl] : identifier[Union] [ identifier[dict] , identifier[SSLContext] , keyword[None] ]= keyword[None] ,
identifier[sock] : identifier[Optional] [ identifier[socket] ]= keyword[None] ,
identifier[workers] : identifier[int] = literal[int] ,
identifier[protocol] : identifier[Type] [ identifier[Protocol] ]= keyword[None] ,
identifier[backlog] : identifier[int] = literal[int] ,
identifier[stop_event] : identifier[Any] = keyword[None] ,
identifier[register_sys_signals] : identifier[bool] = keyword[True] ,
identifier[access_log] : identifier[Optional] [ identifier[bool] ]= keyword[None] ,
** identifier[kwargs] : identifier[Any]
)-> keyword[None] :
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[raise] identifier[TypeError] (
literal[string]
literal[string]
literal[string]
literal[string]
)
identifier[auto_reload] = keyword[False]
keyword[if] identifier[debug] keyword[and] identifier[os] . identifier[name] == literal[string] :
identifier[auto_reload] = keyword[True]
identifier[auto_reload] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[auto_reload] )
keyword[if] identifier[sock] keyword[is] keyword[None] :
identifier[host] , identifier[port] = identifier[host] keyword[or] literal[string] , identifier[port] keyword[or] literal[int]
keyword[if] identifier[protocol] keyword[is] keyword[None] :
identifier[protocol] =(
identifier[WebSocketProtocol] keyword[if] identifier[self] . identifier[websocket_enabled] keyword[else] identifier[HttpProtocol]
)
keyword[if] identifier[stop_event] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[debug] :
identifier[warnings] . identifier[simplefilter] ( literal[string] )
identifier[warnings] . identifier[warn] (
literal[string] ,
identifier[DeprecationWarning] ,
)
keyword[if] identifier[access_log] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[config] . identifier[ACCESS_LOG] = identifier[access_log]
identifier[server_settings] = identifier[self] . identifier[_helper] (
identifier[host] = identifier[host] ,
identifier[port] = identifier[port] ,
identifier[debug] = identifier[debug] ,
identifier[ssl] = identifier[ssl] ,
identifier[sock] = identifier[sock] ,
identifier[workers] = identifier[workers] ,
identifier[protocol] = identifier[protocol] ,
identifier[backlog] = identifier[backlog] ,
identifier[register_sys_signals] = identifier[register_sys_signals] ,
identifier[auto_reload] = identifier[auto_reload] ,
)
keyword[try] :
identifier[self] . identifier[is_running] = keyword[True]
keyword[if] identifier[workers] == literal[int] :
keyword[if] identifier[auto_reload] keyword[and] identifier[os] . identifier[name] != literal[string] :
keyword[raise] identifier[NotImplementedError]
keyword[if] (
identifier[auto_reload]
keyword[and] identifier[os] . identifier[environ] . identifier[get] ( literal[string] )!= literal[string]
):
identifier[reloader_helpers] . identifier[watchdog] ( literal[int] )
keyword[else] :
identifier[serve] (** identifier[server_settings] )
keyword[else] :
identifier[serve_multiple] ( identifier[server_settings] , identifier[workers] )
keyword[except] identifier[BaseException] :
identifier[error_logger] . identifier[exception] (
literal[string]
)
keyword[raise]
keyword[finally] :
identifier[self] . identifier[is_running] = keyword[False]
identifier[logger] . identifier[info] ( literal[string] ) | def run(self, host: Optional[str]=None, port: Optional[int]=None, debug: bool=False, ssl: Union[dict, SSLContext, None]=None, sock: Optional[socket]=None, workers: int=1, protocol: Type[Protocol]=None, backlog: int=100, stop_event: Any=None, register_sys_signals: bool=True, access_log: Optional[bool]=None, **kwargs: Any) -> None:
"""Run the HTTP Server and listen until keyboard interrupt or term
signal. On termination, drain connections before closing.
:param host: Address to host on
:type host: str
:param port: Port to host on
:type port: int
:param debug: Enables debug output (slows server)
:type debug: bool
:param ssl: SSLContext, or location of certificate and key
for SSL encryption of worker(s)
        :type ssl: SSLContext or dict
        :param sock: Socket for the server to accept connections from
        :type sock: socket
        :param workers: Number of worker processes to spawn
:type workers: int
:param protocol: Subclass of asyncio Protocol class
:type protocol: type[Protocol]
:param backlog: a number of unaccepted connections that the system
will allow before refusing new connections
:type backlog: int
:param stop_event: event to be triggered
before stopping the app - deprecated
:type stop_event: None
:param register_sys_signals: Register SIG* events
:type register_sys_signals: bool
:param access_log: Enables writing access logs (slows server)
:type access_log: bool
:return: Nothing
"""
if 'loop' in kwargs:
raise TypeError('loop is not a valid argument. To use an existing loop, change to create_server().\nSee more: https://sanic.readthedocs.io/en/latest/sanic/deploying.html#asynchronous-support') # depends on [control=['if'], data=[]]
# Default auto_reload to false
auto_reload = False
# If debug is set, default it to true (unless on windows)
if debug and os.name == 'posix':
auto_reload = True # depends on [control=['if'], data=[]]
# Allow for overriding either of the defaults
auto_reload = kwargs.get('auto_reload', auto_reload)
if sock is None:
(host, port) = (host or '127.0.0.1', port or 8000) # depends on [control=['if'], data=[]]
if protocol is None:
protocol = WebSocketProtocol if self.websocket_enabled else HttpProtocol # depends on [control=['if'], data=['protocol']]
if stop_event is not None:
if debug:
warnings.simplefilter('default') # depends on [control=['if'], data=[]]
warnings.warn('stop_event will be removed from future versions.', DeprecationWarning) # depends on [control=['if'], data=[]]
# if access_log is passed explicitly change config.ACCESS_LOG
if access_log is not None:
self.config.ACCESS_LOG = access_log # depends on [control=['if'], data=['access_log']]
server_settings = self._helper(host=host, port=port, debug=debug, ssl=ssl, sock=sock, workers=workers, protocol=protocol, backlog=backlog, register_sys_signals=register_sys_signals, auto_reload=auto_reload)
try:
self.is_running = True
if workers == 1:
if auto_reload and os.name != 'posix':
# This condition must be removed after implementing
# auto reloader for other operating systems.
raise NotImplementedError # depends on [control=['if'], data=[]]
if auto_reload and os.environ.get('SANIC_SERVER_RUNNING') != 'true':
reloader_helpers.watchdog(2) # depends on [control=['if'], data=[]]
else:
serve(**server_settings) # depends on [control=['if'], data=[]]
else:
serve_multiple(server_settings, workers) # depends on [control=['try'], data=[]]
except BaseException:
error_logger.exception('Experienced exception while trying to serve')
raise # depends on [control=['except'], data=[]]
finally:
self.is_running = False
logger.info('Server Stopped') |
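# Hedged usage sketch: a minimal Sanic app served through the run() method
# above (standard Sanic imports; the handler and port are illustrative).
from sanic import Sanic
from sanic.response import json

app = Sanic(__name__)

@app.route('/')
async def index(request):
    return json({'ok': True})

# two worker processes, access log disabled for throughput
app.run(host='0.0.0.0', port=8000, workers=2, access_log=False)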
def add_layer_from_yaml_file(self, filename):
"""This function implements loading a YAML file and populating a new
empty layer (Layer) with its contents
:param filename: The name of a file to read
:type filename: string
"""
if filename.endswith(('.yaml', '.yml')):
file_stream = None
try:
file_stream = open(os.path.abspath(filename), 'r')
stack = yaml.safe_load(file_stream)
self.add_layer()
for attribute in stack:
setattr(self, attribute, stack[attribute])
del stack
except (TypeError, IOError):
logging.info("Could not import stack from %s" % filename)
finally:
if file_stream:
file_stream.close() | def function[add_layer_from_yaml_file, parameter[self, filename]]:
constant[This function implements loading a YAML file and populating a new
empty layer (Layer) with its contents
:param filename: The name of a file to read
:type filename: string
]
if call[name[filename].endswith, parameter[tuple[[<ast.Constant object at 0x7da204345690>, <ast.Constant object at 0x7da204344eb0>]]]] begin[:]
variable[file_stream] assign[=] constant[None]
<ast.Try object at 0x7da204345300> | keyword[def] identifier[add_layer_from_yaml_file] ( identifier[self] , identifier[filename] ):
literal[string]
keyword[if] identifier[filename] . identifier[endswith] (( literal[string] , literal[string] )):
identifier[file_stream] = keyword[None]
keyword[try] :
identifier[file_stream] = identifier[open] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] ), literal[string] )
identifier[stack] = identifier[yaml] . identifier[safe_load] ( identifier[file_stream] )
identifier[self] . identifier[add_layer] ()
keyword[for] identifier[attribute] keyword[in] identifier[stack] :
identifier[setattr] ( identifier[self] , identifier[attribute] , identifier[stack] [ identifier[attribute] ])
keyword[del] identifier[stack]
keyword[except] ( identifier[TypeError] , identifier[IOError] ):
identifier[logging] . identifier[info] ( literal[string] % identifier[filename] )
keyword[finally] :
keyword[if] identifier[file_stream] :
identifier[file_stream] . identifier[close] () | def add_layer_from_yaml_file(self, filename):
"""This function implements loading a YAML file and populating a new
empty layer (Layer) with its contents
:param filename: The name of a file to read
:type filename: string
"""
if filename.endswith(('.yaml', '.yml')):
file_stream = None
try:
file_stream = open(os.path.abspath(filename), 'r')
stack = yaml.safe_load(file_stream)
self.add_layer()
for attribute in stack:
setattr(self, attribute, stack[attribute]) # depends on [control=['for'], data=['attribute']]
del stack # depends on [control=['try'], data=[]]
except (TypeError, IOError):
logging.info('Could not import stack from %s' % filename) # depends on [control=['except'], data=[]]
finally:
if file_stream:
file_stream.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
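# Usage sketch (`model` and stack.yaml are hypothetical): any YAML mapping
# works; each top-level key becomes an attribute of the object.
#
# stack.yaml:
#   thickness: 10.0
#   material: SiO2
model.add_layer_from_yaml_file('stack.yaml')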
def config_replace(self, *args, **kwargs):
"""Replaces the existing config with a user-defined config.
        Make sure to back up the config file first if necessary, as this
operation can't be undone.
"""
return self._client.request('/config/replace', args,
decoder='json', **kwargs) | def function[config_replace, parameter[self]]:
constant[Replaces the existing config with a user-defined config.
        Make sure to back up the config file first if necessary, as this
operation can't be undone.
]
return[call[name[self]._client.request, parameter[constant[/config/replace], name[args]]]] | keyword[def] identifier[config_replace] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_client] . identifier[request] ( literal[string] , identifier[args] ,
identifier[decoder] = literal[string] ,** identifier[kwargs] ) | def config_replace(self, *args, **kwargs):
"""Replaces the existing config with a user-defined config.
        Make sure to back up the config file first if necessary, as this
operation can't be undone.
"""
return self._client.request('/config/replace', args, decoder='json', **kwargs) |
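# Usage sketch (py-ipfs-api-style client; the path argument is an assumption
# based on the generic *args passthrough above):
client.config_replace('new_config.json')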
def set_prop(self, prop, value, ef=None):
"""
        Set attribute values.
:param prop:
:param value:
:param ef:
:return:
"""
if ef:
# prop should be restricted to n_decoys, an int, the no. of decoys corresponding to a given FPF.
# value is restricted to the corresponding enrichment factor and should be a float
self.ef[prop] = value
else:
if prop == 'ensemble':
# value is a tuple of strings that gives the ensemble composition
self.ensemble = value
elif prop == 'auc':
# value is a float that gives the auc value
self.auc = value | def function[set_prop, parameter[self, prop, value, ef]]:
constant[
        Set attribute values.
:param prop:
:param value:
:param ef:
:return:
]
if name[ef] begin[:]
call[name[self].ef][name[prop]] assign[=] name[value] | keyword[def] identifier[set_prop] ( identifier[self] , identifier[prop] , identifier[value] , identifier[ef] = keyword[None] ):
literal[string]
keyword[if] identifier[ef] :
identifier[self] . identifier[ef] [ identifier[prop] ]= identifier[value]
keyword[else] :
keyword[if] identifier[prop] == literal[string] :
identifier[self] . identifier[ensemble] = identifier[value]
keyword[elif] identifier[prop] == literal[string] :
identifier[self] . identifier[auc] = identifier[value] | def set_prop(self, prop, value, ef=None):
"""
        Set attribute values.
:param prop:
:param value:
:param ef:
:return:
"""
if ef:
# prop should be restricted to n_decoys, an int, the no. of decoys corresponding to a given FPF.
# value is restricted to the corresponding enrichment factor and should be a float
self.ef[prop] = value # depends on [control=['if'], data=[]]
elif prop == 'ensemble':
# value is a tuple of strings that gives the ensemble composition
self.ensemble = value # depends on [control=['if'], data=[]]
elif prop == 'auc':
# value is a float that gives the auc value
self.auc = value # depends on [control=['if'], data=[]] |
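# Usage sketch (`res` is a hypothetical results object): plain attributes go
# through prop names, enrichment factors are keyed by decoy count via ef=True.
res.set_prop('auc', 0.87)
res.set_prop('ensemble', ('model_a', 'model_b'))
res.set_prop(10, 4.2, ef=True)   # EF at the 10-decoy FPF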
def from_heatmaps(heatmaps, class_indices=None, nb_classes=None):
"""
Convert heatmaps to segmentation map.
Assumes that each class is represented as a single heatmap channel.
Parameters
----------
heatmaps : imgaug.HeatmapsOnImage
Heatmaps to convert.
class_indices : None or list of int, optional
List of class indices represented by each heatmap channel. See also the
secondary output of :func:`imgaug.SegmentationMapOnImage.to_heatmap`.
If this is provided, it must have the same length as the number of heatmap channels.
nb_classes : None or int, optional
Number of classes. Must be provided if class_indices is set.
Returns
-------
imgaug.SegmentationMapOnImage
Segmentation map derived from heatmaps.
"""
if class_indices is None:
return SegmentationMapOnImage(heatmaps.arr_0to1, shape=heatmaps.shape)
else:
ia.do_assert(nb_classes is not None)
ia.do_assert(min(class_indices) >= 0)
ia.do_assert(max(class_indices) < nb_classes)
ia.do_assert(len(class_indices) == heatmaps.arr_0to1.shape[2])
arr_0to1 = heatmaps.arr_0to1
arr_0to1_full = np.zeros((arr_0to1.shape[0], arr_0to1.shape[1], nb_classes), dtype=np.float32)
for heatmap_channel, mapped_channel in enumerate(class_indices):
arr_0to1_full[:, :, mapped_channel] = arr_0to1[:, :, heatmap_channel]
return SegmentationMapOnImage(arr_0to1_full, shape=heatmaps.shape) | def function[from_heatmaps, parameter[heatmaps, class_indices, nb_classes]]:
constant[
Convert heatmaps to segmentation map.
Assumes that each class is represented as a single heatmap channel.
Parameters
----------
heatmaps : imgaug.HeatmapsOnImage
Heatmaps to convert.
class_indices : None or list of int, optional
List of class indices represented by each heatmap channel. See also the
secondary output of :func:`imgaug.SegmentationMapOnImage.to_heatmap`.
If this is provided, it must have the same length as the number of heatmap channels.
nb_classes : None or int, optional
Number of classes. Must be provided if class_indices is set.
Returns
-------
imgaug.SegmentationMapOnImage
Segmentation map derived from heatmaps.
]
if compare[name[class_indices] is constant[None]] begin[:]
return[call[name[SegmentationMapOnImage], parameter[name[heatmaps].arr_0to1]]] | keyword[def] identifier[from_heatmaps] ( identifier[heatmaps] , identifier[class_indices] = keyword[None] , identifier[nb_classes] = keyword[None] ):
literal[string]
keyword[if] identifier[class_indices] keyword[is] keyword[None] :
keyword[return] identifier[SegmentationMapOnImage] ( identifier[heatmaps] . identifier[arr_0to1] , identifier[shape] = identifier[heatmaps] . identifier[shape] )
keyword[else] :
identifier[ia] . identifier[do_assert] ( identifier[nb_classes] keyword[is] keyword[not] keyword[None] )
identifier[ia] . identifier[do_assert] ( identifier[min] ( identifier[class_indices] )>= literal[int] )
identifier[ia] . identifier[do_assert] ( identifier[max] ( identifier[class_indices] )< identifier[nb_classes] )
identifier[ia] . identifier[do_assert] ( identifier[len] ( identifier[class_indices] )== identifier[heatmaps] . identifier[arr_0to1] . identifier[shape] [ literal[int] ])
identifier[arr_0to1] = identifier[heatmaps] . identifier[arr_0to1]
identifier[arr_0to1_full] = identifier[np] . identifier[zeros] (( identifier[arr_0to1] . identifier[shape] [ literal[int] ], identifier[arr_0to1] . identifier[shape] [ literal[int] ], identifier[nb_classes] ), identifier[dtype] = identifier[np] . identifier[float32] )
keyword[for] identifier[heatmap_channel] , identifier[mapped_channel] keyword[in] identifier[enumerate] ( identifier[class_indices] ):
identifier[arr_0to1_full] [:,:, identifier[mapped_channel] ]= identifier[arr_0to1] [:,:, identifier[heatmap_channel] ]
keyword[return] identifier[SegmentationMapOnImage] ( identifier[arr_0to1_full] , identifier[shape] = identifier[heatmaps] . identifier[shape] ) | def from_heatmaps(heatmaps, class_indices=None, nb_classes=None):
"""
Convert heatmaps to segmentation map.
Assumes that each class is represented as a single heatmap channel.
Parameters
----------
heatmaps : imgaug.HeatmapsOnImage
Heatmaps to convert.
class_indices : None or list of int, optional
List of class indices represented by each heatmap channel. See also the
secondary output of :func:`imgaug.SegmentationMapOnImage.to_heatmap`.
If this is provided, it must have the same length as the number of heatmap channels.
nb_classes : None or int, optional
Number of classes. Must be provided if class_indices is set.
Returns
-------
imgaug.SegmentationMapOnImage
Segmentation map derived from heatmaps.
"""
if class_indices is None:
return SegmentationMapOnImage(heatmaps.arr_0to1, shape=heatmaps.shape) # depends on [control=['if'], data=[]]
else:
ia.do_assert(nb_classes is not None)
ia.do_assert(min(class_indices) >= 0)
ia.do_assert(max(class_indices) < nb_classes)
ia.do_assert(len(class_indices) == heatmaps.arr_0to1.shape[2])
arr_0to1 = heatmaps.arr_0to1
arr_0to1_full = np.zeros((arr_0to1.shape[0], arr_0to1.shape[1], nb_classes), dtype=np.float32)
for (heatmap_channel, mapped_channel) in enumerate(class_indices):
arr_0to1_full[:, :, mapped_channel] = arr_0to1[:, :, heatmap_channel] # depends on [control=['for'], data=[]]
return SegmentationMapOnImage(arr_0to1_full, shape=heatmaps.shape) |
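# Usage sketch with imgaug (assumed installed): a 3-channel heatmap whose
# channels stand for classes 1, 3 and 4 out of nb_classes=5.
import numpy as np
import imgaug as ia

arr = np.zeros((32, 32, 3), dtype=np.float32)
arr[8:16, 8:16, 0] = 1.0   # class 1 active in a square region
heatmaps = ia.HeatmapsOnImage(arr, shape=(32, 32, 3))
segmap = from_heatmaps(heatmaps, class_indices=[1, 3, 4], nb_classes=5)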
def list_difference(left, right):
"""
Take the not-in-place difference of two lists (left - right), similar to sets but preserving order.
"""
blocked = set(right)
difference = []
for item in left:
if item not in blocked:
blocked.add(item)
difference.append(item)
return difference | def function[list_difference, parameter[left, right]]:
constant[
Take the not-in-place difference of two lists (left - right), similar to sets but preserving order.
]
variable[blocked] assign[=] call[name[set], parameter[name[right]]]
variable[difference] assign[=] list[[]]
for taget[name[item]] in starred[name[left]] begin[:]
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[blocked]] begin[:]
call[name[blocked].add, parameter[name[item]]]
call[name[difference].append, parameter[name[item]]]
return[name[difference]] | keyword[def] identifier[list_difference] ( identifier[left] , identifier[right] ):
literal[string]
identifier[blocked] = identifier[set] ( identifier[right] )
identifier[difference] =[]
keyword[for] identifier[item] keyword[in] identifier[left] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[blocked] :
identifier[blocked] . identifier[add] ( identifier[item] )
identifier[difference] . identifier[append] ( identifier[item] )
keyword[return] identifier[difference] | def list_difference(left, right):
"""
Take the not-in-place difference of two lists (left - right), similar to sets but preserving order.
"""
blocked = set(right)
difference = []
for item in left:
if item not in blocked:
blocked.add(item)
difference.append(item) # depends on [control=['if'], data=['item', 'blocked']] # depends on [control=['for'], data=['item']]
return difference |
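# Example: order and first occurrences from the left list are preserved, and
# kept items are de-duplicated because each one is added to `blocked`.
list_difference([3, 1, 2, 1, 4, 3], [1])   # -> [3, 2, 4]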
def simxJointGetForce(clientID, jointHandle, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
force = ct.c_float()
return c_GetJointForce(clientID, jointHandle, ct.byref(force), operationMode), force.value | def function[simxJointGetForce, parameter[clientID, jointHandle, operationMode]]:
constant[
Please have a look at the function description/documentation in the V-REP user manual
]
variable[force] assign[=] call[name[ct].c_float, parameter[]]
return[tuple[[<ast.Call object at 0x7da1b15d1b10>, <ast.Attribute object at 0x7da1b15d21a0>]]] | keyword[def] identifier[simxJointGetForce] ( identifier[clientID] , identifier[jointHandle] , identifier[operationMode] ):
literal[string]
identifier[force] = identifier[ct] . identifier[c_float] ()
keyword[return] identifier[c_GetJointForce] ( identifier[clientID] , identifier[jointHandle] , identifier[ct] . identifier[byref] ( identifier[force] ), identifier[operationMode] ), identifier[force] . identifier[value] | def simxJointGetForce(clientID, jointHandle, operationMode):
"""
Please have a look at the function description/documentation in the V-REP user manual
"""
force = ct.c_float()
return (c_GetJointForce(clientID, jointHandle, ct.byref(force), operationMode), force.value) |
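# Usage sketch with the V-REP remote API bindings (simx_opmode_blocking comes
# from the standard vrepConst module; clientID and the joint handle come from
# earlier simxStart/simxGetObjectHandle calls - both are assumptions here):
err, force = simxJointGetForce(clientID, joint_handle, simx_opmode_blocking)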
def get_spec_list(image_specs, container_args):
""" Given a list of specs and a set of container args, return a tuple of
the final container argument list and the original list size """
spec_list = [spec.strip() for spec in image_specs.split('|')]
original_count = len(spec_list)
if 'count' in container_args:
if 'count_offset' in container_args:
spec_list = spec_list[container_args['count_offset']:]
spec_list = spec_list[:container_args['count']]
return spec_list, original_count | def function[get_spec_list, parameter[image_specs, container_args]]:
constant[ Given a list of specs and a set of container args, return a tuple of
the final container argument list and the original list size ]
variable[spec_list] assign[=] <ast.ListComp object at 0x7da20c6c5810>
variable[original_count] assign[=] call[name[len], parameter[name[spec_list]]]
if compare[constant[count] in name[container_args]] begin[:]
if compare[constant[count_offset] in name[container_args]] begin[:]
variable[spec_list] assign[=] call[name[spec_list]][<ast.Slice object at 0x7da20c6c4670>]
variable[spec_list] assign[=] call[name[spec_list]][<ast.Slice object at 0x7da20c6c4d60>]
return[tuple[[<ast.Name object at 0x7da20c6c6860>, <ast.Name object at 0x7da20c6c6410>]]] | keyword[def] identifier[get_spec_list] ( identifier[image_specs] , identifier[container_args] ):
literal[string]
identifier[spec_list] =[ identifier[spec] . identifier[strip] () keyword[for] identifier[spec] keyword[in] identifier[image_specs] . identifier[split] ( literal[string] )]
identifier[original_count] = identifier[len] ( identifier[spec_list] )
keyword[if] literal[string] keyword[in] identifier[container_args] :
keyword[if] literal[string] keyword[in] identifier[container_args] :
identifier[spec_list] = identifier[spec_list] [ identifier[container_args] [ literal[string] ]:]
identifier[spec_list] = identifier[spec_list] [: identifier[container_args] [ literal[string] ]]
keyword[return] identifier[spec_list] , identifier[original_count] | def get_spec_list(image_specs, container_args):
""" Given a list of specs and a set of container args, return a tuple of
the final container argument list and the original list size """
spec_list = [spec.strip() for spec in image_specs.split('|')]
original_count = len(spec_list)
if 'count' in container_args:
if 'count_offset' in container_args:
spec_list = spec_list[container_args['count_offset']:] # depends on [control=['if'], data=['container_args']]
spec_list = spec_list[:container_args['count']] # depends on [control=['if'], data=['container_args']]
return (spec_list, original_count) |
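# Example: trim a pipe-separated spec string with count and count_offset.
specs, total = get_spec_list('a.png | b.png | c.png | d.png',
                             {'count': 2, 'count_offset': 1})
# specs == ['b.png', 'c.png'], total == 4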
def inject(self, contents=None, **kwargs):
"""
        Adds content data to this element. This will be used in the rendering of this element's children.
Multiple injections on the same key will override the content (dict.update behavior).
"""
if contents and not isinstance(contents, dict):
raise WrongContentError(self, contents, "contents should be a dict")
self._stable = False
if not contents:
contents = {}
if kwargs:
contents.update(kwargs)
self.content_data.update(contents)
return self | def function[inject, parameter[self, contents]]:
constant[
        Adds content data to this element. This will be used in the rendering of this element's children.
Multiple injections on the same key will override the content (dict.update behavior).
]
if <ast.BoolOp object at 0x7da1b0e25cc0> begin[:]
<ast.Raise object at 0x7da1b0e263e0>
name[self]._stable assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b0e25b70> begin[:]
variable[contents] assign[=] dictionary[[], []]
if name[kwargs] begin[:]
call[name[contents].update, parameter[name[kwargs]]]
call[name[self].content_data.update, parameter[name[contents]]]
return[name[self]] | keyword[def] identifier[inject] ( identifier[self] , identifier[contents] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[contents] keyword[and] keyword[not] identifier[isinstance] ( identifier[contents] , identifier[dict] ):
keyword[raise] identifier[WrongContentError] ( identifier[self] , identifier[contents] , literal[string] )
identifier[self] . identifier[_stable] = keyword[False]
keyword[if] keyword[not] identifier[contents] :
identifier[contents] ={}
keyword[if] identifier[kwargs] :
identifier[contents] . identifier[update] ( identifier[kwargs] )
identifier[self] . identifier[content_data] . identifier[update] ( identifier[contents] )
keyword[return] identifier[self] | def inject(self, contents=None, **kwargs):
"""
        Adds content data to this element. This will be used in the rendering of this element's children.
Multiple injections on the same key will override the content (dict.update behavior).
"""
if contents and (not isinstance(contents, dict)):
raise WrongContentError(self, contents, 'contents should be a dict') # depends on [control=['if'], data=[]]
self._stable = False
if not contents:
contents = {} # depends on [control=['if'], data=[]]
if kwargs:
contents.update(kwargs) # depends on [control=['if'], data=[]]
self.content_data.update(contents)
return self |
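# Usage sketch (`el` is a hypothetical element instance): both call styles
# update content_data, and a later injection overrides an earlier key.
el.inject({'title': 'Home'})
el.inject(title='Dashboard', user='alice')   # 'title' is now 'Dashboard'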
def run(self, name, *args):
"""Run an action as specified by its name."""
assert isinstance(name, string_types)
# Resolve the alias if it is an alias.
name = self._aliases.get(name, name)
# Get the action.
action = self._actions_dict.get(name, None)
if not action:
raise ValueError("Action `{}` doesn't exist.".format(name))
if not name.startswith('_'):
logger.debug("Execute action `%s`.", name)
return action.callback(*args) | def function[run, parameter[self, name]]:
constant[Run an action as specified by its name.]
assert[call[name[isinstance], parameter[name[name], name[string_types]]]]
variable[name] assign[=] call[name[self]._aliases.get, parameter[name[name], name[name]]]
variable[action] assign[=] call[name[self]._actions_dict.get, parameter[name[name], constant[None]]]
if <ast.UnaryOp object at 0x7da18bc739a0> begin[:]
<ast.Raise object at 0x7da18bc71d50>
if <ast.UnaryOp object at 0x7da18bc730a0> begin[:]
call[name[logger].debug, parameter[constant[Execute action `%s`.], name[name]]]
return[call[name[action].callback, parameter[<ast.Starred object at 0x7da18bc72410>]]] | keyword[def] identifier[run] ( identifier[self] , identifier[name] ,* identifier[args] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[name] , identifier[string_types] )
identifier[name] = identifier[self] . identifier[_aliases] . identifier[get] ( identifier[name] , identifier[name] )
identifier[action] = identifier[self] . identifier[_actions_dict] . identifier[get] ( identifier[name] , keyword[None] )
keyword[if] keyword[not] identifier[action] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[if] keyword[not] identifier[name] . identifier[startswith] ( literal[string] ):
identifier[logger] . identifier[debug] ( literal[string] , identifier[name] )
keyword[return] identifier[action] . identifier[callback] (* identifier[args] ) | def run(self, name, *args):
"""Run an action as specified by its name."""
assert isinstance(name, string_types)
# Resolve the alias if it is an alias.
name = self._aliases.get(name, name)
# Get the action.
action = self._actions_dict.get(name, None)
if not action:
raise ValueError("Action `{}` doesn't exist.".format(name)) # depends on [control=['if'], data=[]]
if not name.startswith('_'):
logger.debug('Execute action `%s`.', name) # depends on [control=['if'], data=[]]
return action.callback(*args) |
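# Usage sketch (`actions` is a hypothetical registry instance): aliases are
# resolved before lookup, and unknown names raise ValueError.
actions.run('save')
actions.run('s', 'notes.txt')   # works if 's' is an alias for 'save'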
def get_full_description(self):
"""Generates the FULL description
Returns:
The FULL description
Raises:
            FormatException: if formatting fails and throw_exception_on_parse_error is True
"""
try:
time_segment = self.get_time_of_day_description()
day_of_month_desc = self.get_day_of_month_description()
month_desc = self.get_month_description()
day_of_week_desc = self.get_day_of_week_description()
year_desc = self.get_year_description()
description = "{0}{1}{2}{3}{4}".format(
time_segment,
day_of_month_desc,
day_of_week_desc,
month_desc,
year_desc)
description = self.transform_verbosity(
description, self._options.verbose)
description = self.transform_case(
description,
self._options.casing_type)
except Exception:
description = _(
"An error occured when generating the expression description. Check the cron expression syntax.")
if self._options.throw_exception_on_parse_error:
raise FormatException(description)
return description | def function[get_full_description, parameter[self]]:
constant[Generates the FULL description
Returns:
The FULL description
Raises:
            FormatException: if formatting fails and throw_exception_on_parse_error is True
]
<ast.Try object at 0x7da1b04059f0>
return[name[description]] | keyword[def] identifier[get_full_description] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[time_segment] = identifier[self] . identifier[get_time_of_day_description] ()
identifier[day_of_month_desc] = identifier[self] . identifier[get_day_of_month_description] ()
identifier[month_desc] = identifier[self] . identifier[get_month_description] ()
identifier[day_of_week_desc] = identifier[self] . identifier[get_day_of_week_description] ()
identifier[year_desc] = identifier[self] . identifier[get_year_description] ()
identifier[description] = literal[string] . identifier[format] (
identifier[time_segment] ,
identifier[day_of_month_desc] ,
identifier[day_of_week_desc] ,
identifier[month_desc] ,
identifier[year_desc] )
identifier[description] = identifier[self] . identifier[transform_verbosity] (
identifier[description] , identifier[self] . identifier[_options] . identifier[verbose] )
identifier[description] = identifier[self] . identifier[transform_case] (
identifier[description] ,
identifier[self] . identifier[_options] . identifier[casing_type] )
keyword[except] identifier[Exception] :
identifier[description] = identifier[_] (
literal[string] )
keyword[if] identifier[self] . identifier[_options] . identifier[throw_exception_on_parse_error] :
keyword[raise] identifier[FormatException] ( identifier[description] )
keyword[return] identifier[description] | def get_full_description(self):
"""Generates the FULL description
Returns:
The FULL description
Raises:
            FormatException: if formatting fails and throw_exception_on_parse_error is True
"""
try:
time_segment = self.get_time_of_day_description()
day_of_month_desc = self.get_day_of_month_description()
month_desc = self.get_month_description()
day_of_week_desc = self.get_day_of_week_description()
year_desc = self.get_year_description()
description = '{0}{1}{2}{3}{4}'.format(time_segment, day_of_month_desc, day_of_week_desc, month_desc, year_desc)
description = self.transform_verbosity(description, self._options.verbose)
description = self.transform_case(description, self._options.casing_type) # depends on [control=['try'], data=[]]
except Exception:
            description = _('An error occurred when generating the expression description. Check the cron expression syntax.')
if self._options.throw_exception_on_parse_error:
raise FormatException(description) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
return description |
def stop_request(self, stop_now=False):
"""Send a stop request to the daemon
:param stop_now: stop now or go to stop wait mode
:type stop_now: bool
:return: the daemon response (True)
"""
logger.debug("Sending stop request to %s, stop now: %s", self.name, stop_now)
res = self.con.get('stop_request', {'stop_now': '1' if stop_now else '0'})
return res | def function[stop_request, parameter[self, stop_now]]:
constant[Send a stop request to the daemon
:param stop_now: stop now or go to stop wait mode
:type stop_now: bool
:return: the daemon response (True)
]
call[name[logger].debug, parameter[constant[Sending stop request to %s, stop now: %s], name[self].name, name[stop_now]]]
variable[res] assign[=] call[name[self].con.get, parameter[constant[stop_request], dictionary[[<ast.Constant object at 0x7da18fe910f0>], [<ast.IfExp object at 0x7da18fe90cd0>]]]]
return[name[res]] | keyword[def] identifier[stop_request] ( identifier[self] , identifier[stop_now] = keyword[False] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[stop_now] )
identifier[res] = identifier[self] . identifier[con] . identifier[get] ( literal[string] ,{ literal[string] : literal[string] keyword[if] identifier[stop_now] keyword[else] literal[string] })
keyword[return] identifier[res] | def stop_request(self, stop_now=False):
"""Send a stop request to the daemon
:param stop_now: stop now or go to stop wait mode
:type stop_now: bool
:return: the daemon response (True)
"""
logger.debug('Sending stop request to %s, stop now: %s', self.name, stop_now)
res = self.con.get('stop_request', {'stop_now': '1' if stop_now else '0'})
return res |
def get_scipy_minimizer(**kwargs):
"""Get minimizer which uses `scipy.optimize.minimize`"""
def minimizer(objective, n_params):
params = [random.random() for _ in range(n_params)]
result = scipy_minimizer(objective, params, **kwargs)
return result.x
return minimizer | def function[get_scipy_minimizer, parameter[]]:
constant[Get minimizer which uses `scipy.optimize.minimize`]
def function[minimizer, parameter[objective, n_params]]:
variable[params] assign[=] <ast.ListComp object at 0x7da2044c0940>
variable[result] assign[=] call[name[scipy_minimizer], parameter[name[objective], name[params]]]
return[name[result].x]
return[name[minimizer]] | keyword[def] identifier[get_scipy_minimizer] (** identifier[kwargs] ):
literal[string]
keyword[def] identifier[minimizer] ( identifier[objective] , identifier[n_params] ):
identifier[params] =[ identifier[random] . identifier[random] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[n_params] )]
identifier[result] = identifier[scipy_minimizer] ( identifier[objective] , identifier[params] ,** identifier[kwargs] )
keyword[return] identifier[result] . identifier[x]
keyword[return] identifier[minimizer] | def get_scipy_minimizer(**kwargs):
"""Get minimizer which uses `scipy.optimize.minimize`"""
def minimizer(objective, n_params):
params = [random.random() for _ in range(n_params)]
result = scipy_minimizer(objective, params, **kwargs)
return result.x
return minimizer |
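# Example: build a minimizer and apply it to a 2-parameter quadratic bowl.
# random and scipy.optimize.minimize (bound as scipy_minimizer) are exactly
# the imports the factory above relies on.
import random
from scipy.optimize import minimize as scipy_minimizer

minimize2 = get_scipy_minimizer(method='Nelder-Mead')
best = minimize2(lambda p: (p[0] - 1.0) ** 2 + (p[1] + 2.0) ** 2, 2)
# best is close to [1.0, -2.0]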
def _flatten_per_cluster(per_cluster):
"""Convert a dictionary {cluster: spikes} to a spikes array."""
return np.sort(np.concatenate(list(per_cluster.values()))).astype(np.int64) | def function[_flatten_per_cluster, parameter[per_cluster]]:
constant[Convert a dictionary {cluster: spikes} to a spikes array.]
return[call[call[name[np].sort, parameter[call[name[np].concatenate, parameter[call[name[list], parameter[call[name[per_cluster].values, parameter[]]]]]]]].astype, parameter[name[np].int64]]] | keyword[def] identifier[_flatten_per_cluster] ( identifier[per_cluster] ):
literal[string]
keyword[return] identifier[np] . identifier[sort] ( identifier[np] . identifier[concatenate] ( identifier[list] ( identifier[per_cluster] . identifier[values] ()))). identifier[astype] ( identifier[np] . identifier[int64] ) | def _flatten_per_cluster(per_cluster):
"""Convert a dictionary {cluster: spikes} to a spikes array."""
return np.sort(np.concatenate(list(per_cluster.values()))).astype(np.int64) |
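# Example: per-cluster spike indices flattened into one sorted int64 array.
import numpy as np
per_cluster = {0: np.array([5, 2]), 1: np.array([9, 1])}
_flatten_per_cluster(per_cluster)   # -> array([1, 2, 5, 9])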
def diff(self, other=diff.Diffable.Index, paths=None, create_patch=False, **kwargs):
"""Diff this index against the working copy or a Tree or Commit object
For a documentation of the parameters and return values, see
Diffable.diff
:note:
Will only work with indices that represent the default git index as
they have not been initialized with a stream.
"""
# index against index is always empty
if other is self.Index:
return diff.DiffIndex()
# index against anything but None is a reverse diff with the respective
# item. Handle existing -R flags properly. Transform strings to the object
# so that we can call diff on it
if isinstance(other, string_types):
other = self.repo.rev_parse(other)
# END object conversion
if isinstance(other, Object):
# invert the existing R flag
cur_val = kwargs.get('R', False)
kwargs['R'] = not cur_val
return other.diff(self.Index, paths, create_patch, **kwargs)
# END diff against other item handling
# if other is not None here, something is wrong
if other is not None:
raise ValueError("other must be None, Diffable.Index, a Tree or Commit, was %r" % other)
# diff against working copy - can be handled by superclass natively
return super(IndexFile, self).diff(other, paths, create_patch, **kwargs) | def function[diff, parameter[self, other, paths, create_patch]]:
constant[Diff this index against the working copy or a Tree or Commit object
For a documentation of the parameters and return values, see
Diffable.diff
:note:
Will only work with indices that represent the default git index as
they have not been initialized with a stream.
]
if compare[name[other] is name[self].Index] begin[:]
return[call[name[diff].DiffIndex, parameter[]]]
if call[name[isinstance], parameter[name[other], name[string_types]]] begin[:]
variable[other] assign[=] call[name[self].repo.rev_parse, parameter[name[other]]]
if call[name[isinstance], parameter[name[other], name[Object]]] begin[:]
variable[cur_val] assign[=] call[name[kwargs].get, parameter[constant[R], constant[False]]]
call[name[kwargs]][constant[R]] assign[=] <ast.UnaryOp object at 0x7da1b22704c0>
return[call[name[other].diff, parameter[name[self].Index, name[paths], name[create_patch]]]]
if compare[name[other] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da1b2271720>
return[call[call[name[super], parameter[name[IndexFile], name[self]]].diff, parameter[name[other], name[paths], name[create_patch]]]] | keyword[def] identifier[diff] ( identifier[self] , identifier[other] = identifier[diff] . identifier[Diffable] . identifier[Index] , identifier[paths] = keyword[None] , identifier[create_patch] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[other] keyword[is] identifier[self] . identifier[Index] :
keyword[return] identifier[diff] . identifier[DiffIndex] ()
keyword[if] identifier[isinstance] ( identifier[other] , identifier[string_types] ):
identifier[other] = identifier[self] . identifier[repo] . identifier[rev_parse] ( identifier[other] )
keyword[if] identifier[isinstance] ( identifier[other] , identifier[Object] ):
identifier[cur_val] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[kwargs] [ literal[string] ]= keyword[not] identifier[cur_val]
keyword[return] identifier[other] . identifier[diff] ( identifier[self] . identifier[Index] , identifier[paths] , identifier[create_patch] ,** identifier[kwargs] )
keyword[if] identifier[other] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[other] )
keyword[return] identifier[super] ( identifier[IndexFile] , identifier[self] ). identifier[diff] ( identifier[other] , identifier[paths] , identifier[create_patch] ,** identifier[kwargs] ) | def diff(self, other=diff.Diffable.Index, paths=None, create_patch=False, **kwargs):
"""Diff this index against the working copy or a Tree or Commit object
For a documentation of the parameters and return values, see
Diffable.diff
:note:
Will only work with indices that represent the default git index as
they have not been initialized with a stream.
"""
# index against index is always empty
if other is self.Index:
return diff.DiffIndex() # depends on [control=['if'], data=[]]
# index against anything but None is a reverse diff with the respective
# item. Handle existing -R flags properly. Transform strings to the object
# so that we can call diff on it
if isinstance(other, string_types):
other = self.repo.rev_parse(other) # depends on [control=['if'], data=[]]
# END object conversion
if isinstance(other, Object):
# invert the existing R flag
cur_val = kwargs.get('R', False)
kwargs['R'] = not cur_val
return other.diff(self.Index, paths, create_patch, **kwargs) # depends on [control=['if'], data=[]]
# END diff against other item handling
# if other is not None here, something is wrong
if other is not None:
raise ValueError('other must be None, Diffable.Index, a Tree or Commit, was %r' % other) # depends on [control=['if'], data=['other']]
# diff against working copy - can be handled by superclass natively
return super(IndexFile, self).diff(other, paths, create_patch, **kwargs) |
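# Usage sketch with GitPython: diff the index against HEAD, then against the
# working tree (other=None is handled natively by the superclass).
from git import Repo

repo = Repo('.')
index_vs_head = repo.index.diff(repo.head.commit)
index_vs_worktree = repo.index.diff(None)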
def write(self, *text, sep=' '):
"""
Write text to response
:param text:
:param sep:
:return:
"""
        self.text += markdown.text(*text, sep=sep)  # pass sep as keyword so it is not appended as content
return self | def function[write, parameter[self]]:
constant[
Write text to response
:param text:
:param sep:
:return:
]
<ast.AugAssign object at 0x7da1b18f9060>
return[name[self]] | keyword[def] identifier[write] ( identifier[self] ,* identifier[text] , identifier[sep] = literal[string] ):
literal[string]
identifier[self] . identifier[text] += identifier[markdown] . identifier[text] (* identifier[text] , identifier[sep] = identifier[sep] )
keyword[return] identifier[self] | def write(self, *text, sep=' '):
"""
Write text to response
:param text:
:param sep:
:return:
"""
        self.text += markdown.text(*text, sep=sep)
return self |
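# Usage sketch (`resp` is a hypothetical response builder): calls chain
# because write() returns self; sep joins the given fragments.
resp.write('Hello,', 'world').write('a', 'b', sep=' | ')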
def reads(self):
'''
The reads in this PileupCollection. All reads will have an alignment
that overlaps at least one of the included loci.
Since SAM (and pysam) have no real notion of a "read", the returned
instances are actually pysam.AlignedSegment instances, (i.e.
alignments). However, only one alignment will be returned by this
method per read.
Returns
----------
List of pysam.AlignedSegment instances. If a particular read has more
than one alignment in this PileupCollection (e.g. one primary and one
secondary), then the alignment returned is the one with the highest
mapping quality.
'''
# TODO: Optimize this.
def alignment_precedence(pysam_alignment_record):
return pysam_alignment_record.mapping_quality
result = {}
for pileup in self.pileups.values():
for e in pileup.elements:
key = read_key(e.alignment)
if key not in result or (
alignment_precedence(e.alignment) >
alignment_precedence(result[key])):
result[key] = e.alignment
return list(result.values()) | def function[reads, parameter[self]]:
constant[
The reads in this PileupCollection. All reads will have an alignment
that overlaps at least one of the included loci.
Since SAM (and pysam) have no real notion of a "read", the returned
instances are actually pysam.AlignedSegment instances, (i.e.
alignments). However, only one alignment will be returned by this
method per read.
Returns
----------
List of pysam.AlignedSegment instances. If a particular read has more
than one alignment in this PileupCollection (e.g. one primary and one
secondary), then the alignment returned is the one with the highest
mapping quality.
]
def function[alignment_precedence, parameter[pysam_alignment_record]]:
return[name[pysam_alignment_record].mapping_quality]
variable[result] assign[=] dictionary[[], []]
for taget[name[pileup]] in starred[call[name[self].pileups.values, parameter[]]] begin[:]
for taget[name[e]] in starred[name[pileup].elements] begin[:]
variable[key] assign[=] call[name[read_key], parameter[name[e].alignment]]
if <ast.BoolOp object at 0x7da18f813670> begin[:]
call[name[result]][name[key]] assign[=] name[e].alignment
return[call[name[list], parameter[call[name[result].values, parameter[]]]]] | keyword[def] identifier[reads] ( identifier[self] ):
literal[string]
keyword[def] identifier[alignment_precedence] ( identifier[pysam_alignment_record] ):
keyword[return] identifier[pysam_alignment_record] . identifier[mapping_quality]
identifier[result] ={}
keyword[for] identifier[pileup] keyword[in] identifier[self] . identifier[pileups] . identifier[values] ():
keyword[for] identifier[e] keyword[in] identifier[pileup] . identifier[elements] :
identifier[key] = identifier[read_key] ( identifier[e] . identifier[alignment] )
keyword[if] identifier[key] keyword[not] keyword[in] identifier[result] keyword[or] (
identifier[alignment_precedence] ( identifier[e] . identifier[alignment] )>
identifier[alignment_precedence] ( identifier[result] [ identifier[key] ])):
identifier[result] [ identifier[key] ]= identifier[e] . identifier[alignment]
keyword[return] identifier[list] ( identifier[result] . identifier[values] ()) | def reads(self):
"""
The reads in this PileupCollection. All reads will have an alignment
that overlaps at least one of the included loci.
Since SAM (and pysam) have no real notion of a "read", the returned
instances are actually pysam.AlignedSegment instances, (i.e.
alignments). However, only one alignment will be returned by this
method per read.
Returns
----------
List of pysam.AlignedSegment instances. If a particular read has more
than one alignment in this PileupCollection (e.g. one primary and one
secondary), then the alignment returned is the one with the highest
mapping quality.
"""
# TODO: Optimize this.
def alignment_precedence(pysam_alignment_record):
return pysam_alignment_record.mapping_quality
result = {}
for pileup in self.pileups.values():
for e in pileup.elements:
key = read_key(e.alignment)
if key not in result or alignment_precedence(e.alignment) > alignment_precedence(result[key]):
result[key] = e.alignment # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']] # depends on [control=['for'], data=['pileup']]
return list(result.values()) |
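The loop above keeps, per read key, the alignment with the highest mapping quality. A standalone sketch of that pattern follows — the namedtuple stands in for pysam.AlignedSegment, and this read_key is a simplified stand-in for the module's helper, not its actual implementation:

from collections import namedtuple

# Stand-in for pysam.AlignedSegment with only the fields used here.
Alignment = namedtuple("Alignment",
                       ["query_name", "is_duplicate", "mapping_quality"])

def read_key(alignment):
    # Hypothetical keying helper: one entry per underlying read.
    return (alignment.query_name, alignment.is_duplicate)

alignments = [
    Alignment("read-1", False, 30),   # primary
    Alignment("read-1", False, 10),   # secondary, lower quality
    Alignment("read-2", False, 60),
]

best = {}
for a in alignments:
    k = read_key(a)
    if k not in best or a.mapping_quality > best[k].mapping_quality:
        best[k] = a

assert sorted(a.mapping_quality for a in best.values()) == [30, 60]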
def trigger(self, when=1):
"""Declare a window with this window's size and a trigger policy.
When the window is triggered is defined by `when`.
If `when` is an `int` then the window is triggered every
`when` tuples. For example, with ``when=5`` the window
will be triggered every five tuples.
        If `when` is a `datetime.timedelta` then it is the period
of the trigger. With a `timedelta` representing one minute
then the window is triggered every minute.
By default, when `trigger` has not been called on a `Window`
it triggers for every tuple inserted into the window
(equivalent to ``when=1``).
Args:
            when: The trigger policy: either an `int` giving the number
                of tuples between triggers, or a `datetime.timedelta`
                giving the period of the trigger.
Returns:
Window: Window that will be triggered.
.. warning:: A trigger is only supported for a sliding window
such as one created by :py:meth:`last`.
"""
tw = Window(self.stream, self._config['type'])
tw._config['evictPolicy'] = self._config['evictPolicy']
tw._config['evictConfig'] = self._config['evictConfig']
if self._config['evictPolicy'] == 'TIME':
tw._config['evictTimeUnit'] = 'MILLISECONDS'
if isinstance(when, datetime.timedelta):
tw._config['triggerPolicy'] = 'TIME'
tw._config['triggerConfig'] = int(when.total_seconds() * 1000.0)
tw._config['triggerTimeUnit'] = 'MILLISECONDS'
elif isinstance(when, int):
tw._config['triggerPolicy'] = 'COUNT'
tw._config['triggerConfig'] = when
else:
raise ValueError(when)
return tw | def function[trigger, parameter[self, when]]:
constant[Declare a window with this window's size and a trigger policy.
When the window is triggered is defined by `when`.
If `when` is an `int` then the window is triggered every
`when` tuples. For example, with ``when=5`` the window
will be triggered every five tuples.
        If `when` is a `datetime.timedelta` then it is the period
of the trigger. With a `timedelta` representing one minute
then the window is triggered every minute.
By default, when `trigger` has not been called on a `Window`
it triggers for every tuple inserted into the window
(equivalent to ``when=1``).
Args:
            when: The trigger policy: either an `int` giving the number
                of tuples between triggers, or a `datetime.timedelta`
                giving the period of the trigger.
Returns:
Window: Window that will be triggered.
.. warning:: A trigger is only supported for a sliding window
such as one created by :py:meth:`last`.
]
variable[tw] assign[=] call[name[Window], parameter[name[self].stream, call[name[self]._config][constant[type]]]]
call[name[tw]._config][constant[evictPolicy]] assign[=] call[name[self]._config][constant[evictPolicy]]
call[name[tw]._config][constant[evictConfig]] assign[=] call[name[self]._config][constant[evictConfig]]
if compare[call[name[self]._config][constant[evictPolicy]] equal[==] constant[TIME]] begin[:]
call[name[tw]._config][constant[evictTimeUnit]] assign[=] constant[MILLISECONDS]
if call[name[isinstance], parameter[name[when], name[datetime].timedelta]] begin[:]
call[name[tw]._config][constant[triggerPolicy]] assign[=] constant[TIME]
call[name[tw]._config][constant[triggerConfig]] assign[=] call[name[int], parameter[binary_operation[call[name[when].total_seconds, parameter[]] * constant[1000.0]]]]
call[name[tw]._config][constant[triggerTimeUnit]] assign[=] constant[MILLISECONDS]
return[name[tw]] | keyword[def] identifier[trigger] ( identifier[self] , identifier[when] = literal[int] ):
literal[string]
identifier[tw] = identifier[Window] ( identifier[self] . identifier[stream] , identifier[self] . identifier[_config] [ literal[string] ])
identifier[tw] . identifier[_config] [ literal[string] ]= identifier[self] . identifier[_config] [ literal[string] ]
identifier[tw] . identifier[_config] [ literal[string] ]= identifier[self] . identifier[_config] [ literal[string] ]
keyword[if] identifier[self] . identifier[_config] [ literal[string] ]== literal[string] :
identifier[tw] . identifier[_config] [ literal[string] ]= literal[string]
keyword[if] identifier[isinstance] ( identifier[when] , identifier[datetime] . identifier[timedelta] ):
identifier[tw] . identifier[_config] [ literal[string] ]= literal[string]
identifier[tw] . identifier[_config] [ literal[string] ]= identifier[int] ( identifier[when] . identifier[total_seconds] ()* literal[int] )
identifier[tw] . identifier[_config] [ literal[string] ]= literal[string]
keyword[elif] identifier[isinstance] ( identifier[when] , identifier[int] ):
identifier[tw] . identifier[_config] [ literal[string] ]= literal[string]
identifier[tw] . identifier[_config] [ literal[string] ]= identifier[when]
keyword[else] :
keyword[raise] identifier[ValueError] ( identifier[when] )
keyword[return] identifier[tw] | def trigger(self, when=1):
"""Declare a window with this window's size and a trigger policy.
When the window is triggered is defined by `when`.
If `when` is an `int` then the window is triggered every
`when` tuples. For example, with ``when=5`` the window
will be triggered every five tuples.
        If `when` is a `datetime.timedelta` then it is the period
of the trigger. With a `timedelta` representing one minute
then the window is triggered every minute.
By default, when `trigger` has not been called on a `Window`
it triggers for every tuple inserted into the window
(equivalent to ``when=1``).
Args:
            when: The trigger policy: either an `int` giving the number
                of tuples between triggers, or a `datetime.timedelta`
                giving the period of the trigger.
Returns:
Window: Window that will be triggered.
.. warning:: A trigger is only supported for a sliding window
such as one created by :py:meth:`last`.
"""
tw = Window(self.stream, self._config['type'])
tw._config['evictPolicy'] = self._config['evictPolicy']
tw._config['evictConfig'] = self._config['evictConfig']
if self._config['evictPolicy'] == 'TIME':
tw._config['evictTimeUnit'] = 'MILLISECONDS' # depends on [control=['if'], data=[]]
if isinstance(when, datetime.timedelta):
tw._config['triggerPolicy'] = 'TIME'
tw._config['triggerConfig'] = int(when.total_seconds() * 1000.0)
tw._config['triggerTimeUnit'] = 'MILLISECONDS' # depends on [control=['if'], data=[]]
elif isinstance(when, int):
tw._config['triggerPolicy'] = 'COUNT'
tw._config['triggerConfig'] = when # depends on [control=['if'], data=[]]
else:
raise ValueError(when)
return tw |
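The when-to-policy mapping is easy to check in isolation; this mirrors the branch above without the Window plumbing:

import datetime

def trigger_config(when):
    # Mirrors the branch above: timedelta -> TIME trigger in ms, int -> COUNT.
    if isinstance(when, datetime.timedelta):
        return {"triggerPolicy": "TIME",
                "triggerConfig": int(when.total_seconds() * 1000.0),
                "triggerTimeUnit": "MILLISECONDS"}
    if isinstance(when, int):
        return {"triggerPolicy": "COUNT", "triggerConfig": when}
    raise ValueError(when)

assert trigger_config(5) == {"triggerPolicy": "COUNT", "triggerConfig": 5}
assert trigger_config(datetime.timedelta(minutes=1))["triggerConfig"] == 60000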
def _fetch_itemslist(self, item):
""" We define two collection:
- Number of work injuries ("Arbetsolycka")
- Number of workrelated diseases ("Arbetssjukdom")
Each contains four datasets:
- Per municipality and year
- Per county and year
- Per municipality and month
- Per municipality and year
"""
if item.is_root:
for c in ["Arbetsolycka", "Arbetssjukdom"]:
yield Collection(c, blob=(c, None, None))
else:
c = item.id
for r in [u"kommun", u"län"]:
for p in [u"år", u"månad"]:
yield Dataset(u"%s-%s-%s" % (c, r, p),
blob=(c, r, p),
label=u"%s, antal per %s och %s" % (c, r, p)) | def function[_fetch_itemslist, parameter[self, item]]:
    constant[ We define two collections:
        - Number of work injuries ("Arbetsolycka")
        - Number of work-related diseases ("Arbetssjukdom")
        Each contains four datasets:
        - Per municipality and year
        - Per county and year
        - Per municipality and month
        - Per county and month
]
if name[item].is_root begin[:]
for taget[name[c]] in starred[list[[<ast.Constant object at 0x7da2054a4b80>, <ast.Constant object at 0x7da2054a7dc0>]]] begin[:]
<ast.Yield object at 0x7da2054a5390> | keyword[def] identifier[_fetch_itemslist] ( identifier[self] , identifier[item] ):
literal[string]
keyword[if] identifier[item] . identifier[is_root] :
keyword[for] identifier[c] keyword[in] [ literal[string] , literal[string] ]:
keyword[yield] identifier[Collection] ( identifier[c] , identifier[blob] =( identifier[c] , keyword[None] , keyword[None] ))
keyword[else] :
identifier[c] = identifier[item] . identifier[id]
keyword[for] identifier[r] keyword[in] [ literal[string] , literal[string] ]:
keyword[for] identifier[p] keyword[in] [ literal[string] , literal[string] ]:
keyword[yield] identifier[Dataset] ( literal[string] %( identifier[c] , identifier[r] , identifier[p] ),
identifier[blob] =( identifier[c] , identifier[r] , identifier[p] ),
identifier[label] = literal[string] %( identifier[c] , identifier[r] , identifier[p] )) | def _fetch_itemslist(self, item):
""" We define two collection:
- Number of work injuries ("Arbetsolycka")
- Number of workrelated diseases ("Arbetssjukdom")
Each contains four datasets:
- Per municipality and year
- Per county and year
- Per municipality and month
- Per municipality and year
"""
if item.is_root:
for c in ['Arbetsolycka', 'Arbetssjukdom']:
yield Collection(c, blob=(c, None, None)) # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]]
else:
c = item.id
for r in [u'kommun', u'län']:
for p in [u'år', u'månad']:
yield Dataset(u'%s-%s-%s' % (c, r, p), blob=(c, r, p), label=u'%s, antal per %s och %s' % (c, r, p)) # depends on [control=['for'], data=['p']] # depends on [control=['for'], data=['r']] |
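Stripped of the scraper base classes, the nested loops produce a fixed id grid — two collections times four region/period datasets:

collections = ["Arbetsolycka", "Arbetssjukdom"]
regions = [u"kommun", u"län"]
periods = [u"år", u"månad"]

# Reproduces the Dataset id scheme from the yield above.
ids = [u"%s-%s-%s" % (c, r, p)
       for c in collections for r in regions for p in periods]
assert len(ids) == 8                       # 2 collections x 4 datasets each
assert u"Arbetsolycka-kommun-år" in ids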
def rmdir(self, req, parent, name):
"""Remove a directory
Valid replies:
reply_err
"""
self.reply_err(req, errno.EROFS) | def function[rmdir, parameter[self, req, parent, name]]:
constant[Remove a directory
Valid replies:
reply_err
]
call[name[self].reply_err, parameter[name[req], name[errno].EROFS]] | keyword[def] identifier[rmdir] ( identifier[self] , identifier[req] , identifier[parent] , identifier[name] ):
literal[string]
identifier[self] . identifier[reply_err] ( identifier[req] , identifier[errno] . identifier[EROFS] ) | def rmdir(self, req, parent, name):
"""Remove a directory
Valid replies:
reply_err
"""
self.reply_err(req, errno.EROFS) |
def vblk_erase(self, address):
"""nvm_vblk erase"""
cmd = ["nvm_vblk erase", self.envs, "0x%x" % address]
status, _, _ = cij.ssh.command(cmd, shell=True)
return status | def function[vblk_erase, parameter[self, address]]:
constant[nvm_vblk erase]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18f09e200>, <ast.Attribute object at 0x7da18f09f820>, <ast.BinOp object at 0x7da18f09e1a0>]]
<ast.Tuple object at 0x7da18f09f580> assign[=] call[name[cij].ssh.command, parameter[name[cmd]]]
return[name[status]] | keyword[def] identifier[vblk_erase] ( identifier[self] , identifier[address] ):
literal[string]
identifier[cmd] =[ literal[string] , identifier[self] . identifier[envs] , literal[string] % identifier[address] ]
identifier[status] , identifier[_] , identifier[_] = identifier[cij] . identifier[ssh] . identifier[command] ( identifier[cmd] , identifier[shell] = keyword[True] )
keyword[return] identifier[status] | def vblk_erase(self, address):
"""nvm_vblk erase"""
cmd = ['nvm_vblk erase', self.envs, '0x%x' % address]
(status, _, _) = cij.ssh.command(cmd, shell=True)
return status |
def show(uuid):
'''
Show manifest of a given image
uuid : string
uuid of image
CLI Example:
.. code-block:: bash
salt '*' imgadm.show e42f8c84-bbea-11e2-b920-078fab2aab1f
salt '*' imgadm.show plexinc/pms-docker:plexpass
'''
ret = {}
if _is_uuid(uuid) or _is_docker_uuid(uuid):
cmd = 'imgadm show {0}'.format(uuid)
res = __salt__['cmd.run_all'](cmd, python_shell=False)
retcode = res['retcode']
if retcode != 0:
ret['Error'] = _exit_status(retcode, res['stderr'])
else:
ret = salt.utils.json.loads(res['stdout'])
else:
ret['Error'] = "{} is not a valid uuid.".format(uuid)
return ret | def function[show, parameter[uuid]]:
constant[
Show manifest of a given image
uuid : string
uuid of image
CLI Example:
.. code-block:: bash
salt '*' imgadm.show e42f8c84-bbea-11e2-b920-078fab2aab1f
salt '*' imgadm.show plexinc/pms-docker:plexpass
]
variable[ret] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b1c654e0> begin[:]
variable[cmd] assign[=] call[constant[imgadm show {0}].format, parameter[name[uuid]]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]
variable[retcode] assign[=] call[name[res]][constant[retcode]]
if compare[name[retcode] not_equal[!=] constant[0]] begin[:]
call[name[ret]][constant[Error]] assign[=] call[name[_exit_status], parameter[name[retcode], call[name[res]][constant[stderr]]]]
return[name[ret]] | keyword[def] identifier[show] ( identifier[uuid] ):
literal[string]
identifier[ret] ={}
keyword[if] identifier[_is_uuid] ( identifier[uuid] ) keyword[or] identifier[_is_docker_uuid] ( identifier[uuid] ):
identifier[cmd] = literal[string] . identifier[format] ( identifier[uuid] )
identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )
identifier[retcode] = identifier[res] [ literal[string] ]
keyword[if] identifier[retcode] != literal[int] :
identifier[ret] [ literal[string] ]= identifier[_exit_status] ( identifier[retcode] , identifier[res] [ literal[string] ])
keyword[else] :
identifier[ret] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[res] [ literal[string] ])
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[uuid] )
keyword[return] identifier[ret] | def show(uuid):
"""
Show manifest of a given image
uuid : string
uuid of image
CLI Example:
.. code-block:: bash
salt '*' imgadm.show e42f8c84-bbea-11e2-b920-078fab2aab1f
salt '*' imgadm.show plexinc/pms-docker:plexpass
"""
ret = {}
if _is_uuid(uuid) or _is_docker_uuid(uuid):
cmd = 'imgadm show {0}'.format(uuid)
res = __salt__['cmd.run_all'](cmd, python_shell=False)
retcode = res['retcode']
if retcode != 0:
ret['Error'] = _exit_status(retcode, res['stderr']) # depends on [control=['if'], data=['retcode']]
else:
ret = salt.utils.json.loads(res['stdout']) # depends on [control=['if'], data=[]]
else:
ret['Error'] = '{} is not a valid uuid.'.format(uuid)
return ret |
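_is_uuid and _is_docker_uuid are module helpers not shown here. A plausible stand-in for the UUID check — an assumption for illustration, not the module's actual helper:

import re

# Hypothetical stand-in for the module's _is_uuid helper.
UUID_RE = re.compile(
    r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$')

def _is_uuid(value):
    return bool(UUID_RE.match(value))

assert _is_uuid('e42f8c84-bbea-11e2-b920-078fab2aab1f')
assert not _is_uuid('plexinc/pms-docker:plexpass')   # docker tag, not a uuid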
def apply(
self,
func,
axis=0,
broadcast=None,
raw=False,
reduce=None,
result_type=None,
convert_dtype=True,
args=(),
**kwds
):
"""Apply a function along input axis of DataFrame.
Args:
func: The function to apply
axis: The axis over which to apply the func.
broadcast: Whether or not to broadcast.
raw: Whether or not to convert to a Series.
reduce: Whether or not to try to apply reduction procedures.
Returns:
Series or DataFrame, depending on func.
"""
axis = self._get_axis_number(axis)
ErrorMessage.non_verified_udf()
if isinstance(func, string_types):
if axis == 1:
kwds["axis"] = axis
result = self._string_function(func, *args, **kwds)
# Sometimes we can return a scalar here
if isinstance(result, BasePandasDataset):
return result._query_compiler
return result
elif isinstance(func, dict):
if axis == 1:
raise TypeError(
"(\"'dict' object is not callable\", "
"'occurred at index {0}'".format(self.index[0])
)
if len(self.columns) != len(set(self.columns)):
warnings.warn(
"duplicate column names not supported with apply().",
FutureWarning,
stacklevel=2,
)
elif not callable(func) and not is_list_like(func):
raise TypeError("{} object is not callable".format(type(func)))
query_compiler = self._query_compiler.apply(func, axis, *args, **kwds)
return query_compiler | def function[apply, parameter[self, func, axis, broadcast, raw, reduce, result_type, convert_dtype, args]]:
constant[Apply a function along input axis of DataFrame.
Args:
func: The function to apply
axis: The axis over which to apply the func.
broadcast: Whether or not to broadcast.
raw: Whether or not to convert to a Series.
reduce: Whether or not to try to apply reduction procedures.
Returns:
Series or DataFrame, depending on func.
]
variable[axis] assign[=] call[name[self]._get_axis_number, parameter[name[axis]]]
call[name[ErrorMessage].non_verified_udf, parameter[]]
if call[name[isinstance], parameter[name[func], name[string_types]]] begin[:]
if compare[name[axis] equal[==] constant[1]] begin[:]
call[name[kwds]][constant[axis]] assign[=] name[axis]
variable[result] assign[=] call[name[self]._string_function, parameter[name[func], <ast.Starred object at 0x7da18f722e00>]]
if call[name[isinstance], parameter[name[result], name[BasePandasDataset]]] begin[:]
return[name[result]._query_compiler]
return[name[result]]
variable[query_compiler] assign[=] call[name[self]._query_compiler.apply, parameter[name[func], name[axis], <ast.Starred object at 0x7da20c76d330>]]
return[name[query_compiler]] | keyword[def] identifier[apply] (
identifier[self] ,
identifier[func] ,
identifier[axis] = literal[int] ,
identifier[broadcast] = keyword[None] ,
identifier[raw] = keyword[False] ,
identifier[reduce] = keyword[None] ,
identifier[result_type] = keyword[None] ,
identifier[convert_dtype] = keyword[True] ,
identifier[args] =(),
** identifier[kwds]
):
literal[string]
identifier[axis] = identifier[self] . identifier[_get_axis_number] ( identifier[axis] )
identifier[ErrorMessage] . identifier[non_verified_udf] ()
keyword[if] identifier[isinstance] ( identifier[func] , identifier[string_types] ):
keyword[if] identifier[axis] == literal[int] :
identifier[kwds] [ literal[string] ]= identifier[axis]
identifier[result] = identifier[self] . identifier[_string_function] ( identifier[func] ,* identifier[args] ,** identifier[kwds] )
keyword[if] identifier[isinstance] ( identifier[result] , identifier[BasePandasDataset] ):
keyword[return] identifier[result] . identifier[_query_compiler]
keyword[return] identifier[result]
keyword[elif] identifier[isinstance] ( identifier[func] , identifier[dict] ):
keyword[if] identifier[axis] == literal[int] :
keyword[raise] identifier[TypeError] (
literal[string]
literal[string] . identifier[format] ( identifier[self] . identifier[index] [ literal[int] ])
)
keyword[if] identifier[len] ( identifier[self] . identifier[columns] )!= identifier[len] ( identifier[set] ( identifier[self] . identifier[columns] )):
identifier[warnings] . identifier[warn] (
literal[string] ,
identifier[FutureWarning] ,
identifier[stacklevel] = literal[int] ,
)
keyword[elif] keyword[not] identifier[callable] ( identifier[func] ) keyword[and] keyword[not] identifier[is_list_like] ( identifier[func] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[func] )))
identifier[query_compiler] = identifier[self] . identifier[_query_compiler] . identifier[apply] ( identifier[func] , identifier[axis] ,* identifier[args] ,** identifier[kwds] )
keyword[return] identifier[query_compiler] | def apply(self, func, axis=0, broadcast=None, raw=False, reduce=None, result_type=None, convert_dtype=True, args=(), **kwds):
"""Apply a function along input axis of DataFrame.
Args:
func: The function to apply
axis: The axis over which to apply the func.
broadcast: Whether or not to broadcast.
raw: Whether or not to convert to a Series.
reduce: Whether or not to try to apply reduction procedures.
Returns:
Series or DataFrame, depending on func.
"""
axis = self._get_axis_number(axis)
ErrorMessage.non_verified_udf()
if isinstance(func, string_types):
if axis == 1:
kwds['axis'] = axis # depends on [control=['if'], data=['axis']]
result = self._string_function(func, *args, **kwds) # Sometimes we can return a scalar here
if isinstance(result, BasePandasDataset):
return result._query_compiler # depends on [control=['if'], data=[]]
return result # depends on [control=['if'], data=[]]
elif isinstance(func, dict):
if axis == 1:
raise TypeError('("\'dict\' object is not callable", \'occurred at index {0}\''.format(self.index[0])) # depends on [control=['if'], data=[]]
if len(self.columns) != len(set(self.columns)):
warnings.warn('duplicate column names not supported with apply().', FutureWarning, stacklevel=2) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not callable(func) and (not is_list_like(func)):
raise TypeError('{} object is not callable'.format(type(func))) # depends on [control=['if'], data=[]]
query_compiler = self._query_compiler.apply(func, axis, *args, **kwds)
return query_compiler |
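modin mirrors the pandas API, so the dispatch above can be sanity-checked against plain pandas — recent pandas versions accept a string function name here as well:

import pandas

df = pandas.DataFrame({"a": [1, 2], "b": [3, 4]})
# String name -> the _string_function path in the method above.
assert list(df.apply("sum")) == [3, 7]
# Callable -> the query-compiler apply path.
assert list(df.apply(lambda r: r.a + r.b, axis=1)) == [4, 6]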
def createProduct(self, powerups):
"""
Create a new L{Product} instance which confers the given
powerups.
@type powerups: C{list} of powerup item types
@rtype: L{Product}
@return: The new product instance.
"""
types = [qual(powerup).decode('ascii')
for powerup in powerups]
for p in self.store.parent.query(Product):
for t in types:
if t in p.types:
raise ValueError("%s is already included in a Product" % (t,))
return Product(store=self.store.parent,
types=types) | def function[createProduct, parameter[self, powerups]]:
constant[
Create a new L{Product} instance which confers the given
powerups.
@type powerups: C{list} of powerup item types
@rtype: L{Product}
@return: The new product instance.
]
variable[types] assign[=] <ast.ListComp object at 0x7da1b0a6e080>
for taget[name[p]] in starred[call[name[self].store.parent.query, parameter[name[Product]]]] begin[:]
for taget[name[t]] in starred[name[types]] begin[:]
if compare[name[t] in name[p].types] begin[:]
<ast.Raise object at 0x7da1b0bd9f00>
return[call[name[Product], parameter[]]] | keyword[def] identifier[createProduct] ( identifier[self] , identifier[powerups] ):
literal[string]
identifier[types] =[ identifier[qual] ( identifier[powerup] ). identifier[decode] ( literal[string] )
keyword[for] identifier[powerup] keyword[in] identifier[powerups] ]
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[store] . identifier[parent] . identifier[query] ( identifier[Product] ):
keyword[for] identifier[t] keyword[in] identifier[types] :
keyword[if] identifier[t] keyword[in] identifier[p] . identifier[types] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[t] ,))
keyword[return] identifier[Product] ( identifier[store] = identifier[self] . identifier[store] . identifier[parent] ,
identifier[types] = identifier[types] ) | def createProduct(self, powerups):
"""
Create a new L{Product} instance which confers the given
powerups.
@type powerups: C{list} of powerup item types
@rtype: L{Product}
@return: The new product instance.
"""
types = [qual(powerup).decode('ascii') for powerup in powerups]
for p in self.store.parent.query(Product):
for t in types:
if t in p.types:
raise ValueError('%s is already included in a Product' % (t,)) # depends on [control=['if'], data=['t']] # depends on [control=['for'], data=['t']] # depends on [control=['for'], data=['p']]
return Product(store=self.store.parent, types=types) |
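The conflict check walks every existing Product before creating a new one. The same guard in isolation (the type names here are invented for illustration):

def check_no_overlap(existing_products, requested_types):
    # Same guard as above: a powerup type may appear in at most one Product.
    for product_types in existing_products:
        for t in requested_types:
            if t in product_types:
                raise ValueError("%s is already included in a Product" % (t,))

check_no_overlap([["app.Chat", "app.Mail"]], ["app.Calendar"])   # passes
try:
    check_no_overlap([["app.Chat", "app.Mail"]], ["app.Mail"])
except ValueError as e:
    assert "already included" in str(e)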
def add_output(self, address, value, unit='satoshi'):
"""
Add an output (a person who will receive funds via this tx).
If no unit is specified, satoshi is implied.
"""
value_satoshi = self.from_unit_to_satoshi(value, unit)
if self.verbose:
print("Adding output of: %s satoshi (%.8f)" % (
value_satoshi, (value_satoshi / 1e8)
))
self.outs.append({
'address': address,
'value': value_satoshi
}) | def function[add_output, parameter[self, address, value, unit]]:
constant[
Add an output (a person who will receive funds via this tx).
If no unit is specified, satoshi is implied.
]
variable[value_satoshi] assign[=] call[name[self].from_unit_to_satoshi, parameter[name[value], name[unit]]]
if name[self].verbose begin[:]
call[name[print], parameter[binary_operation[constant[Adding output of: %s satoshi (%.8f)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1018190>, <ast.BinOp object at 0x7da1b1018100>]]]]]
call[name[self].outs.append, parameter[dictionary[[<ast.Constant object at 0x7da1b1017430>, <ast.Constant object at 0x7da1b1017760>], [<ast.Name object at 0x7da1b10176a0>, <ast.Name object at 0x7da1b10170a0>]]]] | keyword[def] identifier[add_output] ( identifier[self] , identifier[address] , identifier[value] , identifier[unit] = literal[string] ):
literal[string]
identifier[value_satoshi] = identifier[self] . identifier[from_unit_to_satoshi] ( identifier[value] , identifier[unit] )
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] %(
identifier[value_satoshi] ,( identifier[value_satoshi] / literal[int] )
))
identifier[self] . identifier[outs] . identifier[append] ({
literal[string] : identifier[address] ,
literal[string] : identifier[value_satoshi]
}) | def add_output(self, address, value, unit='satoshi'):
"""
Add an output (a person who will receive funds via this tx).
If no unit is specified, satoshi is implied.
"""
value_satoshi = self.from_unit_to_satoshi(value, unit)
if self.verbose:
print('Adding output of: %s satoshi (%.8f)' % (value_satoshi, value_satoshi / 100000000.0)) # depends on [control=['if'], data=[]]
self.outs.append({'address': address, 'value': value_satoshi}) |
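from_unit_to_satoshi is defined elsewhere on the class; a minimal stand-in handling just two units shows what the output entry ends up holding:

# Stand-in converter (assumption: only 'satoshi' and 'btc' are needed here).
def to_satoshi(value, unit):
    if unit == 'satoshi':
        return int(value)
    if unit == 'btc':
        return int(value * 1e8)
    raise ValueError("unsupported unit: %s" % unit)

outs = []
outs.append({'address': '1BoatSLRHtKNngkdXEeobR76b53LETtpyT',
             'value': to_satoshi(0.5, 'btc')})
assert outs[0]['value'] == 50000000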
def plot(self, fig=None, plot_trap=False, name=False, trap_color='g',
trap_kwargs=None, **kwargs):
"""
Makes a simple plot of signal
:param fig: (optional)
Argument for :func:`plotutils.setfig`.
:param plot_trap: (optional)
Whether to plot the (best-fit least-sq) trapezoid fit.
:param name: (optional)
Whether to annotate plot with the name of the signal;
can be ``True`` (in which case ``self.name`` will be
used), or any arbitrary string.
:param trap_color: (optional)
Color of trapezoid fit line.
:param trap_kwargs: (optional)
Keyword arguments to pass to trapezoid fit line.
:param **kwargs: (optional)
Additional keyword arguments passed to ``plt.plot``.
"""
setfig(fig)
plt.plot(self.ts,self.fs,'.',**kwargs)
if plot_trap and hasattr(self,'trapfit'):
if trap_kwargs is None:
trap_kwargs = {}
plt.plot(self.ts, traptransit(self.ts,self.trapfit),
color=trap_color, **trap_kwargs)
if name is not None:
if type(name)==type(''):
text = name
else:
text = self.name
plt.annotate(text,xy=(0.1,0.1),xycoords='axes fraction',fontsize=22)
if hasattr(self,'depthfit') and not np.isnan(self.depthfit[0]):
lo = 1 - 3*self.depthfit[0]
hi = 1 + 2*self.depthfit[0]
else:
lo = 1
hi = 1
sig = qstd(self.fs,0.005)
hi = max(hi,self.fs.mean() + 7*sig)
lo = min(lo,self.fs.mean() - 7*sig)
logging.debug('lo={}, hi={}'.format(lo,hi))
plt.ylim((lo,hi))
plt.xlabel('time [days]')
plt.ylabel('Relative flux') | def function[plot, parameter[self, fig, plot_trap, name, trap_color, trap_kwargs]]:
constant[
Makes a simple plot of signal
:param fig: (optional)
Argument for :func:`plotutils.setfig`.
:param plot_trap: (optional)
Whether to plot the (best-fit least-sq) trapezoid fit.
:param name: (optional)
Whether to annotate plot with the name of the signal;
can be ``True`` (in which case ``self.name`` will be
used), or any arbitrary string.
:param trap_color: (optional)
Color of trapezoid fit line.
:param trap_kwargs: (optional)
Keyword arguments to pass to trapezoid fit line.
:param **kwargs: (optional)
Additional keyword arguments passed to ``plt.plot``.
]
call[name[setfig], parameter[name[fig]]]
call[name[plt].plot, parameter[name[self].ts, name[self].fs, constant[.]]]
if <ast.BoolOp object at 0x7da18ede67a0> begin[:]
if compare[name[trap_kwargs] is constant[None]] begin[:]
variable[trap_kwargs] assign[=] dictionary[[], []]
call[name[plt].plot, parameter[name[self].ts, call[name[traptransit], parameter[name[self].ts, name[self].trapfit]]]]
if compare[name[name] is_not constant[None]] begin[:]
if compare[call[name[type], parameter[name[name]]] equal[==] call[name[type], parameter[constant[]]]] begin[:]
variable[text] assign[=] name[name]
call[name[plt].annotate, parameter[name[text]]]
if <ast.BoolOp object at 0x7da18ede7a60> begin[:]
variable[lo] assign[=] binary_operation[constant[1] - binary_operation[constant[3] * call[name[self].depthfit][constant[0]]]]
variable[hi] assign[=] binary_operation[constant[1] + binary_operation[constant[2] * call[name[self].depthfit][constant[0]]]]
variable[sig] assign[=] call[name[qstd], parameter[name[self].fs, constant[0.005]]]
variable[hi] assign[=] call[name[max], parameter[name[hi], binary_operation[call[name[self].fs.mean, parameter[]] + binary_operation[constant[7] * name[sig]]]]]
variable[lo] assign[=] call[name[min], parameter[name[lo], binary_operation[call[name[self].fs.mean, parameter[]] - binary_operation[constant[7] * name[sig]]]]]
call[name[logging].debug, parameter[call[constant[lo={}, hi={}].format, parameter[name[lo], name[hi]]]]]
call[name[plt].ylim, parameter[tuple[[<ast.Name object at 0x7da1b28d4d00>, <ast.Name object at 0x7da1b28d6140>]]]]
call[name[plt].xlabel, parameter[constant[time [days]]]]
call[name[plt].ylabel, parameter[constant[Relative flux]]] | keyword[def] identifier[plot] ( identifier[self] , identifier[fig] = keyword[None] , identifier[plot_trap] = keyword[False] , identifier[name] = keyword[False] , identifier[trap_color] = literal[string] ,
identifier[trap_kwargs] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[setfig] ( identifier[fig] )
identifier[plt] . identifier[plot] ( identifier[self] . identifier[ts] , identifier[self] . identifier[fs] , literal[string] ,** identifier[kwargs] )
keyword[if] identifier[plot_trap] keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[if] identifier[trap_kwargs] keyword[is] keyword[None] :
identifier[trap_kwargs] ={}
identifier[plt] . identifier[plot] ( identifier[self] . identifier[ts] , identifier[traptransit] ( identifier[self] . identifier[ts] , identifier[self] . identifier[trapfit] ),
identifier[color] = identifier[trap_color] ,** identifier[trap_kwargs] )
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[type] ( identifier[name] )== identifier[type] ( literal[string] ):
identifier[text] = identifier[name]
keyword[else] :
identifier[text] = identifier[self] . identifier[name]
identifier[plt] . identifier[annotate] ( identifier[text] , identifier[xy] =( literal[int] , literal[int] ), identifier[xycoords] = literal[string] , identifier[fontsize] = literal[int] )
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[self] . identifier[depthfit] [ literal[int] ]):
identifier[lo] = literal[int] - literal[int] * identifier[self] . identifier[depthfit] [ literal[int] ]
identifier[hi] = literal[int] + literal[int] * identifier[self] . identifier[depthfit] [ literal[int] ]
keyword[else] :
identifier[lo] = literal[int]
identifier[hi] = literal[int]
identifier[sig] = identifier[qstd] ( identifier[self] . identifier[fs] , literal[int] )
identifier[hi] = identifier[max] ( identifier[hi] , identifier[self] . identifier[fs] . identifier[mean] ()+ literal[int] * identifier[sig] )
identifier[lo] = identifier[min] ( identifier[lo] , identifier[self] . identifier[fs] . identifier[mean] ()- literal[int] * identifier[sig] )
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[lo] , identifier[hi] ))
identifier[plt] . identifier[ylim] (( identifier[lo] , identifier[hi] ))
identifier[plt] . identifier[xlabel] ( literal[string] )
identifier[plt] . identifier[ylabel] ( literal[string] ) | def plot(self, fig=None, plot_trap=False, name=False, trap_color='g', trap_kwargs=None, **kwargs):
"""
Makes a simple plot of signal
:param fig: (optional)
Argument for :func:`plotutils.setfig`.
:param plot_trap: (optional)
Whether to plot the (best-fit least-sq) trapezoid fit.
:param name: (optional)
Whether to annotate plot with the name of the signal;
can be ``True`` (in which case ``self.name`` will be
used), or any arbitrary string.
:param trap_color: (optional)
Color of trapezoid fit line.
:param trap_kwargs: (optional)
Keyword arguments to pass to trapezoid fit line.
:param **kwargs: (optional)
Additional keyword arguments passed to ``plt.plot``.
"""
setfig(fig)
plt.plot(self.ts, self.fs, '.', **kwargs)
if plot_trap and hasattr(self, 'trapfit'):
if trap_kwargs is None:
trap_kwargs = {} # depends on [control=['if'], data=['trap_kwargs']]
plt.plot(self.ts, traptransit(self.ts, self.trapfit), color=trap_color, **trap_kwargs) # depends on [control=['if'], data=[]]
if name is not None:
if type(name) == type(''):
text = name # depends on [control=['if'], data=[]]
else:
text = self.name
plt.annotate(text, xy=(0.1, 0.1), xycoords='axes fraction', fontsize=22) # depends on [control=['if'], data=['name']]
if hasattr(self, 'depthfit') and (not np.isnan(self.depthfit[0])):
lo = 1 - 3 * self.depthfit[0]
hi = 1 + 2 * self.depthfit[0] # depends on [control=['if'], data=[]]
else:
lo = 1
hi = 1
sig = qstd(self.fs, 0.005)
hi = max(hi, self.fs.mean() + 7 * sig)
lo = min(lo, self.fs.mean() - 7 * sig)
logging.debug('lo={}, hi={}'.format(lo, hi))
plt.ylim((lo, hi))
plt.xlabel('time [days]')
plt.ylabel('Relative flux') |
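The y-limit logic widens the window to whichever is larger: the fitted depth band or seven robust standard deviations around the mean. qstd is defined elsewhere, so numpy's plain std stands in below:

import numpy as np

fs = np.random.default_rng(0).normal(1.0, 0.001, 2000)
depth = 0.01                       # hypothetical depthfit[0]
lo, hi = 1 - 3 * depth, 1 + 2 * depth
sig = fs.std()                     # stand-in for qstd(fs, 0.005)
hi = max(hi, fs.mean() + 7 * sig)
lo = min(lo, fs.mean() - 7 * sig)
assert lo < 1 < hi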
def _cleanup(self):
'''
Cleanup all the local data.
'''
self._select_cb = None
self._commit_cb = None
self._rollback_cb = None
super(TransactionClass, self)._cleanup() | def function[_cleanup, parameter[self]]:
constant[
Cleanup all the local data.
]
name[self]._select_cb assign[=] constant[None]
name[self]._commit_cb assign[=] constant[None]
name[self]._rollback_cb assign[=] constant[None]
call[call[name[super], parameter[name[TransactionClass], name[self]]]._cleanup, parameter[]] | keyword[def] identifier[_cleanup] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_select_cb] = keyword[None]
identifier[self] . identifier[_commit_cb] = keyword[None]
identifier[self] . identifier[_rollback_cb] = keyword[None]
identifier[super] ( identifier[TransactionClass] , identifier[self] ). identifier[_cleanup] () | def _cleanup(self):
"""
Cleanup all the local data.
"""
self._select_cb = None
self._commit_cb = None
self._rollback_cb = None
super(TransactionClass, self)._cleanup() |
def resample_factor(self, factor):
"""Resample to a new regular grid.
Parameters
----------
factor : float
            The number of grid cells is scaled by `factor` in each
dimension, i.e., ``factor * N_i`` cells along each
dimension i.
Returns
-------
Grid
See Also
--------
resample
"""
# new number of edges N' = (N-1)*f + 1
newlengths = [(N - 1) * float(factor) + 1 for N in self._len_edges()]
edges = [numpy.linspace(start, stop, num=int(N), endpoint=True)
for (start, stop, N) in
zip(self._min_edges(), self._max_edges(), newlengths)]
return self.resample(edges) | def function[resample_factor, parameter[self, factor]]:
constant[Resample to a new regular grid.
Parameters
----------
factor : float
            The number of grid cells is scaled by `factor` in each
dimension, i.e., ``factor * N_i`` cells along each
dimension i.
Returns
-------
Grid
See Also
--------
resample
]
variable[newlengths] assign[=] <ast.ListComp object at 0x7da1afe72c80>
variable[edges] assign[=] <ast.ListComp object at 0x7da1afe729e0>
return[call[name[self].resample, parameter[name[edges]]]] | keyword[def] identifier[resample_factor] ( identifier[self] , identifier[factor] ):
literal[string]
identifier[newlengths] =[( identifier[N] - literal[int] )* identifier[float] ( identifier[factor] )+ literal[int] keyword[for] identifier[N] keyword[in] identifier[self] . identifier[_len_edges] ()]
identifier[edges] =[ identifier[numpy] . identifier[linspace] ( identifier[start] , identifier[stop] , identifier[num] = identifier[int] ( identifier[N] ), identifier[endpoint] = keyword[True] )
keyword[for] ( identifier[start] , identifier[stop] , identifier[N] ) keyword[in]
identifier[zip] ( identifier[self] . identifier[_min_edges] (), identifier[self] . identifier[_max_edges] (), identifier[newlengths] )]
keyword[return] identifier[self] . identifier[resample] ( identifier[edges] ) | def resample_factor(self, factor):
"""Resample to a new regular grid.
Parameters
----------
factor : float
            The number of grid cells is scaled by `factor` in each
dimension, i.e., ``factor * N_i`` cells along each
dimension i.
Returns
-------
Grid
See Also
--------
resample
"""
# new number of edges N' = (N-1)*f + 1
newlengths = [(N - 1) * float(factor) + 1 for N in self._len_edges()]
edges = [numpy.linspace(start, stop, num=int(N), endpoint=True) for (start, stop, N) in zip(self._min_edges(), self._max_edges(), newlengths)]
return self.resample(edges) |
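The edge arithmetic preserves the grid extent: N' = (N - 1) * f + 1 edges per axis, so the first and last edges stay fixed while the cell count scales:

import numpy

start, stop, N = 0.0, 10.0, 11            # 11 edges -> 10 cells
factor = 2
new_N = (N - 1) * float(factor) + 1       # 21 edges -> 20 cells
edges = numpy.linspace(start, stop, num=int(new_N), endpoint=True)
assert len(edges) == 21 and edges[0] == 0.0 and edges[-1] == 10.0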
def compile(source, name):
""" Compile the string source code into a shared object linked against
the static version of cufft for callback support.
"""
cache = os.path.join(pycbc._cache_dir_path, name)
hash_file = cache + ".hash"
lib_file = cache + ".so"
obj_file = cache + ".o"
try:
if int(open(hash_file, "r").read()) == hash(source):
return lib_file
raise ValueError
except:
pass
src_file = cache + ".cu"
fsrc = open(src_file, "w")
fsrc.write(source)
fsrc.close()
cmd = ["nvcc", "-ccbin", "g++", "-dc", "-m64",
"--compiler-options", "'-fPIC'",
"-o", obj_file,
"-c", src_file]
print(" ".join(cmd))
subprocess.check_call(cmd)
cmd = ["nvcc", "-shared", "-ccbin", "g++", "-m64",
"-o", lib_file, obj_file, "-lcufft_static", "-lculibos"]
print(" ".join(cmd))
subprocess.check_call(cmd)
hash_file = cache + ".hash"
fhash = open(hash_file, "w")
fhash.write(str(hash(source)))
return lib_file | def function[compile, parameter[source, name]]:
constant[ Compile the string source code into a shared object linked against
the static version of cufft for callback support.
]
variable[cache] assign[=] call[name[os].path.join, parameter[name[pycbc]._cache_dir_path, name[name]]]
variable[hash_file] assign[=] binary_operation[name[cache] + constant[.hash]]
variable[lib_file] assign[=] binary_operation[name[cache] + constant[.so]]
variable[obj_file] assign[=] binary_operation[name[cache] + constant[.o]]
<ast.Try object at 0x7da20c6a86a0>
variable[src_file] assign[=] binary_operation[name[cache] + constant[.cu]]
variable[fsrc] assign[=] call[name[open], parameter[name[src_file], constant[w]]]
call[name[fsrc].write, parameter[name[source]]]
call[name[fsrc].close, parameter[]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18bcc99c0>, <ast.Constant object at 0x7da18bcc93f0>, <ast.Constant object at 0x7da18bcc9930>, <ast.Constant object at 0x7da18bccb2b0>, <ast.Constant object at 0x7da18bccb670>, <ast.Constant object at 0x7da18bccbe80>, <ast.Constant object at 0x7da18bcc8f10>, <ast.Constant object at 0x7da18bccbf40>, <ast.Name object at 0x7da18bcc8ee0>, <ast.Constant object at 0x7da18bcca530>, <ast.Name object at 0x7da18bcc9240>]]
call[name[print], parameter[call[constant[ ].join, parameter[name[cmd]]]]]
call[name[subprocess].check_call, parameter[name[cmd]]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da20e9b27d0>, <ast.Constant object at 0x7da20e9b0370>, <ast.Constant object at 0x7da20e9b32e0>, <ast.Constant object at 0x7da20e9b16c0>, <ast.Constant object at 0x7da20e9b1330>, <ast.Constant object at 0x7da20e9b0520>, <ast.Name object at 0x7da20e9b13c0>, <ast.Name object at 0x7da20e9b1cf0>, <ast.Constant object at 0x7da20e9b04f0>, <ast.Constant object at 0x7da20e9b2b30>]]
call[name[print], parameter[call[constant[ ].join, parameter[name[cmd]]]]]
call[name[subprocess].check_call, parameter[name[cmd]]]
variable[hash_file] assign[=] binary_operation[name[cache] + constant[.hash]]
variable[fhash] assign[=] call[name[open], parameter[name[hash_file], constant[w]]]
call[name[fhash].write, parameter[call[name[str], parameter[call[name[hash], parameter[name[source]]]]]]]
return[name[lib_file]] | keyword[def] identifier[compile] ( identifier[source] , identifier[name] ):
literal[string]
identifier[cache] = identifier[os] . identifier[path] . identifier[join] ( identifier[pycbc] . identifier[_cache_dir_path] , identifier[name] )
identifier[hash_file] = identifier[cache] + literal[string]
identifier[lib_file] = identifier[cache] + literal[string]
identifier[obj_file] = identifier[cache] + literal[string]
keyword[try] :
keyword[if] identifier[int] ( identifier[open] ( identifier[hash_file] , literal[string] ). identifier[read] ())== identifier[hash] ( identifier[source] ):
keyword[return] identifier[lib_file]
keyword[raise] identifier[ValueError]
keyword[except] :
keyword[pass]
identifier[src_file] = identifier[cache] + literal[string]
identifier[fsrc] = identifier[open] ( identifier[src_file] , literal[string] )
identifier[fsrc] . identifier[write] ( identifier[source] )
identifier[fsrc] . identifier[close] ()
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , identifier[obj_file] ,
literal[string] , identifier[src_file] ]
identifier[print] ( literal[string] . identifier[join] ( identifier[cmd] ))
identifier[subprocess] . identifier[check_call] ( identifier[cmd] )
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , identifier[lib_file] , identifier[obj_file] , literal[string] , literal[string] ]
identifier[print] ( literal[string] . identifier[join] ( identifier[cmd] ))
identifier[subprocess] . identifier[check_call] ( identifier[cmd] )
identifier[hash_file] = identifier[cache] + literal[string]
identifier[fhash] = identifier[open] ( identifier[hash_file] , literal[string] )
identifier[fhash] . identifier[write] ( identifier[str] ( identifier[hash] ( identifier[source] )))
keyword[return] identifier[lib_file] | def compile(source, name):
""" Compile the string source code into a shared object linked against
the static version of cufft for callback support.
"""
cache = os.path.join(pycbc._cache_dir_path, name)
hash_file = cache + '.hash'
lib_file = cache + '.so'
obj_file = cache + '.o'
try:
if int(open(hash_file, 'r').read()) == hash(source):
return lib_file # depends on [control=['if'], data=[]]
raise ValueError # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
src_file = cache + '.cu'
fsrc = open(src_file, 'w')
fsrc.write(source)
fsrc.close()
cmd = ['nvcc', '-ccbin', 'g++', '-dc', '-m64', '--compiler-options', "'-fPIC'", '-o', obj_file, '-c', src_file]
print(' '.join(cmd))
subprocess.check_call(cmd)
cmd = ['nvcc', '-shared', '-ccbin', 'g++', '-m64', '-o', lib_file, obj_file, '-lcufft_static', '-lculibos']
print(' '.join(cmd))
subprocess.check_call(cmd)
hash_file = cache + '.hash'
fhash = open(hash_file, 'w')
fhash.write(str(hash(source)))
return lib_file |
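compile() shells out to nvcc twice: a relocatable device compile, then a shared link against cufft_static. A hedged usage sketch — it requires the CUDA toolkit on PATH and a writable pycbc cache directory, and the callback body below is illustrative only, not pycbc's actual kernel:

# Illustrative cuFFT load-callback source (assumption, not pycbc's own).
source = r'''
#include <cufft.h>
extern "C" __device__ cufftComplex noop_cb(void *in, size_t offset,
                                           void *info, void *shared) {
    return ((cufftComplex *)in)[offset];
}
'''
# lib_path = compile(source, "noop_callback")   # returns the cached .so path
# import ctypes; ctypes.CDLL(lib_path)          # bind for later cufft setup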
def _initParams(self):
"""
        initialize parameters to a vector of zeros
"""
params = SP.zeros(self.getNumberParams())
self.setParams(params) | def function[_initParams, parameter[self]]:
constant[
        initialize parameters to a vector of zeros
]
variable[params] assign[=] call[name[SP].zeros, parameter[call[name[self].getNumberParams, parameter[]]]]
call[name[self].setParams, parameter[name[params]]] | keyword[def] identifier[_initParams] ( identifier[self] ):
literal[string]
identifier[params] = identifier[SP] . identifier[zeros] ( identifier[self] . identifier[getNumberParams] ())
identifier[self] . identifier[setParams] ( identifier[params] ) | def _initParams(self):
"""
        initialize parameters to a vector of zeros
"""
params = SP.zeros(self.getNumberParams())
self.setParams(params) |
def _crop_pad_default(x, size, padding_mode='reflection', row_pct:uniform = 0.5, col_pct:uniform = 0.5):
"Crop and pad tfm - `row_pct`,`col_pct` sets focal point."
padding_mode = _pad_mode_convert[padding_mode]
size = tis2hw(size)
if x.shape[1:] == torch.Size(size): return x
rows,cols = size
row_pct,col_pct = _minus_epsilon(row_pct,col_pct)
if x.size(1)<rows or x.size(2)<cols:
row_pad = max((rows-x.size(1)+1)//2, 0)
col_pad = max((cols-x.size(2)+1)//2, 0)
x = F.pad(x[None], (col_pad,col_pad,row_pad,row_pad), mode=padding_mode)[0]
row = int((x.size(1)-rows+1)*row_pct)
col = int((x.size(2)-cols+1)*col_pct)
x = x[:, row:row+rows, col:col+cols]
return x.contiguous() | def function[_crop_pad_default, parameter[x, size, padding_mode, row_pct, col_pct]]:
constant[Crop and pad tfm - `row_pct`,`col_pct` sets focal point.]
variable[padding_mode] assign[=] call[name[_pad_mode_convert]][name[padding_mode]]
variable[size] assign[=] call[name[tis2hw], parameter[name[size]]]
if compare[call[name[x].shape][<ast.Slice object at 0x7da20e9b20e0>] equal[==] call[name[torch].Size, parameter[name[size]]]] begin[:]
return[name[x]]
<ast.Tuple object at 0x7da20e9b15a0> assign[=] name[size]
<ast.Tuple object at 0x7da20e9b19f0> assign[=] call[name[_minus_epsilon], parameter[name[row_pct], name[col_pct]]]
if <ast.BoolOp object at 0x7da20e9b2020> begin[:]
variable[row_pad] assign[=] call[name[max], parameter[binary_operation[binary_operation[binary_operation[name[rows] - call[name[x].size, parameter[constant[1]]]] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]], constant[0]]]
variable[col_pad] assign[=] call[name[max], parameter[binary_operation[binary_operation[binary_operation[name[cols] - call[name[x].size, parameter[constant[2]]]] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]], constant[0]]]
variable[x] assign[=] call[call[name[F].pad, parameter[call[name[x]][constant[None]], tuple[[<ast.Name object at 0x7da20e9b0b50>, <ast.Name object at 0x7da20e9b16c0>, <ast.Name object at 0x7da20e9b1480>, <ast.Name object at 0x7da20e9b1810>]]]]][constant[0]]
variable[row] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[call[name[x].size, parameter[constant[1]]] - name[rows]] + constant[1]] * name[row_pct]]]]
variable[col] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[call[name[x].size, parameter[constant[2]]] - name[cols]] + constant[1]] * name[col_pct]]]]
variable[x] assign[=] call[name[x]][tuple[[<ast.Slice object at 0x7da1b1e9ada0>, <ast.Slice object at 0x7da1b1e99ed0>, <ast.Slice object at 0x7da1b1e98b20>]]]
return[call[name[x].contiguous, parameter[]]] | keyword[def] identifier[_crop_pad_default] ( identifier[x] , identifier[size] , identifier[padding_mode] = literal[string] , identifier[row_pct] : identifier[uniform] = literal[int] , identifier[col_pct] : identifier[uniform] = literal[int] ):
literal[string]
identifier[padding_mode] = identifier[_pad_mode_convert] [ identifier[padding_mode] ]
identifier[size] = identifier[tis2hw] ( identifier[size] )
keyword[if] identifier[x] . identifier[shape] [ literal[int] :]== identifier[torch] . identifier[Size] ( identifier[size] ): keyword[return] identifier[x]
identifier[rows] , identifier[cols] = identifier[size]
identifier[row_pct] , identifier[col_pct] = identifier[_minus_epsilon] ( identifier[row_pct] , identifier[col_pct] )
keyword[if] identifier[x] . identifier[size] ( literal[int] )< identifier[rows] keyword[or] identifier[x] . identifier[size] ( literal[int] )< identifier[cols] :
identifier[row_pad] = identifier[max] (( identifier[rows] - identifier[x] . identifier[size] ( literal[int] )+ literal[int] )// literal[int] , literal[int] )
identifier[col_pad] = identifier[max] (( identifier[cols] - identifier[x] . identifier[size] ( literal[int] )+ literal[int] )// literal[int] , literal[int] )
identifier[x] = identifier[F] . identifier[pad] ( identifier[x] [ keyword[None] ],( identifier[col_pad] , identifier[col_pad] , identifier[row_pad] , identifier[row_pad] ), identifier[mode] = identifier[padding_mode] )[ literal[int] ]
identifier[row] = identifier[int] (( identifier[x] . identifier[size] ( literal[int] )- identifier[rows] + literal[int] )* identifier[row_pct] )
identifier[col] = identifier[int] (( identifier[x] . identifier[size] ( literal[int] )- identifier[cols] + literal[int] )* identifier[col_pct] )
identifier[x] = identifier[x] [:, identifier[row] : identifier[row] + identifier[rows] , identifier[col] : identifier[col] + identifier[cols] ]
keyword[return] identifier[x] . identifier[contiguous] () | def _crop_pad_default(x, size, padding_mode='reflection', row_pct: uniform=0.5, col_pct: uniform=0.5):
"""Crop and pad tfm - `row_pct`,`col_pct` sets focal point."""
padding_mode = _pad_mode_convert[padding_mode]
size = tis2hw(size)
if x.shape[1:] == torch.Size(size):
return x # depends on [control=['if'], data=[]]
(rows, cols) = size
(row_pct, col_pct) = _minus_epsilon(row_pct, col_pct)
if x.size(1) < rows or x.size(2) < cols:
row_pad = max((rows - x.size(1) + 1) // 2, 0)
col_pad = max((cols - x.size(2) + 1) // 2, 0)
x = F.pad(x[None], (col_pad, col_pad, row_pad, row_pad), mode=padding_mode)[0] # depends on [control=['if'], data=[]]
row = int((x.size(1) - rows + 1) * row_pct)
col = int((x.size(2) - cols + 1) * col_pct)
x = x[:, row:row + rows, col:col + cols]
return x.contiguous() |
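Pad-then-crop in isolation, assuming the module's 'reflection' mode maps onto torch's 'reflect' (which is what _pad_mode_convert appears to do):

import torch
import torch.nn.functional as F

x = torch.arange(2 * 4 * 4, dtype=torch.float32).reshape(2, 4, 4)
rows, cols = 6, 6                              # target larger -> pad first
row_pad = max((rows - x.size(1) + 1) // 2, 0)  # 1
col_pad = max((cols - x.size(2) + 1) // 2, 0)  # 1
padded = F.pad(x[None], (col_pad, col_pad, row_pad, row_pad),
               mode='reflect')[0]
assert padded.shape == (2, 6, 6)
row = int((padded.size(1) - rows + 1) * 0.5)   # focal point at the center
col = int((padded.size(2) - cols + 1) * 0.5)
crop = padded[:, row:row + rows, col:col + cols].contiguous()
assert crop.shape == (2, 6, 6)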
def before_render(self):
"""Before template render hook
"""
super(BatchFolderContentsView, self).before_render()
if self.context.portal_type == "BatchFolder":
self.request.set("disable_border", 1) | def function[before_render, parameter[self]]:
constant[Before template render hook
]
call[call[name[super], parameter[name[BatchFolderContentsView], name[self]]].before_render, parameter[]]
if compare[name[self].context.portal_type equal[==] constant[BatchFolder]] begin[:]
call[name[self].request.set, parameter[constant[disable_border], constant[1]]] | keyword[def] identifier[before_render] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[BatchFolderContentsView] , identifier[self] ). identifier[before_render] ()
keyword[if] identifier[self] . identifier[context] . identifier[portal_type] == literal[string] :
identifier[self] . identifier[request] . identifier[set] ( literal[string] , literal[int] ) | def before_render(self):
"""Before template render hook
"""
super(BatchFolderContentsView, self).before_render()
if self.context.portal_type == 'BatchFolder':
self.request.set('disable_border', 1) # depends on [control=['if'], data=[]] |
def get_cfg_router_ids(self, context, host, router_ids=None,
hosting_device_ids=None):
"""Returns IDs of routers scheduled to l3 agent on <host>"""
return self._l3plugin.cfg_list_router_ids_on_host(context, host,
router_ids,
hosting_device_ids) | def function[get_cfg_router_ids, parameter[self, context, host, router_ids, hosting_device_ids]]:
constant[Returns IDs of routers scheduled to l3 agent on <host>]
return[call[name[self]._l3plugin.cfg_list_router_ids_on_host, parameter[name[context], name[host], name[router_ids], name[hosting_device_ids]]]] | keyword[def] identifier[get_cfg_router_ids] ( identifier[self] , identifier[context] , identifier[host] , identifier[router_ids] = keyword[None] ,
identifier[hosting_device_ids] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_l3plugin] . identifier[cfg_list_router_ids_on_host] ( identifier[context] , identifier[host] ,
identifier[router_ids] ,
identifier[hosting_device_ids] ) | def get_cfg_router_ids(self, context, host, router_ids=None, hosting_device_ids=None):
"""Returns IDs of routers scheduled to l3 agent on <host>"""
return self._l3plugin.cfg_list_router_ids_on_host(context, host, router_ids, hosting_device_ids) |
def generate(self, model, outfolder):
"""
        Generate artifacts for a given model.
Attributes:
model:
Model for which to generate code.
outfolder:
Folder where code files are created.
"""
_logger.info('Generating code to {!r}.'.format(outfolder))
for task in self.tasks:
for element in task.filtered_elements(model):
task.run(element, outfolder) | def function[generate, parameter[self, model, outfolder]]:
constant[
Generate artifacts for given model.
Attributes:
model:
Model for which to generate code.
outfolder:
Folder where code files are created.
]
call[name[_logger].info, parameter[call[constant[Generating code to {!r}.].format, parameter[name[outfolder]]]]]
for taget[name[task]] in starred[name[self].tasks] begin[:]
for taget[name[element]] in starred[call[name[task].filtered_elements, parameter[name[model]]]] begin[:]
call[name[task].run, parameter[name[element], name[outfolder]]] | keyword[def] identifier[generate] ( identifier[self] , identifier[model] , identifier[outfolder] ):
literal[string]
identifier[_logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[outfolder] ))
keyword[for] identifier[task] keyword[in] identifier[self] . identifier[tasks] :
keyword[for] identifier[element] keyword[in] identifier[task] . identifier[filtered_elements] ( identifier[model] ):
identifier[task] . identifier[run] ( identifier[element] , identifier[outfolder] ) | def generate(self, model, outfolder):
"""
Generate artifacts for given model.
Attributes:
model:
Model for which to generate code.
outfolder:
Folder where code files are created.
"""
_logger.info('Generating code to {!r}.'.format(outfolder))
for task in self.tasks:
for element in task.filtered_elements(model):
task.run(element, outfolder) # depends on [control=['for'], data=['element']] # depends on [control=['for'], data=['task']] |
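generate() assumes only a duck-typed task interface (filtered_elements and run); a minimal illustration with a made-up task class:

# EchoTask and its selection rule are hypothetical, for illustration only.
class EchoTask:
    def filtered_elements(self, model):
        # Select the subset of model elements this task handles.
        return [e for e in model if e.endswith("Class")]

    def run(self, element, outfolder):
        print("would write %s into %s" % (element, outfolder))

for task in [EchoTask()]:
    for element in task.filtered_elements(["UserClass", "README"]):
        task.run(element, "gen/")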
def append(self, listIndex, changeType, initialValue=None, isMd5=False):
'''
Adds a change spec to the current list of changes. The `listIndex`
represents the line number (in multi-line mode) or word number (in
single-line mode), and must be **INCLUSIVE** of both additions and
deletions.
'''
if not isMd5 and initialValue is not None and len(initialValue) > 32:
initialValue = hashlib.md5(initialValue).hexdigest()
isMd5 = True
cur = adict(index = int(listIndex),
op = changeType,
ival = initialValue,
md5 = isMd5)
for idx, val in enumerate(self.current):
if val.index < cur.index:
continue
if val.index > cur.index:
self.current.insert(idx, cur)
break
# todo: this should never happen... (there should not be a change
# reported for the same line without a `pushChangeSpec()` between)
# todo: perhaps attempt a merging?...
raise InvalidChangeSpec('conflicting changes for index %d' % (cur.index,))
else:
self.current.append(cur) | def function[append, parameter[self, listIndex, changeType, initialValue, isMd5]]:
constant[
Adds a change spec to the current list of changes. The `listIndex`
represents the line number (in multi-line mode) or word number (in
single-line mode), and must be **INCLUSIVE** of both additions and
deletions.
]
if <ast.BoolOp object at 0x7da1afe78bb0> begin[:]
variable[initialValue] assign[=] call[call[name[hashlib].md5, parameter[name[initialValue]]].hexdigest, parameter[]]
variable[isMd5] assign[=] constant[True]
variable[cur] assign[=] call[name[adict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1afe7be80>, <ast.Name object at 0x7da1afe79720>]]] in starred[call[name[enumerate], parameter[name[self].current]]] begin[:]
if compare[name[val].index less[<] name[cur].index] begin[:]
continue
if compare[name[val].index greater[>] name[cur].index] begin[:]
call[name[self].current.insert, parameter[name[idx], name[cur]]]
break
<ast.Raise object at 0x7da1b0036a70> | keyword[def] identifier[append] ( identifier[self] , identifier[listIndex] , identifier[changeType] , identifier[initialValue] = keyword[None] , identifier[isMd5] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isMd5] keyword[and] identifier[initialValue] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[initialValue] )> literal[int] :
identifier[initialValue] = identifier[hashlib] . identifier[md5] ( identifier[initialValue] ). identifier[hexdigest] ()
identifier[isMd5] = keyword[True]
identifier[cur] = identifier[adict] ( identifier[index] = identifier[int] ( identifier[listIndex] ),
identifier[op] = identifier[changeType] ,
identifier[ival] = identifier[initialValue] ,
identifier[md5] = identifier[isMd5] )
keyword[for] identifier[idx] , identifier[val] keyword[in] identifier[enumerate] ( identifier[self] . identifier[current] ):
keyword[if] identifier[val] . identifier[index] < identifier[cur] . identifier[index] :
keyword[continue]
keyword[if] identifier[val] . identifier[index] > identifier[cur] . identifier[index] :
identifier[self] . identifier[current] . identifier[insert] ( identifier[idx] , identifier[cur] )
keyword[break]
keyword[raise] identifier[InvalidChangeSpec] ( literal[string] %( identifier[cur] . identifier[index] ,))
keyword[else] :
identifier[self] . identifier[current] . identifier[append] ( identifier[cur] ) | def append(self, listIndex, changeType, initialValue=None, isMd5=False):
"""
Adds a change spec to the current list of changes. The `listIndex`
represents the line number (in multi-line mode) or word number (in
single-line mode), and must be **INCLUSIVE** of both additions and
deletions.
"""
if not isMd5 and initialValue is not None and (len(initialValue) > 32):
initialValue = hashlib.md5(initialValue).hexdigest()
isMd5 = True # depends on [control=['if'], data=[]]
cur = adict(index=int(listIndex), op=changeType, ival=initialValue, md5=isMd5)
for (idx, val) in enumerate(self.current):
if val.index < cur.index:
continue # depends on [control=['if'], data=[]]
if val.index > cur.index:
self.current.insert(idx, cur)
break # depends on [control=['if'], data=[]]
# todo: this should never happen... (there should not be a change
# reported for the same line without a `pushChangeSpec()` between)
# todo: perhaps attempt a merging?...
raise InvalidChangeSpec('conflicting changes for index %d' % (cur.index,)) # depends on [control=['for'], data=[]]
else:
self.current.append(cur) |
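The ordered insert with its conflict check, reduced to bare indices — the for/else appends only when no existing index is greater than or equal to the new one:

current = []

def add(index):
    for i, val in enumerate(current):
        if val < index:
            continue
        if val > index:
            current.insert(i, index)
            break
        raise ValueError('conflicting changes for index %d' % (index,))
    else:
        current.append(index)

for n in (5, 2, 9):
    add(n)
assert current == [2, 5, 9]

try:
    add(5)                      # same index twice -> conflict
except ValueError:
    pass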
def get_converter(in_type, out_type, *args, **kwargs):
''' Scans the list of available Converters and returns an instantiation
of the first one whose input and output types match those passed in.
Args:
in_type (type): The type of input the converter must have.
out_type (type): The type of output the converter must have.
        args, kwargs: Optional positional and keyword arguments to pass on to
            the matching Converter's initializer.
'''
convs = pliers.converters.__all__
# If config includes default converters for this combination, try them
# first
out_type = listify(out_type)[::-1]
default_convs = config.get_option('default_converters')
for ot in out_type:
conv_str = '%s->%s' % (in_type.__name__, ot.__name__)
if conv_str in default_convs:
convs = list(default_convs[conv_str]) + convs
for name in convs:
cls = getattr(pliers.converters, name)
if not issubclass(cls, Converter):
continue
available = cls.available if issubclass(
cls, EnvironmentKeyMixin) else True
if cls._input_type == in_type and cls._output_type in out_type \
and available:
conv = cls(*args, **kwargs)
return conv
return None | def function[get_converter, parameter[in_type, out_type]]:
constant[ Scans the list of available Converters and returns an instantiation
of the first one whose input and output types match those passed in.
Args:
in_type (type): The type of input the converter must have.
out_type (type): The type of output the converter must have.
args, kwargs: Optional positional and keyword arguments to pass onto
matching Converter's initializer.
]
variable[convs] assign[=] name[pliers].converters.__all__
variable[out_type] assign[=] call[call[name[listify], parameter[name[out_type]]]][<ast.Slice object at 0x7da204962470>]
variable[default_convs] assign[=] call[name[config].get_option, parameter[constant[default_converters]]]
for taget[name[ot]] in starred[name[out_type]] begin[:]
variable[conv_str] assign[=] binary_operation[constant[%s->%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204960850>, <ast.Attribute object at 0x7da204963070>]]]
if compare[name[conv_str] in name[default_convs]] begin[:]
variable[convs] assign[=] binary_operation[call[name[list], parameter[call[name[default_convs]][name[conv_str]]]] + name[convs]]
for taget[name[name]] in starred[name[convs]] begin[:]
variable[cls] assign[=] call[name[getattr], parameter[name[pliers].converters, name[name]]]
if <ast.UnaryOp object at 0x7da204960820> begin[:]
continue
variable[available] assign[=] <ast.IfExp object at 0x7da204961120>
if <ast.BoolOp object at 0x7da204960fa0> begin[:]
variable[conv] assign[=] call[name[cls], parameter[<ast.Starred object at 0x7da204963fd0>]]
return[name[conv]]
return[constant[None]] | keyword[def] identifier[get_converter] ( identifier[in_type] , identifier[out_type] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[convs] = identifier[pliers] . identifier[converters] . identifier[__all__]
identifier[out_type] = identifier[listify] ( identifier[out_type] )[::- literal[int] ]
identifier[default_convs] = identifier[config] . identifier[get_option] ( literal[string] )
keyword[for] identifier[ot] keyword[in] identifier[out_type] :
identifier[conv_str] = literal[string] %( identifier[in_type] . identifier[__name__] , identifier[ot] . identifier[__name__] )
keyword[if] identifier[conv_str] keyword[in] identifier[default_convs] :
identifier[convs] = identifier[list] ( identifier[default_convs] [ identifier[conv_str] ])+ identifier[convs]
keyword[for] identifier[name] keyword[in] identifier[convs] :
identifier[cls] = identifier[getattr] ( identifier[pliers] . identifier[converters] , identifier[name] )
keyword[if] keyword[not] identifier[issubclass] ( identifier[cls] , identifier[Converter] ):
keyword[continue]
identifier[available] = identifier[cls] . identifier[available] keyword[if] identifier[issubclass] (
identifier[cls] , identifier[EnvironmentKeyMixin] ) keyword[else] keyword[True]
keyword[if] identifier[cls] . identifier[_input_type] == identifier[in_type] keyword[and] identifier[cls] . identifier[_output_type] keyword[in] identifier[out_type] keyword[and] identifier[available] :
identifier[conv] = identifier[cls] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[conv]
keyword[return] keyword[None] | def get_converter(in_type, out_type, *args, **kwargs):
""" Scans the list of available Converters and returns an instantiation
of the first one whose input and output types match those passed in.
Args:
in_type (type): The type of input the converter must have.
out_type (type): The type of output the converter must have.
args, kwargs: Optional positional and keyword arguments to pass onto
matching Converter's initializer.
"""
convs = pliers.converters.__all__
# If config includes default converters for this combination, try them
# first
out_type = listify(out_type)[::-1]
default_convs = config.get_option('default_converters')
for ot in out_type:
conv_str = '%s->%s' % (in_type.__name__, ot.__name__)
if conv_str in default_convs:
convs = list(default_convs[conv_str]) + convs # depends on [control=['if'], data=['conv_str', 'default_convs']] # depends on [control=['for'], data=['ot']]
for name in convs:
cls = getattr(pliers.converters, name)
if not issubclass(cls, Converter):
continue # depends on [control=['if'], data=[]]
available = cls.available if issubclass(cls, EnvironmentKeyMixin) else True
if cls._input_type == in_type and cls._output_type in out_type and available:
conv = cls(*args, **kwargs)
return conv # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
return None |
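The converter lookup keys its config table by class names joined as 'InType->OutType', then scans the registry for the first class whose _input_type and _output_type match. A hedged, self-contained sketch of that convention; the stim classes, converter, and registry below are hypothetical stand-ins for pliers' own:

class VideoStim: pass
class AudioStim: pass

class VideoToAudioConverter:                    # hypothetical converter
    _input_type, _output_type = VideoStim, AudioStim

registry = [VideoToAudioConverter]
default_convs = {'VideoStim->AudioStim': ['VideoToAudioConverter']}

conv_str = '%s->%s' % (VideoStim.__name__, AudioStim.__name__)
assert conv_str in default_convs                # config consulted first
match = next(cls for cls in registry
             if cls._input_type is VideoStim and cls._output_type is AudioStim)
print(match.__name__)                           # VideoToAudioConverter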
def encode_keys(self, keys):
""" Run the encoder on a dict of values """
return dict(((k, self.encode(v)) for k, v in six.iteritems(keys) if not
is_null(v))) | def function[encode_keys, parameter[self, keys]]:
constant[ Run the encoder on a dict of values ]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da2043444f0>]]] | keyword[def] identifier[encode_keys] ( identifier[self] , identifier[keys] ):
literal[string]
keyword[return] identifier[dict] ((( identifier[k] , identifier[self] . identifier[encode] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[keys] ) keyword[if] keyword[not]
identifier[is_null] ( identifier[v] ))) | def encode_keys(self, keys):
""" Run the encoder on a dict of values """
return dict(((k, self.encode(v)) for (k, v) in six.iteritems(keys) if not is_null(v))) |
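Note the comprehension silently drops any entry whose value is judged null. A quick illustration, with str standing in for self.encode and a plain None check standing in for is_null (whose real definition lives elsewhere in the module):

keys = {'id': 5, 'note': None, 'tags': []}
encoded = {k: str(v) for k, v in keys.items() if v is not None}
print(encoded)   # {'id': '5', 'tags': '[]'} -- the None-valued entry is gone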
def longest_increasing_subsequence(x):
"""Longest increasing subsequence
:param x: sequence
:returns: longest strictly increasing subsequence y
:complexity: `O(|x|*log(|y|))`
"""
n = len(x)
p = [None] * n
h = [None]
b = [float('-inf')] # - infinity
for i in range(n):
if x[i] > b[-1]:
p[i] = h[-1]
h.append(i)
b.append(x[i])
else:
# -- binary search: b[k - 1] < x[i] <= b[k]
k = bisect_left(b, x[i])
h[k] = i
b[k] = x[i]
p[i] = h[k - 1]
# extract solution
q = h[-1]
s = []
while q is not None:
s.append(x[q])
q = p[q]
return s[::-1] | def function[longest_increasing_subsequence, parameter[x]]:
constant[Longest increasing subsequence
:param x: sequence
:returns: longest strictly increasing subsequence y
:complexity: `O(|x|*log(|y|))`
]
variable[n] assign[=] call[name[len], parameter[name[x]]]
variable[p] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b07cfa30>]] * name[n]]
variable[h] assign[=] list[[<ast.Constant object at 0x7da1b07cc670>]]
variable[b] assign[=] list[[<ast.Call object at 0x7da1b07cc7c0>]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
if compare[call[name[x]][name[i]] greater[>] call[name[b]][<ast.UnaryOp object at 0x7da20c992350>]] begin[:]
call[name[p]][name[i]] assign[=] call[name[h]][<ast.UnaryOp object at 0x7da20c992620>]
call[name[h].append, parameter[name[i]]]
call[name[b].append, parameter[call[name[x]][name[i]]]]
variable[q] assign[=] call[name[h]][<ast.UnaryOp object at 0x7da20c991450>]
variable[s] assign[=] list[[]]
while compare[name[q] is_not constant[None]] begin[:]
call[name[s].append, parameter[call[name[x]][name[q]]]]
variable[q] assign[=] call[name[p]][name[q]]
return[call[name[s]][<ast.Slice object at 0x7da20c992c50>]] | keyword[def] identifier[longest_increasing_subsequence] ( identifier[x] ):
literal[string]
identifier[n] = identifier[len] ( identifier[x] )
identifier[p] =[ keyword[None] ]* identifier[n]
identifier[h] =[ keyword[None] ]
identifier[b] =[ identifier[float] ( literal[string] )]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
keyword[if] identifier[x] [ identifier[i] ]> identifier[b] [- literal[int] ]:
identifier[p] [ identifier[i] ]= identifier[h] [- literal[int] ]
identifier[h] . identifier[append] ( identifier[i] )
identifier[b] . identifier[append] ( identifier[x] [ identifier[i] ])
keyword[else] :
identifier[k] = identifier[bisect_left] ( identifier[b] , identifier[x] [ identifier[i] ])
identifier[h] [ identifier[k] ]= identifier[i]
identifier[b] [ identifier[k] ]= identifier[x] [ identifier[i] ]
identifier[p] [ identifier[i] ]= identifier[h] [ identifier[k] - literal[int] ]
identifier[q] = identifier[h] [- literal[int] ]
identifier[s] =[]
keyword[while] identifier[q] keyword[is] keyword[not] keyword[None] :
identifier[s] . identifier[append] ( identifier[x] [ identifier[q] ])
identifier[q] = identifier[p] [ identifier[q] ]
keyword[return] identifier[s] [::- literal[int] ] | def longest_increasing_subsequence(x):
"""Longest increasing subsequence
:param x: sequence
:returns: longest strictly increasing subsequence y
:complexity: `O(|x|*log(|y|))`
"""
n = len(x)
p = [None] * n
h = [None]
b = [float('-inf')] # - infinity
for i in range(n):
if x[i] > b[-1]:
p[i] = h[-1]
h.append(i)
b.append(x[i]) # depends on [control=['if'], data=[]]
else:
# -- binary search: b[k - 1] < x[i] <= b[k]
k = bisect_left(b, x[i])
h[k] = i
b[k] = x[i]
p[i] = h[k - 1] # depends on [control=['for'], data=['i']]
# extract solution
q = h[-1]
s = []
while q is not None:
s.append(x[q])
q = p[q] # depends on [control=['while'], data=['q']]
return s[::-1] |
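A worked run; note the function calls bisect_left, so the import below must already be in scope wherever the snippet is pasted:

from bisect import bisect_left

x = [4, 1, 5, 2, 3]
# b evolves: [-inf] -> [-inf, 4] -> [-inf, 1] -> [-inf, 1, 5]
#         -> [-inf, 1, 2] -> [-inf, 1, 2, 3]
print(longest_increasing_subsequence(x))   # [1, 2, 3]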
def _check_nodegroup_minions(self, expr, greedy): # pylint: disable=unused-argument
'''
Return minions found by looking at nodegroups
'''
return self._check_compound_minions(nodegroup_comp(expr, self.opts['nodegroups']),
DEFAULT_TARGET_DELIM,
greedy) | def function[_check_nodegroup_minions, parameter[self, expr, greedy]]:
constant[
Return minions found by looking at nodegroups
]
return[call[name[self]._check_compound_minions, parameter[call[name[nodegroup_comp], parameter[name[expr], call[name[self].opts][constant[nodegroups]]]], name[DEFAULT_TARGET_DELIM], name[greedy]]]] | keyword[def] identifier[_check_nodegroup_minions] ( identifier[self] , identifier[expr] , identifier[greedy] ):
literal[string]
keyword[return] identifier[self] . identifier[_check_compound_minions] ( identifier[nodegroup_comp] ( identifier[expr] , identifier[self] . identifier[opts] [ literal[string] ]),
identifier[DEFAULT_TARGET_DELIM] ,
identifier[greedy] ) | def _check_nodegroup_minions(self, expr, greedy): # pylint: disable=unused-argument
'\n Return minions found by looking at nodegroups\n '
return self._check_compound_minions(nodegroup_comp(expr, self.opts['nodegroups']), DEFAULT_TARGET_DELIM, greedy) |
def _start_browsing_some_sites(self):
'''
Starts browsing some sites.
Raises:
NoBrowsersAvailable if none available
'''
# acquire_multi() raises NoBrowsersAvailable if none available
browsers = self._browser_pool.acquire_multi(
(self._browser_pool.num_available() + 1) // 2)
try:
sites = self._frontier.claim_sites(len(browsers))
except:
self._browser_pool.release_all(browsers)
raise
for i in range(len(browsers)):
if i < len(sites):
th = threading.Thread(
target=self._brozzle_site_thread_target,
args=(browsers[i], sites[i]),
name="BrozzlingThread:%s" % browsers[i].chrome.port,
daemon=True)
with self._browsing_threads_lock:
self._browsing_threads.add(th)
th.start()
else:
self._browser_pool.release(browsers[i]) | def function[_start_browsing_some_sites, parameter[self]]:
constant[
Starts browsing some sites.
Raises:
NoBrowsersAvailable if none available
]
variable[browsers] assign[=] call[name[self]._browser_pool.acquire_multi, parameter[binary_operation[binary_operation[call[name[self]._browser_pool.num_available, parameter[]] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]]]
<ast.Try object at 0x7da1b20fa530>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[browsers]]]]]] begin[:]
if compare[name[i] less[<] call[name[len], parameter[name[sites]]]] begin[:]
variable[th] assign[=] call[name[threading].Thread, parameter[]]
with name[self]._browsing_threads_lock begin[:]
call[name[self]._browsing_threads.add, parameter[name[th]]]
call[name[th].start, parameter[]] | keyword[def] identifier[_start_browsing_some_sites] ( identifier[self] ):
literal[string]
identifier[browsers] = identifier[self] . identifier[_browser_pool] . identifier[acquire_multi] (
( identifier[self] . identifier[_browser_pool] . identifier[num_available] ()+ literal[int] )// literal[int] )
keyword[try] :
identifier[sites] = identifier[self] . identifier[_frontier] . identifier[claim_sites] ( identifier[len] ( identifier[browsers] ))
keyword[except] :
identifier[self] . identifier[_browser_pool] . identifier[release_all] ( identifier[browsers] )
keyword[raise]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[browsers] )):
keyword[if] identifier[i] < identifier[len] ( identifier[sites] ):
identifier[th] = identifier[threading] . identifier[Thread] (
identifier[target] = identifier[self] . identifier[_brozzle_site_thread_target] ,
identifier[args] =( identifier[browsers] [ identifier[i] ], identifier[sites] [ identifier[i] ]),
identifier[name] = literal[string] % identifier[browsers] [ identifier[i] ]. identifier[chrome] . identifier[port] ,
identifier[daemon] = keyword[True] )
keyword[with] identifier[self] . identifier[_browsing_threads_lock] :
identifier[self] . identifier[_browsing_threads] . identifier[add] ( identifier[th] )
identifier[th] . identifier[start] ()
keyword[else] :
identifier[self] . identifier[_browser_pool] . identifier[release] ( identifier[browsers] [ identifier[i] ]) | def _start_browsing_some_sites(self):
"""
Starts browsing some sites.
Raises:
NoBrowsersAvailable if none available
"""
# acquire_multi() raises NoBrowsersAvailable if none available
browsers = self._browser_pool.acquire_multi((self._browser_pool.num_available() + 1) // 2)
try:
sites = self._frontier.claim_sites(len(browsers)) # depends on [control=['try'], data=[]]
except:
self._browser_pool.release_all(browsers)
raise # depends on [control=['except'], data=[]]
for i in range(len(browsers)):
if i < len(sites):
th = threading.Thread(target=self._brozzle_site_thread_target, args=(browsers[i], sites[i]), name='BrozzlingThread:%s' % browsers[i].chrome.port, daemon=True)
with self._browsing_threads_lock:
self._browsing_threads.add(th) # depends on [control=['with'], data=[]]
th.start() # depends on [control=['if'], data=['i']]
else:
self._browser_pool.release(browsers[i]) # depends on [control=['for'], data=['i']] |
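The try/except around claim_sites is the load-bearing part: browsers already acquired must be handed back if claiming work fails, and any browser beyond the number of claimed sites is released immediately. A toy, runnable sketch of that discipline; Pool and claim_sites below are hypothetical stand-ins for brozzler's browser pool and frontier:

class Pool:                                       # hypothetical stand-in
    def __init__(self, n): self.free = list(range(n))
    def num_available(self): return len(self.free)
    def acquire_multi(self, n):
        taken, self.free = self.free[:n], self.free[n:]
        return taken
    def release(self, b): self.free.append(b)
    def release_all(self, bs): self.free.extend(bs)

def claim_sites(n):                               # pretend only one site is claimable
    return ['site-0'][:n]

pool = Pool(4)
browsers = pool.acquire_multi((pool.num_available() + 1) // 2)   # take 2 of 4
try:
    sites = claim_sites(len(browsers))
except Exception:
    pool.release_all(browsers)                    # never leak browsers on failure
    raise
for i, b in enumerate(browsers):
    if i < len(sites):
        pass                                      # a brozzling thread would start here
    else:
        pool.release(b)                           # surplus browser goes straight back
print(pool.num_available())                       # 3: one browser busy, three free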
def run_local(
context: cli.CommandContext,
project: projects.Project,
project_steps: typing.List[projects.ProjectStep],
force: bool,
continue_after: bool,
single_step: bool,
limit: int,
print_status: bool,
skip_library_reload: bool = False
) -> environ.Response:
"""
Execute the run command locally within this cauldron environment
:param context:
:param project:
:param project_steps:
:param force:
:param continue_after:
:param single_step:
:param limit:
:param print_status:
:param skip_library_reload:
Whether or not to skip reloading all project libraries prior to
execution of the project. By default this is False in which case
the project libraries are reloaded prior to execution.
:return:
"""
skip_reload = (
skip_library_reload
or environ.modes.has(environ.modes.TESTING)
)
if not skip_reload:
runner.reload_libraries()
environ.log_header('RUNNING', 5)
steps_run = []
if single_step:
# If the user specifies the single step flag, only run one step. Force
# the step to be run if they specified it explicitly
ps = project_steps[0] if len(project_steps) > 0 else None
force = force or (single_step and bool(ps is not None))
steps_run = runner.section(
response=context.response,
project=project,
starting=ps,
limit=1,
force=force
)
elif continue_after or len(project_steps) == 0:
# If the continue after flag is set, start with the specified step
# and run the rest of the project after that. Or, if no steps were
# specified, run the entire project with the specified flags.
ps = project_steps[0] if len(project_steps) > 0 else None
steps_run = runner.complete(
context.response,
project,
ps,
force=force,
limit=limit
)
else:
for ps in project_steps:
steps_run += runner.section(
response=context.response,
project=project,
starting=ps,
limit=max(1, limit),
force=force or (limit < 1 and len(project_steps) < 2),
skips=steps_run + []
)
project.write()
environ.log_blanks()
step_changes = []
for ps in steps_run:
step_changes.append(dict(
name=ps.definition.name,
action='updated',
step=writing.step_writer.serialize(ps)._asdict()
))
context.response.update(step_changes=step_changes)
if print_status or context.response.failed:
context.response.update(project=project.kernel_serialize())
return context.response | def function[run_local, parameter[context, project, project_steps, force, continue_after, single_step, limit, print_status, skip_library_reload]]:
constant[
Execute the run command locally within this cauldron environment
:param context:
:param project:
:param project_steps:
:param force:
:param continue_after:
:param single_step:
:param limit:
:param print_status:
:param skip_library_reload:
Whether or not to skip reloading all project libraries prior to
execution of the project. By default this is False in which case
the project libraries are reloaded prior to execution.
:return:
]
variable[skip_reload] assign[=] <ast.BoolOp object at 0x7da1b1bed9c0>
if <ast.UnaryOp object at 0x7da1b1bef250> begin[:]
call[name[runner].reload_libraries, parameter[]]
call[name[environ].log_header, parameter[constant[RUNNING], constant[5]]]
variable[steps_run] assign[=] list[[]]
if name[single_step] begin[:]
variable[ps] assign[=] <ast.IfExp object at 0x7da1b1bec0d0>
variable[force] assign[=] <ast.BoolOp object at 0x7da1b1becb20>
variable[steps_run] assign[=] call[name[runner].section, parameter[]]
call[name[project].write, parameter[]]
call[name[environ].log_blanks, parameter[]]
variable[step_changes] assign[=] list[[]]
for taget[name[ps]] in starred[name[steps_run]] begin[:]
call[name[step_changes].append, parameter[call[name[dict], parameter[]]]]
call[name[context].response.update, parameter[]]
if <ast.BoolOp object at 0x7da18f00f4c0> begin[:]
call[name[context].response.update, parameter[]]
return[name[context].response] | keyword[def] identifier[run_local] (
identifier[context] : identifier[cli] . identifier[CommandContext] ,
identifier[project] : identifier[projects] . identifier[Project] ,
identifier[project_steps] : identifier[typing] . identifier[List] [ identifier[projects] . identifier[ProjectStep] ],
identifier[force] : identifier[bool] ,
identifier[continue_after] : identifier[bool] ,
identifier[single_step] : identifier[bool] ,
identifier[limit] : identifier[int] ,
identifier[print_status] : identifier[bool] ,
identifier[skip_library_reload] : identifier[bool] = keyword[False]
)-> identifier[environ] . identifier[Response] :
literal[string]
identifier[skip_reload] =(
identifier[skip_library_reload]
keyword[or] identifier[environ] . identifier[modes] . identifier[has] ( identifier[environ] . identifier[modes] . identifier[TESTING] )
)
keyword[if] keyword[not] identifier[skip_reload] :
identifier[runner] . identifier[reload_libraries] ()
identifier[environ] . identifier[log_header] ( literal[string] , literal[int] )
identifier[steps_run] =[]
keyword[if] identifier[single_step] :
identifier[ps] = identifier[project_steps] [ literal[int] ] keyword[if] identifier[len] ( identifier[project_steps] )> literal[int] keyword[else] keyword[None]
identifier[force] = identifier[force] keyword[or] ( identifier[single_step] keyword[and] identifier[bool] ( identifier[ps] keyword[is] keyword[not] keyword[None] ))
identifier[steps_run] = identifier[runner] . identifier[section] (
identifier[response] = identifier[context] . identifier[response] ,
identifier[project] = identifier[project] ,
identifier[starting] = identifier[ps] ,
identifier[limit] = literal[int] ,
identifier[force] = identifier[force]
)
keyword[elif] identifier[continue_after] keyword[or] identifier[len] ( identifier[project_steps] )== literal[int] :
identifier[ps] = identifier[project_steps] [ literal[int] ] keyword[if] identifier[len] ( identifier[project_steps] )> literal[int] keyword[else] keyword[None]
identifier[steps_run] = identifier[runner] . identifier[complete] (
identifier[context] . identifier[response] ,
identifier[project] ,
identifier[ps] ,
identifier[force] = identifier[force] ,
identifier[limit] = identifier[limit]
)
keyword[else] :
keyword[for] identifier[ps] keyword[in] identifier[project_steps] :
identifier[steps_run] += identifier[runner] . identifier[section] (
identifier[response] = identifier[context] . identifier[response] ,
identifier[project] = identifier[project] ,
identifier[starting] = identifier[ps] ,
identifier[limit] = identifier[max] ( literal[int] , identifier[limit] ),
identifier[force] = identifier[force] keyword[or] ( identifier[limit] < literal[int] keyword[and] identifier[len] ( identifier[project_steps] )< literal[int] ),
identifier[skips] = identifier[steps_run] +[]
)
identifier[project] . identifier[write] ()
identifier[environ] . identifier[log_blanks] ()
identifier[step_changes] =[]
keyword[for] identifier[ps] keyword[in] identifier[steps_run] :
identifier[step_changes] . identifier[append] ( identifier[dict] (
identifier[name] = identifier[ps] . identifier[definition] . identifier[name] ,
identifier[action] = literal[string] ,
identifier[step] = identifier[writing] . identifier[step_writer] . identifier[serialize] ( identifier[ps] ). identifier[_asdict] ()
))
identifier[context] . identifier[response] . identifier[update] ( identifier[step_changes] = identifier[step_changes] )
keyword[if] identifier[print_status] keyword[or] identifier[context] . identifier[response] . identifier[failed] :
identifier[context] . identifier[response] . identifier[update] ( identifier[project] = identifier[project] . identifier[kernel_serialize] ())
keyword[return] identifier[context] . identifier[response] | def run_local(context: cli.CommandContext, project: projects.Project, project_steps: typing.List[projects.ProjectStep], force: bool, continue_after: bool, single_step: bool, limit: int, print_status: bool, skip_library_reload: bool=False) -> environ.Response:
"""
Execute the run command locally within this cauldron environment
:param context:
:param project:
:param project_steps:
:param force:
:param continue_after:
:param single_step:
:param limit:
:param print_status:
:param skip_library_reload:
Whether or not to skip reloading all project libraries prior to
execution of the project. By default this is False in which case
the project libraries are reloaded prior to execution.
:return:
"""
skip_reload = skip_library_reload or environ.modes.has(environ.modes.TESTING)
if not skip_reload:
runner.reload_libraries() # depends on [control=['if'], data=[]]
environ.log_header('RUNNING', 5)
steps_run = []
if single_step:
# If the user specifies the single step flag, only run one step. Force
# the step to be run if they specified it explicitly
ps = project_steps[0] if len(project_steps) > 0 else None
force = force or (single_step and bool(ps is not None))
steps_run = runner.section(response=context.response, project=project, starting=ps, limit=1, force=force) # depends on [control=['if'], data=[]]
elif continue_after or len(project_steps) == 0:
# If the continue after flag is set, start with the specified step
# and run the rest of the project after that. Or, if no steps were
# specified, run the entire project with the specified flags.
ps = project_steps[0] if len(project_steps) > 0 else None
steps_run = runner.complete(context.response, project, ps, force=force, limit=limit) # depends on [control=['if'], data=[]]
else:
for ps in project_steps:
steps_run += runner.section(response=context.response, project=project, starting=ps, limit=max(1, limit), force=force or (limit < 1 and len(project_steps) < 2), skips=steps_run + []) # depends on [control=['for'], data=['ps']]
project.write()
environ.log_blanks()
step_changes = []
for ps in steps_run:
step_changes.append(dict(name=ps.definition.name, action='updated', step=writing.step_writer.serialize(ps)._asdict())) # depends on [control=['for'], data=['ps']]
context.response.update(step_changes=step_changes)
if print_status or context.response.failed:
context.response.update(project=project.kernel_serialize()) # depends on [control=['if'], data=[]]
return context.response |
def array_to_hdf5(a, parent, name, **kwargs):
"""Write a Numpy array to an HDF5 dataset.
Parameters
----------
a : ndarray
Data to write.
parent : string or h5py group
Parent HDF5 file or group. If a string, will be treated as HDF5 file
name.
name : string
Name or path of dataset to write data into.
kwargs : keyword arguments
Passed through to h5py require_dataset() function.
Returns
-------
h5d : h5py dataset
"""
import h5py
h5f = None
if isinstance(parent, str):
h5f = h5py.File(parent, mode='a')
parent = h5f
try:
kwargs.setdefault('chunks', True) # auto-chunking
kwargs.setdefault('dtype', a.dtype)
kwargs.setdefault('compression', 'gzip')
h5d = parent.require_dataset(name, shape=a.shape, **kwargs)
h5d[...] = a
return h5d
finally:
if h5f is not None:
h5f.close() | def function[array_to_hdf5, parameter[a, parent, name]]:
constant[Write a Numpy array to an HDF5 dataset.
Parameters
----------
a : ndarray
Data to write.
parent : string or h5py group
Parent HDF5 file or group. If a string, will be treated as HDF5 file
name.
name : string
Name or path of dataset to write data into.
kwargs : keyword arguments
Passed through to h5py require_dataset() function.
Returns
-------
h5d : h5py dataset
]
import module[h5py]
variable[h5f] assign[=] constant[None]
if call[name[isinstance], parameter[name[parent], name[str]]] begin[:]
variable[h5f] assign[=] call[name[h5py].File, parameter[name[parent]]]
variable[parent] assign[=] name[h5f]
<ast.Try object at 0x7da2041dbca0> | keyword[def] identifier[array_to_hdf5] ( identifier[a] , identifier[parent] , identifier[name] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[h5py]
identifier[h5f] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[parent] , identifier[str] ):
identifier[h5f] = identifier[h5py] . identifier[File] ( identifier[parent] , identifier[mode] = literal[string] )
identifier[parent] = identifier[h5f]
keyword[try] :
identifier[kwargs] . identifier[setdefault] ( literal[string] , keyword[True] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[a] . identifier[dtype] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[h5d] = identifier[parent] . identifier[require_dataset] ( identifier[name] , identifier[shape] = identifier[a] . identifier[shape] ,** identifier[kwargs] )
identifier[h5d] [...]= identifier[a]
keyword[return] identifier[h5d]
keyword[finally] :
keyword[if] identifier[h5f] keyword[is] keyword[not] keyword[None] :
identifier[h5f] . identifier[close] () | def array_to_hdf5(a, parent, name, **kwargs):
"""Write a Numpy array to an HDF5 dataset.
Parameters
----------
a : ndarray
Data to write.
parent : string or h5py group
Parent HDF5 file or group. If a string, will be treated as HDF5 file
name.
name : string
Name or path of dataset to write data into.
kwargs : keyword arguments
Passed through to h5py require_dataset() function.
Returns
-------
h5d : h5py dataset
"""
import h5py
h5f = None
if isinstance(parent, str):
h5f = h5py.File(parent, mode='a')
parent = h5f # depends on [control=['if'], data=[]]
try:
kwargs.setdefault('chunks', True) # auto-chunking
kwargs.setdefault('dtype', a.dtype)
kwargs.setdefault('compression', 'gzip')
h5d = parent.require_dataset(name, shape=a.shape, **kwargs)
h5d[...] = a
return h5d # depends on [control=['try'], data=[]]
finally:
if h5f is not None:
h5f.close() # depends on [control=['if'], data=['h5f']] |
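A minimal usage sketch, assuming numpy and h5py are installed and array_to_hdf5 is importable. One subtlety: when parent is a filename string, the finally block closes the file, so the returned dataset handle is backed by a closed file; pass an open h5py.File or group if you need to keep using the return value:

import numpy as np
import h5py

a = np.arange(12).reshape(3, 4)

array_to_hdf5(a, 'demo.h5', 'data')        # written, but the handle's file is closed

with h5py.File('demo.h5', 'a') as f:       # open-group form keeps the handle live
    h5d = array_to_hdf5(a, f, 'data2')
    print(h5d.shape, h5d.compression)      # (3, 4) gzip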
def vlans(self):
"""list[dict]: A list of dictionary items describing the details of
vlan interfaces.
This method fetches the VLAN interfaces
Examples:
>>> import pynos.device
>>> switch = '10.24.39.202'
>>> auth = ('admin', 'password')
>>> conn = (switch, '22')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.add_vlan_int('736')
... interfaces = dev.interface.vlans
... is_vlan_interface_present = False
... for interface in interfaces:
... if interface['vlan-id'] == '736':
... is_vlan_interface_present = True
... break
... dev.interface.del_vlan_int('736')
... assert is_vlan_interface_present
True
"""
urn = "{urn:brocade.com:mgmt:brocade-interface-ext}"
result = []
has_more = ''
last_vlan_id = ''
while (has_more == '') or (has_more == 'true'):
request_interface = self.get_vlan_brief_request(last_vlan_id)
interface_result = self._callback(request_interface, 'get')
has_more = self.get_node_value(interface_result, '%shas-more', urn)
last_vlan_id = self.get_node_value(
interface_result, '%slast-vlan-id', urn)
for interface in interface_result.findall('%svlan' % urn):
vlan_id = self.get_node_value(interface, '%svlan-id', urn)
vlan_type = self.get_node_value(interface, '%svlan-type', urn)
vlan_name = self.get_node_value(interface, '%svlan-name', urn)
vlan_state = self.get_node_value(
interface, '%svlan-state', urn)
ports = []
for intf in interface.findall('%sinterface' % urn):
interface_type = self.get_node_value(
intf, '%sinterface-type', urn)
interface_name = self.get_node_value(
intf, '%sinterface-name', urn)
tag = self.get_node_value(intf, '%stag', urn)
port_results = {'interface-type': interface_type,
'interface-name': interface_name,
'tag': tag}
ports.append(port_results)
results = {'interface-name': vlan_name,
'vlan-state': vlan_state,
'vlan-id': vlan_id,
'vlan-type': vlan_type,
'interface': ports}
result.append(results)
return result | def function[vlans, parameter[self]]:
constant[list[dict]: A list of dictionary items describing the details of
vlan interfaces.
This method fetches the VLAN interfaces
Examples:
>>> import pynos.device
>>> switch = '10.24.39.202'
>>> auth = ('admin', 'password')
>>> conn = (switch, '22')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.add_vlan_int('736')
... interfaces = dev.interface.vlans
... is_vlan_interface_present = False
... for interface in interfaces:
... if interface['vlan-id'] == '736':
... is_vlan_interface_present = True
... break
... dev.interface.del_vlan_int('736')
... assert is_vlan_interface_present
True
]
variable[urn] assign[=] constant[{urn:brocade.com:mgmt:brocade-interface-ext}]
variable[result] assign[=] list[[]]
variable[has_more] assign[=] constant[]
variable[last_vlan_id] assign[=] constant[]
while <ast.BoolOp object at 0x7da1b26ad0f0> begin[:]
variable[request_interface] assign[=] call[name[self].get_vlan_brief_request, parameter[name[last_vlan_id]]]
variable[interface_result] assign[=] call[name[self]._callback, parameter[name[request_interface], constant[get]]]
variable[has_more] assign[=] call[name[self].get_node_value, parameter[name[interface_result], constant[%shas-more], name[urn]]]
variable[last_vlan_id] assign[=] call[name[self].get_node_value, parameter[name[interface_result], constant[%slast-vlan-id], name[urn]]]
for taget[name[interface]] in starred[call[name[interface_result].findall, parameter[binary_operation[constant[%svlan] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
variable[vlan_id] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%svlan-id], name[urn]]]
variable[vlan_type] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%svlan-type], name[urn]]]
variable[vlan_name] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%svlan-name], name[urn]]]
variable[vlan_state] assign[=] call[name[self].get_node_value, parameter[name[interface], constant[%svlan-state], name[urn]]]
variable[ports] assign[=] list[[]]
for taget[name[intf]] in starred[call[name[interface].findall, parameter[binary_operation[constant[%sinterface] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
variable[interface_type] assign[=] call[name[self].get_node_value, parameter[name[intf], constant[%sinterface-type], name[urn]]]
variable[interface_name] assign[=] call[name[self].get_node_value, parameter[name[intf], constant[%sinterface-name], name[urn]]]
variable[tag] assign[=] call[name[self].get_node_value, parameter[name[intf], constant[%stag], name[urn]]]
variable[port_results] assign[=] dictionary[[<ast.Constant object at 0x7da18f722c50>, <ast.Constant object at 0x7da18f723430>, <ast.Constant object at 0x7da18f721990>], [<ast.Name object at 0x7da18f723b50>, <ast.Name object at 0x7da18f722440>, <ast.Name object at 0x7da18f721d80>]]
call[name[ports].append, parameter[name[port_results]]]
variable[results] assign[=] dictionary[[<ast.Constant object at 0x7da18f7202b0>, <ast.Constant object at 0x7da18f723e80>, <ast.Constant object at 0x7da18f723310>, <ast.Constant object at 0x7da18f7219f0>, <ast.Constant object at 0x7da18f7221a0>], [<ast.Name object at 0x7da18f722290>, <ast.Name object at 0x7da18f720850>, <ast.Name object at 0x7da18f723df0>, <ast.Name object at 0x7da18f721b10>, <ast.Name object at 0x7da18f723100>]]
call[name[result].append, parameter[name[results]]]
return[name[result]] | keyword[def] identifier[vlans] ( identifier[self] ):
literal[string]
identifier[urn] = literal[string]
identifier[result] =[]
identifier[has_more] = literal[string]
identifier[last_vlan_id] = literal[string]
keyword[while] ( identifier[has_more] == literal[string] ) keyword[or] ( identifier[has_more] == literal[string] ):
identifier[request_interface] = identifier[self] . identifier[get_vlan_brief_request] ( identifier[last_vlan_id] )
identifier[interface_result] = identifier[self] . identifier[_callback] ( identifier[request_interface] , literal[string] )
identifier[has_more] = identifier[self] . identifier[get_node_value] ( identifier[interface_result] , literal[string] , identifier[urn] )
identifier[last_vlan_id] = identifier[self] . identifier[get_node_value] (
identifier[interface_result] , literal[string] , identifier[urn] )
keyword[for] identifier[interface] keyword[in] identifier[interface_result] . identifier[findall] ( literal[string] % identifier[urn] ):
identifier[vlan_id] = identifier[self] . identifier[get_node_value] ( identifier[interface] , literal[string] , identifier[urn] )
identifier[vlan_type] = identifier[self] . identifier[get_node_value] ( identifier[interface] , literal[string] , identifier[urn] )
identifier[vlan_name] = identifier[self] . identifier[get_node_value] ( identifier[interface] , literal[string] , identifier[urn] )
identifier[vlan_state] = identifier[self] . identifier[get_node_value] (
identifier[interface] , literal[string] , identifier[urn] )
identifier[ports] =[]
keyword[for] identifier[intf] keyword[in] identifier[interface] . identifier[findall] ( literal[string] % identifier[urn] ):
identifier[interface_type] = identifier[self] . identifier[get_node_value] (
identifier[intf] , literal[string] , identifier[urn] )
identifier[interface_name] = identifier[self] . identifier[get_node_value] (
identifier[intf] , literal[string] , identifier[urn] )
identifier[tag] = identifier[self] . identifier[get_node_value] ( identifier[intf] , literal[string] , identifier[urn] )
identifier[port_results] ={ literal[string] : identifier[interface_type] ,
literal[string] : identifier[interface_name] ,
literal[string] : identifier[tag] }
identifier[ports] . identifier[append] ( identifier[port_results] )
identifier[results] ={ literal[string] : identifier[vlan_name] ,
literal[string] : identifier[vlan_state] ,
literal[string] : identifier[vlan_id] ,
literal[string] : identifier[vlan_type] ,
literal[string] : identifier[ports] }
identifier[result] . identifier[append] ( identifier[results] )
keyword[return] identifier[result] | def vlans(self):
"""list[dict]: A list of dictionary items describing the details of
vlan interfaces.
This method fetches the VLAN interfaces
Examples:
>>> import pynos.device
>>> switch = '10.24.39.202'
>>> auth = ('admin', 'password')
>>> conn = (switch, '22')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.add_vlan_int('736')
... interfaces = dev.interface.vlans
... is_vlan_interface_present = False
... for interface in interfaces:
... if interface['vlan-id'] == '736':
... is_vlan_interface_present = True
... break
... dev.interface.del_vlan_int('736')
... assert is_vlan_interface_present
True
"""
urn = '{urn:brocade.com:mgmt:brocade-interface-ext}'
result = []
has_more = ''
last_vlan_id = ''
while has_more == '' or has_more == 'true':
request_interface = self.get_vlan_brief_request(last_vlan_id)
interface_result = self._callback(request_interface, 'get')
has_more = self.get_node_value(interface_result, '%shas-more', urn)
last_vlan_id = self.get_node_value(interface_result, '%slast-vlan-id', urn)
for interface in interface_result.findall('%svlan' % urn):
vlan_id = self.get_node_value(interface, '%svlan-id', urn)
vlan_type = self.get_node_value(interface, '%svlan-type', urn)
vlan_name = self.get_node_value(interface, '%svlan-name', urn)
vlan_state = self.get_node_value(interface, '%svlan-state', urn)
ports = []
for intf in interface.findall('%sinterface' % urn):
interface_type = self.get_node_value(intf, '%sinterface-type', urn)
interface_name = self.get_node_value(intf, '%sinterface-name', urn)
tag = self.get_node_value(intf, '%stag', urn)
port_results = {'interface-type': interface_type, 'interface-name': interface_name, 'tag': tag}
ports.append(port_results) # depends on [control=['for'], data=['intf']]
results = {'interface-name': vlan_name, 'vlan-state': vlan_state, 'vlan-id': vlan_id, 'vlan-type': vlan_type, 'interface': ports}
result.append(results) # depends on [control=['for'], data=['interface']] # depends on [control=['while'], data=[]]
return result |
def EMAIL_VERIFICATION(self):
"""
See e-mail verification method
"""
from allauth.account import app_settings as account_settings
return self._setting("EMAIL_VERIFICATION",
account_settings.EMAIL_VERIFICATION) | def function[EMAIL_VERIFICATION, parameter[self]]:
constant[
See e-mail verification method
]
from relative_module[allauth.account] import module[app_settings]
return[call[name[self]._setting, parameter[constant[EMAIL_VERIFICATION], name[account_settings].EMAIL_VERIFICATION]]] | keyword[def] identifier[EMAIL_VERIFICATION] ( identifier[self] ):
literal[string]
keyword[from] identifier[allauth] . identifier[account] keyword[import] identifier[app_settings] keyword[as] identifier[account_settings]
keyword[return] identifier[self] . identifier[_setting] ( literal[string] ,
identifier[account_settings] . identifier[EMAIL_VERIFICATION] ) | def EMAIL_VERIFICATION(self):
"""
See e-mail verification method
"""
from allauth.account import app_settings as account_settings
return self._setting('EMAIL_VERIFICATION', account_settings.EMAIL_VERIFICATION) |
def saveBestScore(self):
"""
        update the best score if the current score beats it, then write the
        best score to the scores file; returns False if the write fails
"""
if self.score > self.best_score:
self.best_score = self.score
try:
with open(self.scores_file, 'w') as f:
f.write(str(self.best_score))
except:
return False
return True | def function[saveBestScore, parameter[self]]:
constant[
save current best score in the default file
]
if compare[name[self].score greater[>] name[self].best_score] begin[:]
name[self].best_score assign[=] name[self].score
<ast.Try object at 0x7da1b0832cb0>
return[constant[True]] | keyword[def] identifier[saveBestScore] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[score] > identifier[self] . identifier[best_score] :
identifier[self] . identifier[best_score] = identifier[self] . identifier[score]
keyword[try] :
keyword[with] identifier[open] ( identifier[self] . identifier[scores_file] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[str] ( identifier[self] . identifier[best_score] ))
keyword[except] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def saveBestScore(self):
"""
save current best score in the default file
"""
if self.score > self.best_score:
self.best_score = self.score # depends on [control=['if'], data=[]]
try:
with open(self.scores_file, 'w') as f:
f.write(str(self.best_score)) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except:
return False # depends on [control=['except'], data=[]]
return True |
def attn(image_feat, query, hparams, name="attn"):
"""Attention on image feature with question as query."""
with tf.variable_scope(name, "attn", values=[image_feat, query]):
attn_dim = hparams.attn_dim
num_glimps = hparams.num_glimps
num_channels = common_layers.shape_list(image_feat)[-1]
if len(common_layers.shape_list(image_feat)) == 4:
image_feat = common_layers.flatten4d3d(image_feat)
query = tf.expand_dims(query, 1)
image_proj = common_attention.compute_attention_component(
image_feat, attn_dim, name="image_proj")
query_proj = common_attention.compute_attention_component(
query, attn_dim, name="query_proj")
h = tf.nn.relu(image_proj + query_proj)
h_proj = common_attention.compute_attention_component(
h, num_glimps, name="h_proj")
p = tf.nn.softmax(h_proj, axis=1)
image_ave = tf.matmul(image_feat, p, transpose_a=True)
image_ave = tf.reshape(image_ave, [-1, num_channels*num_glimps])
return image_ave | def function[attn, parameter[image_feat, query, hparams, name]]:
constant[Attention on image feature with question as query.]
with call[name[tf].variable_scope, parameter[name[name], constant[attn]]] begin[:]
variable[attn_dim] assign[=] name[hparams].attn_dim
variable[num_glimps] assign[=] name[hparams].num_glimps
variable[num_channels] assign[=] call[call[name[common_layers].shape_list, parameter[name[image_feat]]]][<ast.UnaryOp object at 0x7da2045663b0>]
if compare[call[name[len], parameter[call[name[common_layers].shape_list, parameter[name[image_feat]]]]] equal[==] constant[4]] begin[:]
variable[image_feat] assign[=] call[name[common_layers].flatten4d3d, parameter[name[image_feat]]]
variable[query] assign[=] call[name[tf].expand_dims, parameter[name[query], constant[1]]]
variable[image_proj] assign[=] call[name[common_attention].compute_attention_component, parameter[name[image_feat], name[attn_dim]]]
variable[query_proj] assign[=] call[name[common_attention].compute_attention_component, parameter[name[query], name[attn_dim]]]
variable[h] assign[=] call[name[tf].nn.relu, parameter[binary_operation[name[image_proj] + name[query_proj]]]]
variable[h_proj] assign[=] call[name[common_attention].compute_attention_component, parameter[name[h], name[num_glimps]]]
variable[p] assign[=] call[name[tf].nn.softmax, parameter[name[h_proj]]]
variable[image_ave] assign[=] call[name[tf].matmul, parameter[name[image_feat], name[p]]]
variable[image_ave] assign[=] call[name[tf].reshape, parameter[name[image_ave], list[[<ast.UnaryOp object at 0x7da1b2346290>, <ast.BinOp object at 0x7da1b23469b0>]]]]
return[name[image_ave]] | keyword[def] identifier[attn] ( identifier[image_feat] , identifier[query] , identifier[hparams] , identifier[name] = literal[string] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[name] , literal[string] , identifier[values] =[ identifier[image_feat] , identifier[query] ]):
identifier[attn_dim] = identifier[hparams] . identifier[attn_dim]
identifier[num_glimps] = identifier[hparams] . identifier[num_glimps]
identifier[num_channels] = identifier[common_layers] . identifier[shape_list] ( identifier[image_feat] )[- literal[int] ]
keyword[if] identifier[len] ( identifier[common_layers] . identifier[shape_list] ( identifier[image_feat] ))== literal[int] :
identifier[image_feat] = identifier[common_layers] . identifier[flatten4d3d] ( identifier[image_feat] )
identifier[query] = identifier[tf] . identifier[expand_dims] ( identifier[query] , literal[int] )
identifier[image_proj] = identifier[common_attention] . identifier[compute_attention_component] (
identifier[image_feat] , identifier[attn_dim] , identifier[name] = literal[string] )
identifier[query_proj] = identifier[common_attention] . identifier[compute_attention_component] (
identifier[query] , identifier[attn_dim] , identifier[name] = literal[string] )
identifier[h] = identifier[tf] . identifier[nn] . identifier[relu] ( identifier[image_proj] + identifier[query_proj] )
identifier[h_proj] = identifier[common_attention] . identifier[compute_attention_component] (
identifier[h] , identifier[num_glimps] , identifier[name] = literal[string] )
identifier[p] = identifier[tf] . identifier[nn] . identifier[softmax] ( identifier[h_proj] , identifier[axis] = literal[int] )
identifier[image_ave] = identifier[tf] . identifier[matmul] ( identifier[image_feat] , identifier[p] , identifier[transpose_a] = keyword[True] )
identifier[image_ave] = identifier[tf] . identifier[reshape] ( identifier[image_ave] ,[- literal[int] , identifier[num_channels] * identifier[num_glimps] ])
keyword[return] identifier[image_ave] | def attn(image_feat, query, hparams, name='attn'):
"""Attention on image feature with question as query."""
with tf.variable_scope(name, 'attn', values=[image_feat, query]):
attn_dim = hparams.attn_dim
num_glimps = hparams.num_glimps
num_channels = common_layers.shape_list(image_feat)[-1]
if len(common_layers.shape_list(image_feat)) == 4:
image_feat = common_layers.flatten4d3d(image_feat) # depends on [control=['if'], data=[]]
query = tf.expand_dims(query, 1)
image_proj = common_attention.compute_attention_component(image_feat, attn_dim, name='image_proj')
query_proj = common_attention.compute_attention_component(query, attn_dim, name='query_proj')
h = tf.nn.relu(image_proj + query_proj)
h_proj = common_attention.compute_attention_component(h, num_glimps, name='h_proj')
p = tf.nn.softmax(h_proj, axis=1)
image_ave = tf.matmul(image_feat, p, transpose_a=True)
image_ave = tf.reshape(image_ave, [-1, num_channels * num_glimps])
return image_ave # depends on [control=['with'], data=[]] |
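A hedged shape sketch: graph-mode TF1 APIs and tensor2tensor's common_layers/common_attention are assumed importable, and hparams only needs attn_dim and num_glimps attributes, so a SimpleNamespace suffices:

from types import SimpleNamespace
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

hparams = SimpleNamespace(attn_dim=128, num_glimps=2)
image_feat = tf.placeholder(tf.float32, [None, 14, 14, 512])   # [B, H, W, C]
query = tf.placeholder(tf.float32, [None, 512])                # [B, C]
pooled = attn(image_feat, query, hparams)
print(pooled.shape)   # (?, 1024) -- channels * num_glimps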
def _get_link(self, cobj):
"""Get a valid link, False if not found"""
fullname = cobj['module_short'] + '.' + cobj['name']
try:
value = self._searchindex['objects'][cobj['module_short']]
match = value[cobj['name']]
except KeyError:
link = False
else:
fname_idx = match[0]
objname_idx = str(match[1])
anchor = match[3]
fname = self._searchindex['filenames'][fname_idx]
# In 1.5+ Sphinx seems to have changed from .rst.html to only
# .html extension in converted files. Find this from the options.
ext = self._docopts.get('FILE_SUFFIX', '.rst.html')
fname = os.path.splitext(fname)[0] + ext
if self._is_windows:
fname = fname.replace('/', '\\')
link = os.path.join(self.doc_url, fname)
else:
link = posixpath.join(self.doc_url, fname)
if anchor == '':
anchor = fullname
elif anchor == '-':
anchor = (self._searchindex['objnames'][objname_idx][1] + '-' +
fullname)
link = link + '#' + anchor
return link | def function[_get_link, parameter[self, cobj]]:
constant[Get a valid link, False if not found]
variable[fullname] assign[=] binary_operation[binary_operation[call[name[cobj]][constant[module_short]] + constant[.]] + call[name[cobj]][constant[name]]]
<ast.Try object at 0x7da1b26af280>
return[name[link]] | keyword[def] identifier[_get_link] ( identifier[self] , identifier[cobj] ):
literal[string]
identifier[fullname] = identifier[cobj] [ literal[string] ]+ literal[string] + identifier[cobj] [ literal[string] ]
keyword[try] :
identifier[value] = identifier[self] . identifier[_searchindex] [ literal[string] ][ identifier[cobj] [ literal[string] ]]
identifier[match] = identifier[value] [ identifier[cobj] [ literal[string] ]]
keyword[except] identifier[KeyError] :
identifier[link] = keyword[False]
keyword[else] :
identifier[fname_idx] = identifier[match] [ literal[int] ]
identifier[objname_idx] = identifier[str] ( identifier[match] [ literal[int] ])
identifier[anchor] = identifier[match] [ literal[int] ]
identifier[fname] = identifier[self] . identifier[_searchindex] [ literal[string] ][ identifier[fname_idx] ]
identifier[ext] = identifier[self] . identifier[_docopts] . identifier[get] ( literal[string] , literal[string] )
identifier[fname] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[fname] )[ literal[int] ]+ identifier[ext]
keyword[if] identifier[self] . identifier[_is_windows] :
identifier[fname] = identifier[fname] . identifier[replace] ( literal[string] , literal[string] )
identifier[link] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[doc_url] , identifier[fname] )
keyword[else] :
identifier[link] = identifier[posixpath] . identifier[join] ( identifier[self] . identifier[doc_url] , identifier[fname] )
keyword[if] identifier[anchor] == literal[string] :
identifier[anchor] = identifier[fullname]
keyword[elif] identifier[anchor] == literal[string] :
identifier[anchor] =( identifier[self] . identifier[_searchindex] [ literal[string] ][ identifier[objname_idx] ][ literal[int] ]+ literal[string] +
identifier[fullname] )
identifier[link] = identifier[link] + literal[string] + identifier[anchor]
keyword[return] identifier[link] | def _get_link(self, cobj):
"""Get a valid link, False if not found"""
fullname = cobj['module_short'] + '.' + cobj['name']
try:
value = self._searchindex['objects'][cobj['module_short']]
match = value[cobj['name']] # depends on [control=['try'], data=[]]
except KeyError:
link = False # depends on [control=['except'], data=[]]
else:
fname_idx = match[0]
objname_idx = str(match[1])
anchor = match[3]
fname = self._searchindex['filenames'][fname_idx]
# In 1.5+ Sphinx seems to have changed from .rst.html to only
# .html extension in converted files. Find this from the options.
ext = self._docopts.get('FILE_SUFFIX', '.rst.html')
fname = os.path.splitext(fname)[0] + ext
if self._is_windows:
fname = fname.replace('/', '\\')
link = os.path.join(self.doc_url, fname) # depends on [control=['if'], data=[]]
else:
link = posixpath.join(self.doc_url, fname)
if anchor == '':
anchor = fullname # depends on [control=['if'], data=['anchor']]
elif anchor == '-':
anchor = self._searchindex['objnames'][objname_idx][1] + '-' + fullname # depends on [control=['if'], data=['anchor']]
link = link + '#' + anchor
return link |
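The lookups are easiest to follow against the shape of Sphinx's searchindex. A tiny hypothetical slice, with the tuple layout inferred from the indexing above (filename index, object-name index, priority, anchor):

_searchindex = {
    'objects': {'pkg.mod': {'func': (0, 3, 1, '-')}},
    'filenames': ['pkg/mod'],
    'objnames': {'3': ('py', 'function', 'Python function')},
}
match = _searchindex['objects']['pkg.mod']['func']
fname = _searchindex['filenames'][match[0]] + '.html'
anchor = _searchindex['objnames'][str(match[1])][1] + '-' + 'pkg.mod.func'
print(fname + '#' + anchor)   # pkg/mod.html#function-pkg.mod.func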
def _rfc3339_nanos_to_datetime(dt_str):
"""Convert a nanosecond-precision timestamp to a native datetime.
.. note::
Python datetimes do not support nanosecond precision; this function
therefore truncates such values to microseconds.
:type dt_str: str
:param dt_str: The string to convert.
:rtype: :class:`datetime.datetime`
:returns: The datetime object created from the string.
:raises ValueError: If the timestamp does not match the RFC 3339
regular expression.
"""
with_nanos = _RFC3339_NANOS.match(dt_str)
if with_nanos is None:
raise ValueError(
"Timestamp: %r, does not match pattern: %r"
% (dt_str, _RFC3339_NANOS.pattern)
)
bare_seconds = datetime.datetime.strptime(
with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
)
fraction = with_nanos.group("nanos")
if fraction is None:
micros = 0
else:
scale = 9 - len(fraction)
nanos = int(fraction) * (10 ** scale)
micros = nanos // 1000
return bare_seconds.replace(microsecond=micros, tzinfo=UTC) | def function[_rfc3339_nanos_to_datetime, parameter[dt_str]]:
constant[Convert a nanosecond-precision timestamp to a native datetime.
.. note::
Python datetimes do not support nanosecond precision; this function
therefore truncates such values to microseconds.
:type dt_str: str
:param dt_str: The string to convert.
:rtype: :class:`datetime.datetime`
:returns: The datetime object created from the string.
:raises ValueError: If the timestamp does not match the RFC 3339
regular expression.
]
variable[with_nanos] assign[=] call[name[_RFC3339_NANOS].match, parameter[name[dt_str]]]
if compare[name[with_nanos] is constant[None]] begin[:]
<ast.Raise object at 0x7da204564c70>
variable[bare_seconds] assign[=] call[name[datetime].datetime.strptime, parameter[call[name[with_nanos].group, parameter[constant[no_fraction]]], name[_RFC3339_NO_FRACTION]]]
variable[fraction] assign[=] call[name[with_nanos].group, parameter[constant[nanos]]]
if compare[name[fraction] is constant[None]] begin[:]
variable[micros] assign[=] constant[0]
return[call[name[bare_seconds].replace, parameter[]]] | keyword[def] identifier[_rfc3339_nanos_to_datetime] ( identifier[dt_str] ):
literal[string]
identifier[with_nanos] = identifier[_RFC3339_NANOS] . identifier[match] ( identifier[dt_str] )
keyword[if] identifier[with_nanos] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[dt_str] , identifier[_RFC3339_NANOS] . identifier[pattern] )
)
identifier[bare_seconds] = identifier[datetime] . identifier[datetime] . identifier[strptime] (
identifier[with_nanos] . identifier[group] ( literal[string] ), identifier[_RFC3339_NO_FRACTION]
)
identifier[fraction] = identifier[with_nanos] . identifier[group] ( literal[string] )
keyword[if] identifier[fraction] keyword[is] keyword[None] :
identifier[micros] = literal[int]
keyword[else] :
identifier[scale] = literal[int] - identifier[len] ( identifier[fraction] )
identifier[nanos] = identifier[int] ( identifier[fraction] )*( literal[int] ** identifier[scale] )
identifier[micros] = identifier[nanos] // literal[int]
keyword[return] identifier[bare_seconds] . identifier[replace] ( identifier[microsecond] = identifier[micros] , identifier[tzinfo] = identifier[UTC] ) | def _rfc3339_nanos_to_datetime(dt_str):
"""Convert a nanosecond-precision timestamp to a native datetime.
.. note::
Python datetimes do not support nanosecond precision; this function
therefore truncates such values to microseconds.
:type dt_str: str
:param dt_str: The string to convert.
:rtype: :class:`datetime.datetime`
:returns: The datetime object created from the string.
:raises ValueError: If the timestamp does not match the RFC 3339
regular expression.
"""
with_nanos = _RFC3339_NANOS.match(dt_str)
if with_nanos is None:
raise ValueError('Timestamp: %r, does not match pattern: %r' % (dt_str, _RFC3339_NANOS.pattern)) # depends on [control=['if'], data=[]]
bare_seconds = datetime.datetime.strptime(with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION)
fraction = with_nanos.group('nanos')
if fraction is None:
micros = 0 # depends on [control=['if'], data=[]]
else:
scale = 9 - len(fraction)
nanos = int(fraction) * 10 ** scale
micros = nanos // 1000
return bare_seconds.replace(microsecond=micros, tzinfo=UTC) |
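The fraction branch, worked by hand: scale right-pads the digits out to nanoseconds, and integer division then truncates to microseconds:

fraction = '1234567'                  # 7 fractional digits from the timestamp
scale = 9 - len(fraction)             # 2 digits short of full nanoseconds
nanos = int(fraction) * 10 ** scale   # 123456700 ns
micros = nanos // 1000                # 123456 us; sub-microsecond part dropped
print(micros)                         # 123456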
def maximum_size_estimated(self, sz):
"""
Set the CoRE Link Format sz attribute of the resource.
:param sz: the CoRE Link Format sz attribute
"""
if not isinstance(sz, str):
sz = str(sz)
self._attributes["sz"] = sz | def function[maximum_size_estimated, parameter[self, sz]]:
constant[
Set the CoRE Link Format sz attribute of the resource.
:param sz: the CoRE Link Format sz attribute
]
if <ast.UnaryOp object at 0x7da1b0689030> begin[:]
variable[sz] assign[=] call[name[str], parameter[name[sz]]]
call[name[self]._attributes][constant[sz]] assign[=] name[sz] | keyword[def] identifier[maximum_size_estimated] ( identifier[self] , identifier[sz] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[sz] , identifier[str] ):
identifier[sz] = identifier[str] ( identifier[sz] )
identifier[self] . identifier[_attributes] [ literal[string] ]= identifier[sz] | def maximum_size_estimated(self, sz):
"""
Set the CoRE Link Format sz attribute of the resource.
:param sz: the CoRE Link Format sz attribute
"""
if not isinstance(sz, str):
sz = str(sz) # depends on [control=['if'], data=[]]
self._attributes['sz'] = sz |
def add_criterion(self, name, priority, and_or, search_type, value): # pylint: disable=too-many-arguments
"""Add a search criteria object to a smart group.
Args:
name: String Criteria type name (e.g. "Application Title")
priority: Int or Str number priority of criterion.
and_or: Str, either "and" or "or".
search_type: String Criteria search type. (e.g. "is", "is
not", "member of", etc). Construct a SmartGroup with the
criteria of interest in the web interface to determine
what range of values are available.
value: String value to search for/against.
"""
criterion = SearchCriteria(name, priority, and_or, search_type, value)
self.criteria.append(criterion) | def function[add_criterion, parameter[self, name, priority, and_or, search_type, value]]:
constant[Add a search criteria object to a smart group.
Args:
name: String Criteria type name (e.g. "Application Title")
priority: Int or Str number priority of criterion.
and_or: Str, either "and" or "or".
search_type: String Criteria search type. (e.g. "is", "is
not", "member of", etc). Construct a SmartGroup with the
criteria of interest in the web interface to determine
what range of values are available.
value: String value to search for/against.
]
variable[criterion] assign[=] call[name[SearchCriteria], parameter[name[name], name[priority], name[and_or], name[search_type], name[value]]]
call[name[self].criteria.append, parameter[name[criterion]]] | keyword[def] identifier[add_criterion] ( identifier[self] , identifier[name] , identifier[priority] , identifier[and_or] , identifier[search_type] , identifier[value] ):
literal[string]
identifier[criterion] = identifier[SearchCriteria] ( identifier[name] , identifier[priority] , identifier[and_or] , identifier[search_type] , identifier[value] )
identifier[self] . identifier[criteria] . identifier[append] ( identifier[criterion] ) | def add_criterion(self, name, priority, and_or, search_type, value): # pylint: disable=too-many-arguments
'Add a search criteria object to a smart group.\n\n Args:\n name: String Criteria type name (e.g. "Application Title")\n priority: Int or Str number priority of criterion.\n and_or: Str, either "and" or "or".\n search_type: String Criteria search type. (e.g. "is", "is\n not", "member of", etc). Construct a SmartGroup with the\n criteria of interest in the web interface to determine\n what range of values are available.\n value: String value to search for/against.\n '
criterion = SearchCriteria(name, priority, and_or, search_type, value)
self.criteria.append(criterion) |
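A hedged usage sketch: SearchCriteria and the owning group class live elsewhere in the original module, so a namedtuple and a minimal holder stand in for them here:

from collections import namedtuple

SearchCriteria = namedtuple(
    "SearchCriteria", ["name", "priority", "and_or", "search_type", "value"])

class DemoSmartGroup:
    def __init__(self):
        self.criteria = []

    add_criterion = add_criterion  # reuse the method above

g = DemoSmartGroup()
g.add_criterion("Application Title", 0, "and", "is", "Firefox.app")
assert g.criteria[0].search_type == "is"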
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects) | def function[remove_property, parameter[self, prop]]:
constant[Remove a property from the definition.]
call[name[self]._properties.remove, parameter[name[prop]]]
call[name[self]._pairs.difference_update, parameter[<ast.GeneratorExp object at 0x7da20c992c80>]] | keyword[def] identifier[remove_property] ( identifier[self] , identifier[prop] ):
literal[string]
identifier[self] . identifier[_properties] . identifier[remove] ( identifier[prop] )
identifier[self] . identifier[_pairs] . identifier[difference_update] (( identifier[o] , identifier[prop] ) keyword[for] identifier[o] keyword[in] identifier[self] . identifier[_objects] ) | def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update(((o, prop) for o in self._objects)) |
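How the pair pruning behaves, with a throwaway definition object; sets stand in for whatever containers the real class uses:

class DemoDefinition:
    def __init__(self, objects, properties, pairs):
        self._objects = set(objects)
        self._properties = set(properties)
        self._pairs = set(pairs)

    remove_property = remove_property  # reuse the method above

d = DemoDefinition(["o1", "o2"], ["p1", "p2"],
                   [("o1", "p1"), ("o2", "p1"), ("o1", "p2")])
d.remove_property("p1")
# every (object, "p1") pair is dropped; unrelated pairs survive
assert d._pairs == {("o1", "p2")} and d._properties == {"p2"}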
def get_uri(self):
"""Return the Item source"""
if self.source_file and os.path.exists(self.source_file.path):
return self.source_file.path
elif self.source_url:
return self.source_url
return None | def function[get_uri, parameter[self]]:
constant[Return the Item source]
if <ast.BoolOp object at 0x7da18dc99f90> begin[:]
return[name[self].source_file.path]
return[constant[None]] | keyword[def] identifier[get_uri] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[source_file] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[source_file] . identifier[path] ):
keyword[return] identifier[self] . identifier[source_file] . identifier[path]
keyword[elif] identifier[self] . identifier[source_url] :
keyword[return] identifier[self] . identifier[source_url]
keyword[return] keyword[None] | def get_uri(self):
"""Return the Item source"""
if self.source_file and os.path.exists(self.source_file.path):
return self.source_file.path # depends on [control=['if'], data=[]]
elif self.source_url:
return self.source_url # depends on [control=['if'], data=[]]
return None |
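The precedence is: existing local file first, then URL, then None. A sketch with stand-in objects; source_file is assumed to behave like a Django FieldFile exposing a .path attribute:

import os
from types import SimpleNamespace

item = SimpleNamespace(
    source_file=SimpleNamespace(path="/no/such/file.wav"),
    source_url="https://example.com/file.wav",
)
item.get_uri = get_uri.__get__(item)  # bind the function above for the demo
# the local path does not exist, so the method falls back to the URL
assert item.get_uri() == "https://example.com/file.wav"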
def get_nameserver_detail_output_show_nameserver_nameserver_cos(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_nameserver_detail = ET.Element("get_nameserver_detail")
config = get_nameserver_detail
output = ET.SubElement(get_nameserver_detail, "output")
show_nameserver = ET.SubElement(output, "show-nameserver")
nameserver_portid_key = ET.SubElement(show_nameserver, "nameserver-portid")
nameserver_portid_key.text = kwargs.pop('nameserver_portid')
nameserver_cos = ET.SubElement(show_nameserver, "nameserver-cos")
nameserver_cos.text = kwargs.pop('nameserver_cos')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_nameserver_detail_output_show_nameserver_nameserver_cos, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_nameserver_detail] assign[=] call[name[ET].Element, parameter[constant[get_nameserver_detail]]]
variable[config] assign[=] name[get_nameserver_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_nameserver_detail], constant[output]]]
variable[show_nameserver] assign[=] call[name[ET].SubElement, parameter[name[output], constant[show-nameserver]]]
variable[nameserver_portid_key] assign[=] call[name[ET].SubElement, parameter[name[show_nameserver], constant[nameserver-portid]]]
name[nameserver_portid_key].text assign[=] call[name[kwargs].pop, parameter[constant[nameserver_portid]]]
variable[nameserver_cos] assign[=] call[name[ET].SubElement, parameter[name[show_nameserver], constant[nameserver-cos]]]
name[nameserver_cos].text assign[=] call[name[kwargs].pop, parameter[constant[nameserver_cos]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_nameserver_detail_output_show_nameserver_nameserver_cos] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_nameserver_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_nameserver_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_nameserver_detail] , literal[string] )
identifier[show_nameserver] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[nameserver_portid_key] = identifier[ET] . identifier[SubElement] ( identifier[show_nameserver] , literal[string] )
identifier[nameserver_portid_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[nameserver_cos] = identifier[ET] . identifier[SubElement] ( identifier[show_nameserver] , literal[string] )
identifier[nameserver_cos] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_nameserver_detail_output_show_nameserver_nameserver_cos(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_nameserver_detail = ET.Element('get_nameserver_detail')
config = get_nameserver_detail
output = ET.SubElement(get_nameserver_detail, 'output')
show_nameserver = ET.SubElement(output, 'show-nameserver')
nameserver_portid_key = ET.SubElement(show_nameserver, 'nameserver-portid')
nameserver_portid_key.text = kwargs.pop('nameserver_portid')
nameserver_cos = ET.SubElement(show_nameserver, 'nameserver-cos')
nameserver_cos.text = kwargs.pop('nameserver_cos')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
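The RPC body this auto-generated method assembles, rebuilt directly with ElementTree to show the element layout (the port id and CoS values are made up; note that the method's initial "config" element is discarded by the immediate rebinding):

import xml.etree.ElementTree as ET

root = ET.Element("get_nameserver_detail")
output = ET.SubElement(root, "output")
show = ET.SubElement(output, "show-nameserver")
ET.SubElement(show, "nameserver-portid").text = "010100"
ET.SubElement(show, "nameserver-cos").text = "F"
print(ET.tostring(root).decode())
# <get_nameserver_detail><output><show-nameserver>
#   <nameserver-portid>010100</nameserver-portid>
#   <nameserver-cos>F</nameserver-cos>
# </show-nameserver></output></get_nameserver_detail>  (whitespace added)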
def approximate_split(x, num_splits, axis=0):
"""Split approximately equally into num_splits parts.
Args:
x: a Tensor
num_splits: an integer
axis: an integer.
Returns:
a list of num_splits Tensors.
"""
size = shape_list(x)[axis]
size_splits = [tf.div(size + i, num_splits) for i in range(num_splits)]
return tf.split(x, size_splits, axis=axis) | def function[approximate_split, parameter[x, num_splits, axis]]:
constant[Split approximately equally into num_splits parts.
Args:
x: a Tensor
num_splits: an integer
axis: an integer.
Returns:
a list of num_splits Tensors.
]
variable[size] assign[=] call[call[name[shape_list], parameter[name[x]]]][name[axis]]
variable[size_splits] assign[=] <ast.ListComp object at 0x7da1b201f5e0>
return[call[name[tf].split, parameter[name[x], name[size_splits]]]] | keyword[def] identifier[approximate_split] ( identifier[x] , identifier[num_splits] , identifier[axis] = literal[int] ):
literal[string]
identifier[size] = identifier[shape_list] ( identifier[x] )[ identifier[axis] ]
identifier[size_splits] =[ identifier[tf] . identifier[div] ( identifier[size] + identifier[i] , identifier[num_splits] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_splits] )]
keyword[return] identifier[tf] . identifier[split] ( identifier[x] , identifier[size_splits] , identifier[axis] = identifier[axis] ) | def approximate_split(x, num_splits, axis=0):
"""Split approximately equally into num_splits parts.
Args:
x: a Tensor
num_splits: an integer
axis: an integer.
Returns:
a list of num_splits Tensors.
"""
size = shape_list(x)[axis]
size_splits = [tf.div(size + i, num_splits) for i in range(num_splits)]
return tf.split(x, size_splits, axis=axis) |
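The split-size arithmetic in plain Python (tf.div is integer division for integer inputs): the parts differ by at most one and always sum back to the original size:

size, num_splits = 10, 3
size_splits = [(size + i) // num_splits for i in range(num_splits)]
assert size_splits == [3, 3, 4]
assert sum(size_splits) == size  # holds for any non-negative size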
def plot (data, pconfig=None):
""" Plot a scatter plot with X,Y data.
:param data: 2D dict, first keys as sample names, then x:y data pairs
:param pconfig: optional dict with config key:value pairs. See CONTRIBUTING.md
:return: HTML and JS, ready to be inserted into the page
"""
if pconfig is None:
pconfig = {}
# Allow user to overwrite any given config for this plot
if 'id' in pconfig and pconfig['id'] and pconfig['id'] in config.custom_plot_config:
for k, v in config.custom_plot_config[pconfig['id']].items():
pconfig[k] = v
# Given one dataset - turn it into a list
if type(data) is not list:
data = [data]
# Generate the data dict structure expected by HighCharts series
plotdata = list()
for data_index, ds in enumerate(data):
d = list()
for s_name in ds:
# Ensure any overwriting conditionals from data_labels (e.g. ymax) are taken into consideration
series_config = pconfig.copy()
if 'data_labels' in pconfig and type(pconfig['data_labels'][data_index]) is dict: # if not a dict: only dataset name is provided
series_config.update(pconfig['data_labels'][data_index])
if type(ds[s_name]) is not list:
ds[s_name] = [ ds[s_name] ]
for k in ds[s_name]:
if k['x'] is not None:
if 'xmax' in series_config and float(k['x']) > float(series_config['xmax']):
continue
if 'xmin' in series_config and float(k['x']) < float(series_config['xmin']):
continue
if k['y'] is not None:
if 'ymax' in series_config and float(k['y']) > float(series_config['ymax']):
continue
if 'ymin' in series_config and float(k['y']) < float(series_config['ymin']):
continue
this_series = { 'x': k['x'], 'y': k['y'] }
try:
this_series['name'] = "{}: {}".format(s_name, k['name'])
except KeyError:
this_series['name'] = s_name
try:
this_series['color'] = k['color']
except KeyError:
try:
this_series['color'] = series_config['colors'][s_name]
except KeyError:
pass
d.append(this_series)
plotdata.append(d)
# Add on annotation data series
try:
if pconfig.get('extra_series'):
extra_series = pconfig['extra_series']
if type(pconfig['extra_series']) == dict:
extra_series = [[ pconfig['extra_series'] ]]
elif type(pconfig['extra_series']) == list and type(pconfig['extra_series'][0]) == dict:
extra_series = [ pconfig['extra_series'] ]
for i, es in enumerate(extra_series):
for s in es:
plotdata[i].append(s)
except (KeyError, IndexError):
pass
# Make a plot
return highcharts_scatter_plot(plotdata, pconfig) | def function[plot, parameter[data, pconfig]]:
constant[ Plot a scatter plot with X,Y data.
:param data: 2D dict, first keys as sample names, then x:y data pairs
:param pconfig: optional dict with config key:value pairs. See CONTRIBUTING.md
:return: HTML and JS, ready to be inserted into the page
]
if compare[name[pconfig] is constant[None]] begin[:]
variable[pconfig] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da1b2344970> begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b2347520>, <ast.Name object at 0x7da1b2344070>]]] in starred[call[call[name[config].custom_plot_config][call[name[pconfig]][constant[id]]].items, parameter[]]] begin[:]
call[name[pconfig]][name[k]] assign[=] name[v]
if compare[call[name[type], parameter[name[data]]] is_not name[list]] begin[:]
variable[data] assign[=] list[[<ast.Name object at 0x7da1b23463b0>]]
variable[plotdata] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b23442e0>, <ast.Name object at 0x7da1b2347580>]]] in starred[call[name[enumerate], parameter[name[data]]]] begin[:]
variable[d] assign[=] call[name[list], parameter[]]
for taget[name[s_name]] in starred[name[ds]] begin[:]
variable[series_config] assign[=] call[name[pconfig].copy, parameter[]]
if <ast.BoolOp object at 0x7da1b2346cb0> begin[:]
call[name[series_config].update, parameter[call[call[name[pconfig]][constant[data_labels]]][name[data_index]]]]
if compare[call[name[type], parameter[call[name[ds]][name[s_name]]]] is_not name[list]] begin[:]
call[name[ds]][name[s_name]] assign[=] list[[<ast.Subscript object at 0x7da1b2347910>]]
for taget[name[k]] in starred[call[name[ds]][name[s_name]]] begin[:]
if compare[call[name[k]][constant[x]] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b2344130> begin[:]
continue
if <ast.BoolOp object at 0x7da1b2345cf0> begin[:]
continue
if compare[call[name[k]][constant[y]] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b2347e80> begin[:]
continue
if <ast.BoolOp object at 0x7da1b2347f40> begin[:]
continue
variable[this_series] assign[=] dictionary[[<ast.Constant object at 0x7da18fe916c0>, <ast.Constant object at 0x7da18fe90340>], [<ast.Subscript object at 0x7da18fe92350>, <ast.Subscript object at 0x7da18fe93b20>]]
<ast.Try object at 0x7da18fe92ad0>
<ast.Try object at 0x7da18fe92500>
call[name[d].append, parameter[name[this_series]]]
call[name[plotdata].append, parameter[name[d]]]
<ast.Try object at 0x7da18fe93af0>
return[call[name[highcharts_scatter_plot], parameter[name[plotdata], name[pconfig]]]] | keyword[def] identifier[plot] ( identifier[data] , identifier[pconfig] = keyword[None] ):
literal[string]
keyword[if] identifier[pconfig] keyword[is] keyword[None] :
identifier[pconfig] ={}
keyword[if] literal[string] keyword[in] identifier[pconfig] keyword[and] identifier[pconfig] [ literal[string] ] keyword[and] identifier[pconfig] [ literal[string] ] keyword[in] identifier[config] . identifier[custom_plot_config] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[config] . identifier[custom_plot_config] [ identifier[pconfig] [ literal[string] ]]. identifier[items] ():
identifier[pconfig] [ identifier[k] ]= identifier[v]
keyword[if] identifier[type] ( identifier[data] ) keyword[is] keyword[not] identifier[list] :
identifier[data] =[ identifier[data] ]
identifier[plotdata] = identifier[list] ()
keyword[for] identifier[data_index] , identifier[ds] keyword[in] identifier[enumerate] ( identifier[data] ):
identifier[d] = identifier[list] ()
keyword[for] identifier[s_name] keyword[in] identifier[ds] :
identifier[series_config] = identifier[pconfig] . identifier[copy] ()
keyword[if] literal[string] keyword[in] identifier[pconfig] keyword[and] identifier[type] ( identifier[pconfig] [ literal[string] ][ identifier[data_index] ]) keyword[is] identifier[dict] :
identifier[series_config] . identifier[update] ( identifier[pconfig] [ literal[string] ][ identifier[data_index] ])
keyword[if] identifier[type] ( identifier[ds] [ identifier[s_name] ]) keyword[is] keyword[not] identifier[list] :
identifier[ds] [ identifier[s_name] ]=[ identifier[ds] [ identifier[s_name] ]]
keyword[for] identifier[k] keyword[in] identifier[ds] [ identifier[s_name] ]:
keyword[if] identifier[k] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[string] keyword[in] identifier[series_config] keyword[and] identifier[float] ( identifier[k] [ literal[string] ])> identifier[float] ( identifier[series_config] [ literal[string] ]):
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[series_config] keyword[and] identifier[float] ( identifier[k] [ literal[string] ])< identifier[float] ( identifier[series_config] [ literal[string] ]):
keyword[continue]
keyword[if] identifier[k] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[string] keyword[in] identifier[series_config] keyword[and] identifier[float] ( identifier[k] [ literal[string] ])> identifier[float] ( identifier[series_config] [ literal[string] ]):
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[series_config] keyword[and] identifier[float] ( identifier[k] [ literal[string] ])< identifier[float] ( identifier[series_config] [ literal[string] ]):
keyword[continue]
identifier[this_series] ={ literal[string] : identifier[k] [ literal[string] ], literal[string] : identifier[k] [ literal[string] ]}
keyword[try] :
identifier[this_series] [ literal[string] ]= literal[string] . identifier[format] ( identifier[s_name] , identifier[k] [ literal[string] ])
keyword[except] identifier[KeyError] :
identifier[this_series] [ literal[string] ]= identifier[s_name]
keyword[try] :
identifier[this_series] [ literal[string] ]= identifier[k] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[this_series] [ literal[string] ]= identifier[series_config] [ literal[string] ][ identifier[s_name] ]
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[d] . identifier[append] ( identifier[this_series] )
identifier[plotdata] . identifier[append] ( identifier[d] )
keyword[try] :
keyword[if] identifier[pconfig] . identifier[get] ( literal[string] ):
identifier[extra_series] = identifier[pconfig] [ literal[string] ]
keyword[if] identifier[type] ( identifier[pconfig] [ literal[string] ])== identifier[dict] :
identifier[extra_series] =[[ identifier[pconfig] [ literal[string] ]]]
keyword[elif] identifier[type] ( identifier[pconfig] [ literal[string] ])== identifier[list] keyword[and] identifier[type] ( identifier[pconfig] [ literal[string] ][ literal[int] ])== identifier[dict] :
identifier[extra_series] =[ identifier[pconfig] [ literal[string] ]]
keyword[for] identifier[i] , identifier[es] keyword[in] identifier[enumerate] ( identifier[extra_series] ):
keyword[for] identifier[s] keyword[in] identifier[es] :
identifier[plotdata] [ identifier[i] ]. identifier[append] ( identifier[s] )
keyword[except] ( identifier[KeyError] , identifier[IndexError] ):
keyword[pass]
keyword[return] identifier[highcharts_scatter_plot] ( identifier[plotdata] , identifier[pconfig] ) | def plot(data, pconfig=None):
""" Plot a scatter plot with X,Y data.
:param data: 2D dict, first keys as sample names, then x:y data pairs
:param pconfig: optional dict with config key:value pairs. See CONTRIBUTING.md
:return: HTML and JS, ready to be inserted into the page
"""
if pconfig is None:
pconfig = {} # depends on [control=['if'], data=['pconfig']]
# Allow user to overwrite any given config for this plot
if 'id' in pconfig and pconfig['id'] and (pconfig['id'] in config.custom_plot_config):
for (k, v) in config.custom_plot_config[pconfig['id']].items():
pconfig[k] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
# Given one dataset - turn it into a list
if type(data) is not list:
data = [data] # depends on [control=['if'], data=[]]
# Generate the data dict structure expected by HighCharts series
plotdata = list()
for (data_index, ds) in enumerate(data):
d = list()
for s_name in ds:
# Ensure any overwriting conditionals from data_labels (e.g. ymax) are taken into consideration
series_config = pconfig.copy()
if 'data_labels' in pconfig and type(pconfig['data_labels'][data_index]) is dict: # if not a dict: only dataset name is provided
series_config.update(pconfig['data_labels'][data_index]) # depends on [control=['if'], data=[]]
if type(ds[s_name]) is not list:
ds[s_name] = [ds[s_name]] # depends on [control=['if'], data=[]]
for k in ds[s_name]:
if k['x'] is not None:
if 'xmax' in series_config and float(k['x']) > float(series_config['xmax']):
continue # depends on [control=['if'], data=[]]
if 'xmin' in series_config and float(k['x']) < float(series_config['xmin']):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if k['y'] is not None:
if 'ymax' in series_config and float(k['y']) > float(series_config['ymax']):
continue # depends on [control=['if'], data=[]]
if 'ymin' in series_config and float(k['y']) < float(series_config['ymin']):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
this_series = {'x': k['x'], 'y': k['y']}
try:
this_series['name'] = '{}: {}'.format(s_name, k['name']) # depends on [control=['try'], data=[]]
except KeyError:
this_series['name'] = s_name # depends on [control=['except'], data=[]]
try:
this_series['color'] = k['color'] # depends on [control=['try'], data=[]]
except KeyError:
try:
this_series['color'] = series_config['colors'][s_name] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
d.append(this_series) # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['s_name']]
plotdata.append(d) # depends on [control=['for'], data=[]]
# Add on annotation data series
try:
if pconfig.get('extra_series'):
extra_series = pconfig['extra_series']
if type(pconfig['extra_series']) == dict:
extra_series = [[pconfig['extra_series']]] # depends on [control=['if'], data=[]]
elif type(pconfig['extra_series']) == list and type(pconfig['extra_series'][0]) == dict:
extra_series = [pconfig['extra_series']] # depends on [control=['if'], data=[]]
for (i, es) in enumerate(extra_series):
for s in es:
plotdata[i].append(s) # depends on [control=['for'], data=['s']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except (KeyError, IndexError):
pass # depends on [control=['except'], data=[]]
# Make a plot
return highcharts_scatter_plot(plotdata, pconfig) |
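The input shape the function accepts, per the docstring and the coercion logic above (a bare point dict is wrapped into a one-element list); sample names and values are illustrative:

data = {
    "sample_1": {"x": 1.0, "y": 2.5},                       # single point
    "sample_2": [{"x": 0.5, "y": 1.0, "color": "#d62728"},  # per-point color
                 {"x": 0.8, "y": 1.7, "name": "peak"}],     # point label
}
pconfig = {"id": "demo_scatter", "xmin": 0, "ymax": 10}
# html = plot(data, pconfig)  # returns HTML + JS; needs the module's
#                             # config and highcharts_scatter_plot globals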
def t_t_eopen(self, t):
r'~"|~\''
if t.value[1] == '"':
t.lexer.push_state('escapequotes')
elif t.value[1] == '\'':
t.lexer.push_state('escapeapostrophe')
return t | def function[t_t_eopen, parameter[self, t]]:
constant[~"|~\']
if compare[call[name[t].value][constant[1]] equal[==] constant["]] begin[:]
call[name[t].lexer.push_state, parameter[constant[escapequotes]]]
return[name[t]] | keyword[def] identifier[t_t_eopen] ( identifier[self] , identifier[t] ):
literal[string]
keyword[if] identifier[t] . identifier[value] [ literal[int] ]== literal[string] :
identifier[t] . identifier[lexer] . identifier[push_state] ( literal[string] )
keyword[elif] identifier[t] . identifier[value] [ literal[int] ]== literal[string] :
identifier[t] . identifier[lexer] . identifier[push_state] ( literal[string] )
keyword[return] identifier[t] | def t_t_eopen(self, t):
"""~"|~\\'"""
if t.value[1] == '"':
t.lexer.push_state('escapequotes') # depends on [control=['if'], data=[]]
elif t.value[1] == "'":
t.lexer.push_state('escapeapostrophe') # depends on [control=['if'], data=[]]
return t |
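What the rule does in isolation: the token value is the two-character opener ~" or ~', and value[1] picks which exclusive PLY lexer state to push. The branch restated in plain Python, with the same state names:

for value in ('~"', "~'"):
    state = "escapequotes" if value[1] == '"' else "escapeapostrophe"
    print(value, "->", state)  # ~" -> escapequotes, ~' -> escapeapostrophe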