code | code_sememe | token_type | code_dependency
---|---|---|---
stringlengths 75–104k | stringlengths 47–309k | stringlengths 215–214k | stringlengths 75–155k
def spend_key(self):
"""
Returns private spend key. None if wallet is view-only.
:rtype: str or None
"""
key = self._backend.spend_key()
if key == numbers.EMPTY_KEY:
return None
return key | def function[spend_key, parameter[self]]:
constant[
Returns private spend key. None if wallet is view-only.
:rtype: str or None
]
variable[key] assign[=] call[name[self]._backend.spend_key, parameter[]]
if compare[name[key] equal[==] name[numbers].EMPTY_KEY] begin[:]
return[constant[None]]
return[name[key]] | keyword[def] identifier[spend_key] ( identifier[self] ):
literal[string]
identifier[key] = identifier[self] . identifier[_backend] . identifier[spend_key] ()
keyword[if] identifier[key] == identifier[numbers] . identifier[EMPTY_KEY] :
keyword[return] keyword[None]
keyword[return] identifier[key] | def spend_key(self):
"""
Returns private spend key. None if wallet is view-only.
:rtype: str or None
"""
key = self._backend.spend_key()
if key == numbers.EMPTY_KEY:
return None # depends on [control=['if'], data=[]]
return key |
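The pattern in this first row, mapping a backend sentinel value to None, is easy to verify in isolation. A minimal sketch, assuming a hypothetical backend and an all-zeroes EMPTY_KEY sentinel (the real wallet constant may differ):

```python
EMPTY_KEY = "0" * 64  # assumed sentinel meaning "no private key"

class ViewOnlyBackend:
    """Stand-in backend that simulates a view-only wallet."""
    def spend_key(self):
        return EMPTY_KEY

def spend_key(backend):
    key = backend.spend_key()
    return None if key == EMPTY_KEY else key

print(spend_key(ViewOnlyBackend()))  # None -> wallet is view-only
```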
def get_polygon_filter_names():
"""Get the names of all polygon filters in the order of creation"""
names = []
for p in PolygonFilter.instances:
names.append(p.name)
return names | def function[get_polygon_filter_names, parameter[]]:
constant[Get the names of all polygon filters in the order of creation]
variable[names] assign[=] list[[]]
for taget[name[p]] in starred[name[PolygonFilter].instances] begin[:]
call[name[names].append, parameter[name[p].name]]
return[name[names]] | keyword[def] identifier[get_polygon_filter_names] ():
literal[string]
identifier[names] =[]
keyword[for] identifier[p] keyword[in] identifier[PolygonFilter] . identifier[instances] :
identifier[names] . identifier[append] ( identifier[p] . identifier[name] )
keyword[return] identifier[names] | def get_polygon_filter_names():
"""Get the names of all polygon filters in the order of creation"""
names = []
for p in PolygonFilter.instances:
names.append(p.name) # depends on [control=['for'], data=['p']]
return names |
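The function walks a class-level registry (PolygonFilter.instances). A runnable sketch of that registry pattern with a stand-in class, assuming each filter appends itself on construction:

```python
class PolygonFilter:
    """Stand-in class; only the registry behaviour matters here."""
    instances = []

    def __init__(self, name):
        self.name = name
        PolygonFilter.instances.append(self)  # register in creation order

def get_polygon_filter_names():
    return [p.name for p in PolygonFilter.instances]

PolygonFilter("gate A")
PolygonFilter("gate B")
print(get_polygon_filter_names())  # ['gate A', 'gate B']
```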
def get_language_from_request(request):
"""Return the most obvious language according the request."""
language = request.GET.get('language', None)
if language:
return language
if hasattr(request, 'LANGUAGE_CODE'):
lang = settings.PAGE_LANGUAGE_MAPPING(str(request.LANGUAGE_CODE))
if lang not in LANGUAGE_KEYS:
return settings.PAGE_DEFAULT_LANGUAGE
else:
return lang
else:
return settings.PAGE_DEFAULT_LANGUAGE | def function[get_language_from_request, parameter[request]]:
    constant[Return the most obvious language according to the request.]
variable[language] assign[=] call[name[request].GET.get, parameter[constant[language], constant[None]]]
if name[language] begin[:]
return[name[language]]
if call[name[hasattr], parameter[name[request], constant[LANGUAGE_CODE]]] begin[:]
variable[lang] assign[=] call[name[settings].PAGE_LANGUAGE_MAPPING, parameter[call[name[str], parameter[name[request].LANGUAGE_CODE]]]]
if compare[name[lang] <ast.NotIn object at 0x7da2590d7190> name[LANGUAGE_KEYS]] begin[:]
return[name[settings].PAGE_DEFAULT_LANGUAGE] | keyword[def] identifier[get_language_from_request] ( identifier[request] ):
literal[string]
identifier[language] = identifier[request] . identifier[GET] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[language] :
keyword[return] identifier[language]
keyword[if] identifier[hasattr] ( identifier[request] , literal[string] ):
identifier[lang] = identifier[settings] . identifier[PAGE_LANGUAGE_MAPPING] ( identifier[str] ( identifier[request] . identifier[LANGUAGE_CODE] ))
keyword[if] identifier[lang] keyword[not] keyword[in] identifier[LANGUAGE_KEYS] :
keyword[return] identifier[settings] . identifier[PAGE_DEFAULT_LANGUAGE]
keyword[else] :
keyword[return] identifier[lang]
keyword[else] :
keyword[return] identifier[settings] . identifier[PAGE_DEFAULT_LANGUAGE] | def get_language_from_request(request):
"""Return the most obvious language according the request."""
language = request.GET.get('language', None)
if language:
return language # depends on [control=['if'], data=[]]
if hasattr(request, 'LANGUAGE_CODE'):
lang = settings.PAGE_LANGUAGE_MAPPING(str(request.LANGUAGE_CODE))
if lang not in LANGUAGE_KEYS:
return settings.PAGE_DEFAULT_LANGUAGE # depends on [control=['if'], data=[]]
else:
return lang # depends on [control=['if'], data=[]]
else:
return settings.PAGE_DEFAULT_LANGUAGE |
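The fallback chain, explicit ?language= parameter, then the request's LANGUAGE_CODE validated against known keys, then the configured default, can be exercised without Django. A standalone sketch (LANGUAGE_KEYS and the default are illustrative, and the real code additionally routes the code through settings.PAGE_LANGUAGE_MAPPING):

```python
LANGUAGE_KEYS = {"en", "fr"}   # assumed set of supported codes
PAGE_DEFAULT_LANGUAGE = "en"   # assumed default

def resolve_language(query_language, request_language_code):
    if query_language:                     # explicit ?language=... wins
        return query_language
    if request_language_code is not None:  # fall back to the request's code
        lang = str(request_language_code)
        return lang if lang in LANGUAGE_KEYS else PAGE_DEFAULT_LANGUAGE
    return PAGE_DEFAULT_LANGUAGE           # last resort: site default

print(resolve_language(None, "fr"))  # 'fr'
print(resolve_language(None, "de"))  # 'en' (unknown code -> default)
```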
def get_xchange_rate(self, pairs, items=None):
"""Retrieves currency exchange rate data for given pair(s).
    Accepts both where pair='eurusd, gbpusd' and where pair in ('eurusd', 'gbpusd, usdaud')
"""
response = self.select('yahoo.finance.xchange', items).where(['pair', 'in', pairs])
return response | def function[get_xchange_rate, parameter[self, pairs, items]]:
constant[Retrieves currency exchange rate data for given pair(s).
    Accepts both where pair='eurusd, gbpusd' and where pair in ('eurusd', 'gbpusd, usdaud')
]
variable[response] assign[=] call[call[name[self].select, parameter[constant[yahoo.finance.xchange], name[items]]].where, parameter[list[[<ast.Constant object at 0x7da18ede7df0>, <ast.Constant object at 0x7da18ede5a20>, <ast.Name object at 0x7da18ede6800>]]]]
return[name[response]] | keyword[def] identifier[get_xchange_rate] ( identifier[self] , identifier[pairs] , identifier[items] = keyword[None] ):
literal[string]
identifier[response] = identifier[self] . identifier[select] ( literal[string] , identifier[items] ). identifier[where] ([ literal[string] , literal[string] , identifier[pairs] ])
keyword[return] identifier[response] | def get_xchange_rate(self, pairs, items=None):
"""Retrieves currency exchange rate data for given pair(s).
    Accepts both where pair='eurusd, gbpusd' and where pair in ('eurusd', 'gbpusd, usdaud')
"""
response = self.select('yahoo.finance.xchange', items).where(['pair', 'in', pairs])
return response |
def Genra(request):
"""
Generate dict of Dept and its grade.
"""
school = request.GET['school']
c = Course(school=school)
return JsonResponse(c.getGenra(), safe=False) | def function[Genra, parameter[request]]:
constant[
Generate dict of Dept and its grade.
]
variable[school] assign[=] call[name[request].GET][constant[school]]
variable[c] assign[=] call[name[Course], parameter[]]
return[call[name[JsonResponse], parameter[call[name[c].getGenra, parameter[]]]]] | keyword[def] identifier[Genra] ( identifier[request] ):
literal[string]
identifier[school] = identifier[request] . identifier[GET] [ literal[string] ]
identifier[c] = identifier[Course] ( identifier[school] = identifier[school] )
keyword[return] identifier[JsonResponse] ( identifier[c] . identifier[getGenra] (), identifier[safe] = keyword[False] ) | def Genra(request):
"""
Generate dict of Dept and its grade.
"""
school = request.GET['school']
c = Course(school=school)
return JsonResponse(c.getGenra(), safe=False) |
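One detail worth flagging in this row: Django's JsonResponse serializes only dict payloads unless safe=False is passed, which is presumably why the view sets it, since c.getGenra() may return a list. A minimal illustration (requires a configured Django project; the view and payload are hypothetical):

```python
from django.http import JsonResponse

def genre_list(request):                   # hypothetical stand-in view
    data = ["freshman", "sophomore"]       # a non-dict payload
    return JsonResponse(data, safe=False)  # safe=False permits non-dicts
```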
def transmute(*args, **kwargs):
""" Similar to `select` but allows mutation in column definitions.
In : (diamonds >>
head(3) >>
transmute(new_price=X.price * 2, x_plus_y=X.x + X.y))
Out:
new_price x_plus_y
0 652 7.93
1 652 7.73
2 654 8.12
"""
    mutate_dataframe_fn = mutate(*args, **dict(kwargs))
column_names_args = [str(arg) for arg in args]
column_names_kwargs = [name for name, _
in _dict_to_possibly_ordered_tuples(kwargs)]
column_names = column_names_args + column_names_kwargs
    return lambda df: mutate_dataframe_fn(df)[column_names] | def function[transmute, parameter[]]:
constant[ Similar to `select` but allows mutation in column definitions.
In : (diamonds >>
head(3) >>
transmute(new_price=X.price * 2, x_plus_y=X.x + X.y))
Out:
new_price x_plus_y
0 652 7.93
1 652 7.73
2 654 8.12
]
    variable[mutate_dataframe_fn] assign[=] call[name[mutate], parameter[<ast.Starred object at 0x7da1b12b5ae0>]]
variable[column_names_args] assign[=] <ast.ListComp object at 0x7da1b12b44f0>
variable[column_names_kwargs] assign[=] <ast.ListComp object at 0x7da1b12b5030>
variable[column_names] assign[=] binary_operation[name[column_names_args] + name[column_names_kwargs]]
return[<ast.Lambda object at 0x7da1b12b5a80>] | keyword[def] identifier[transmute] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
 identifier[mutate_dataframe_fn] = identifier[mutate] (* identifier[args] ,** identifier[dict] ( identifier[kwargs] ))
identifier[column_names_args] =[ identifier[str] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[args] ]
identifier[column_names_kwargs] =[ identifier[name] keyword[for] identifier[name] , identifier[_]
keyword[in] identifier[_dict_to_possibly_ordered_tuples] ( identifier[kwargs] )]
identifier[column_names] = identifier[column_names_args] + identifier[column_names_kwargs]
 keyword[return] keyword[lambda] identifier[df] : identifier[mutate_dataframe_fn] ( identifier[df] )[ identifier[column_names] ] | def transmute(*args, **kwargs):
""" Similar to `select` but allows mutation in column definitions.
In : (diamonds >>
head(3) >>
transmute(new_price=X.price * 2, x_plus_y=X.x + X.y))
Out:
new_price x_plus_y
0 652 7.93
1 652 7.73
2 654 8.12
"""
    mutate_dataframe_fn = mutate(*args, **dict(kwargs))
column_names_args = [str(arg) for arg in args]
column_names_kwargs = [name for (name, _) in _dict_to_possibly_ordered_tuples(kwargs)]
column_names = column_names_args + column_names_kwargs
    return lambda df: mutate_dataframe_fn(df)[column_names] |
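For comparison, the same mutate-then-select behaviour in plain pandas, using assign followed by column selection; the three-row diamonds frame below is a stand-in chosen to reproduce the docstring's numbers:

```python
import pandas as pd

diamonds = pd.DataFrame({"price": [326, 326, 327],
                         "x": [3.95, 3.89, 4.05],
                         "y": [3.98, 3.84, 4.07]})

out = (diamonds.head(3)
       .assign(new_price=lambda d: d.price * 2,
               x_plus_y=lambda d: d.x + d.y)
       [["new_price", "x_plus_y"]])
print(out)  # new_price: 652, 652, 654; x_plus_y: 7.93, 7.73, 8.12
```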
def split_string_at_suffix(s, numbers_into_suffix=False):
"""
Split a string into two parts: a prefix and a suffix. Splitting is done from the end,
so the split is done around the position of the last digit in the string
(that means the prefix may include any character, mixing digits and chars).
The flag 'numbers_into_suffix' determines whether the suffix consists of digits or non-digits.
"""
if not s:
return (s, '')
pos = len(s)
while pos and numbers_into_suffix == s[pos-1].isdigit():
pos -= 1
return (s[:pos], s[pos:]) | def function[split_string_at_suffix, parameter[s, numbers_into_suffix]]:
constant[
Split a string into two parts: a prefix and a suffix. Splitting is done from the end,
so the split is done around the position of the last digit in the string
(that means the prefix may include any character, mixing digits and chars).
The flag 'numbers_into_suffix' determines whether the suffix consists of digits or non-digits.
]
if <ast.UnaryOp object at 0x7da18dc9ab60> begin[:]
return[tuple[[<ast.Name object at 0x7da18dc98040>, <ast.Constant object at 0x7da18dc98ac0>]]]
variable[pos] assign[=] call[name[len], parameter[name[s]]]
while <ast.BoolOp object at 0x7da18dc9a380> begin[:]
<ast.AugAssign object at 0x7da18dc99450>
return[tuple[[<ast.Subscript object at 0x7da18dc99ed0>, <ast.Subscript object at 0x7da18dc9a320>]]] | keyword[def] identifier[split_string_at_suffix] ( identifier[s] , identifier[numbers_into_suffix] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[s] :
keyword[return] ( identifier[s] , literal[string] )
identifier[pos] = identifier[len] ( identifier[s] )
keyword[while] identifier[pos] keyword[and] identifier[numbers_into_suffix] == identifier[s] [ identifier[pos] - literal[int] ]. identifier[isdigit] ():
identifier[pos] -= literal[int]
keyword[return] ( identifier[s] [: identifier[pos] ], identifier[s] [ identifier[pos] :]) | def split_string_at_suffix(s, numbers_into_suffix=False):
"""
Split a string into two parts: a prefix and a suffix. Splitting is done from the end,
so the split is done around the position of the last digit in the string
(that means the prefix may include any character, mixing digits and chars).
The flag 'numbers_into_suffix' determines whether the suffix consists of digits or non-digits.
"""
if not s:
return (s, '') # depends on [control=['if'], data=[]]
pos = len(s)
while pos and numbers_into_suffix == s[pos - 1].isdigit():
pos -= 1 # depends on [control=['while'], data=[]]
return (s[:pos], s[pos:]) |
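Quick behaviour checks for both directions of the flag, using the function exactly as defined above:

```python
print(split_string_at_suffix("file42", numbers_into_suffix=True))   # ('file', '42')
print(split_string_at_suffix("42file", numbers_into_suffix=False))  # ('42', 'file')
print(split_string_at_suffix(""))                                   # ('', '')
```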
def data_to_uuid(data):
"""Convert an array of binary data to the iBeacon uuid format."""
string = data_to_hexstring(data)
return string[0:8]+'-'+string[8:12]+'-'+string[12:16]+'-'+string[16:20]+'-'+string[20:32] | def function[data_to_uuid, parameter[data]]:
constant[Convert an array of binary data to the iBeacon uuid format.]
variable[string] assign[=] call[name[data_to_hexstring], parameter[name[data]]]
return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[string]][<ast.Slice object at 0x7da20e957580>] + constant[-]] + call[name[string]][<ast.Slice object at 0x7da20e957340>]] + constant[-]] + call[name[string]][<ast.Slice object at 0x7da20e957b50>]] + constant[-]] + call[name[string]][<ast.Slice object at 0x7da20e957be0>]] + constant[-]] + call[name[string]][<ast.Slice object at 0x7da20e9553f0>]]] | keyword[def] identifier[data_to_uuid] ( identifier[data] ):
literal[string]
identifier[string] = identifier[data_to_hexstring] ( identifier[data] )
keyword[return] identifier[string] [ literal[int] : literal[int] ]+ literal[string] + identifier[string] [ literal[int] : literal[int] ]+ literal[string] + identifier[string] [ literal[int] : literal[int] ]+ literal[string] + identifier[string] [ literal[int] : literal[int] ]+ literal[string] + identifier[string] [ literal[int] : literal[int] ] | def data_to_uuid(data):
"""Convert an array of binary data to the iBeacon uuid format."""
string = data_to_hexstring(data)
return string[0:8] + '-' + string[8:12] + '-' + string[12:16] + '-' + string[16:20] + '-' + string[20:32] |
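The slicing implements the standard 8-4-4-4-12 UUID grouping. A self-contained check that sidesteps the (not shown) data_to_hexstring helper by starting from a fixed 32-character hex string:

```python
def hexstring_to_uuid(string):
    # Same grouping as above, taking the hex string directly.
    return (string[0:8] + '-' + string[8:12] + '-' + string[12:16]
            + '-' + string[16:20] + '-' + string[20:32])

print(hexstring_to_uuid("e2c56db5dffb48d2b060d0f5a71096e0"))
# e2c56db5-dffb-48d2-b060-d0f5a71096e0
```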
def override_locale(self, locale: str = locales.EN,
) -> Generator['BaseDataProvider', None, None]:
"""Context manager which allows overriding current locale.
Temporarily overrides current locale for
locale-dependent providers.
:param locale: Locale.
:return: Provider with overridden locale.
"""
try:
origin_locale = self.locale
self._override_locale(locale)
try:
yield self
finally:
self._override_locale(origin_locale)
except AttributeError:
            raise ValueError('«{}» is not locale-dependent'.format(
                self.__class__.__name__))
constant[Context manager which allows overriding current locale.
Temporarily overrides current locale for
locale-dependent providers.
:param locale: Locale.
:return: Provider with overridden locale.
]
<ast.Try object at 0x7da2044c15d0> | keyword[def] identifier[override_locale] ( identifier[self] , identifier[locale] : identifier[str] = identifier[locales] . identifier[EN] ,
)-> identifier[Generator] [ literal[string] , keyword[None] , keyword[None] ]:
literal[string]
keyword[try] :
identifier[origin_locale] = identifier[self] . identifier[locale]
identifier[self] . identifier[_override_locale] ( identifier[locale] )
keyword[try] :
keyword[yield] identifier[self]
keyword[finally] :
identifier[self] . identifier[_override_locale] ( identifier[origin_locale] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[self] . identifier[__class__] . identifier[__name__] )) | def override_locale(self, locale: str=locales.EN) -> Generator['BaseDataProvider', None, None]:
"""Context manager which allows overriding current locale.
Temporarily overrides current locale for
locale-dependent providers.
:param locale: Locale.
:return: Provider with overridden locale.
"""
try:
origin_locale = self.locale
self._override_locale(locale)
try:
yield self # depends on [control=['try'], data=[]]
finally:
self._override_locale(origin_locale) # depends on [control=['try'], data=[]]
except AttributeError:
            raise ValueError('«{}» is not locale-dependent'.format(self.__class__.__name__)) # depends on [control=['except'], data=[]] |
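The save/override/yield/restore shape is the classic context-manager idiom; here it is reduced to a compact, runnable sketch with contextlib (the Provider class is a hypothetical stand-in; the real method delegates to an internal _override_locale helper):

```python
from contextlib import contextmanager

class Provider:
    locale = "en"

    @contextmanager
    def override_locale(self, locale):
        origin = self.locale      # save
        self.locale = locale      # override
        try:
            yield self
        finally:
            self.locale = origin  # always restore

p = Provider()
with p.override_locale("de"):
    print(p.locale)  # de
print(p.locale)      # en
```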
def require_auth(function):
"""
A decorator that wraps the passed in function and raises exception
if access token is missing
"""
@functools.wraps(function)
def wrapper(self, *args, **kwargs):
if not self.access_token():
raise MissingAccessTokenError
return function(self, *args, **kwargs)
return wrapper | def function[require_auth, parameter[function]]:
constant[
A decorator that wraps the passed in function and raises exception
if access token is missing
]
def function[wrapper, parameter[self]]:
if <ast.UnaryOp object at 0x7da18f721b10> begin[:]
<ast.Raise object at 0x7da18f720940>
return[call[name[function], parameter[name[self], <ast.Starred object at 0x7da204620040>]]]
return[name[wrapper]] | keyword[def] identifier[require_auth] ( identifier[function] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[function] )
keyword[def] identifier[wrapper] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] identifier[self] . identifier[access_token] ():
keyword[raise] identifier[MissingAccessTokenError]
keyword[return] identifier[function] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper] | def require_auth(function):
"""
A decorator that wraps the passed in function and raises exception
if access token is missing
"""
@functools.wraps(function)
def wrapper(self, *args, **kwargs):
if not self.access_token():
raise MissingAccessTokenError # depends on [control=['if'], data=[]]
return function(self, *args, **kwargs)
return wrapper |
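A usage sketch for the decorator; the client class and token handling are illustrative stand-ins, with require_auth taken as defined above (functools must be imported for it):

```python
import functools  # used by require_auth above

class MissingAccessTokenError(Exception):
    pass

class Client:
    def __init__(self, token=None):
        self._token = token

    def access_token(self):
        return self._token

    @require_auth
    def whoami(self):
        return "authenticated"

print(Client(token="abc").whoami())  # 'authenticated'
Client().whoami()                    # raises MissingAccessTokenError
```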
def update_font(self):
"""Update font from Preferences"""
color_scheme = self.get_color_scheme()
font = self.get_plugin_font()
for editor in self.editors:
editor.set_font(font, color_scheme) | def function[update_font, parameter[self]]:
constant[Update font from Preferences]
variable[color_scheme] assign[=] call[name[self].get_color_scheme, parameter[]]
variable[font] assign[=] call[name[self].get_plugin_font, parameter[]]
for taget[name[editor]] in starred[name[self].editors] begin[:]
call[name[editor].set_font, parameter[name[font], name[color_scheme]]] | keyword[def] identifier[update_font] ( identifier[self] ):
literal[string]
identifier[color_scheme] = identifier[self] . identifier[get_color_scheme] ()
identifier[font] = identifier[self] . identifier[get_plugin_font] ()
keyword[for] identifier[editor] keyword[in] identifier[self] . identifier[editors] :
identifier[editor] . identifier[set_font] ( identifier[font] , identifier[color_scheme] ) | def update_font(self):
"""Update font from Preferences"""
color_scheme = self.get_color_scheme()
font = self.get_plugin_font()
for editor in self.editors:
editor.set_font(font, color_scheme) # depends on [control=['for'], data=['editor']] |
def check_release_number(release):
"""
Check to make sure a release is in the valid range of
Ensembl releases.
"""
try:
release = int(release)
except:
raise ValueError("Invalid Ensembl release: %s" % release)
if release < MIN_ENSEMBL_RELEASE or release > MAX_ENSEMBL_RELEASE:
raise ValueError(
"Invalid Ensembl releases %d, must be between %d and %d" % (
release, MIN_ENSEMBL_RELEASE, MAX_ENSEMBL_RELEASE))
return release | def function[check_release_number, parameter[release]]:
constant[
Check to make sure a release is in the valid range of
Ensembl releases.
]
<ast.Try object at 0x7da1b08db5e0>
if <ast.BoolOp object at 0x7da1b08dbee0> begin[:]
<ast.Raise object at 0x7da1b08dbdc0>
return[name[release]] | keyword[def] identifier[check_release_number] ( identifier[release] ):
literal[string]
keyword[try] :
identifier[release] = identifier[int] ( identifier[release] )
keyword[except] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[release] )
keyword[if] identifier[release] < identifier[MIN_ENSEMBL_RELEASE] keyword[or] identifier[release] > identifier[MAX_ENSEMBL_RELEASE] :
keyword[raise] identifier[ValueError] (
literal[string] %(
identifier[release] , identifier[MIN_ENSEMBL_RELEASE] , identifier[MAX_ENSEMBL_RELEASE] ))
keyword[return] identifier[release] | def check_release_number(release):
"""
Check to make sure a release is in the valid range of
Ensembl releases.
"""
try:
release = int(release) # depends on [control=['try'], data=[]]
except:
raise ValueError('Invalid Ensembl release: %s' % release) # depends on [control=['except'], data=[]]
if release < MIN_ENSEMBL_RELEASE or release > MAX_ENSEMBL_RELEASE:
        raise ValueError('Invalid Ensembl release %d, must be between %d and %d' % (release, MIN_ENSEMBL_RELEASE, MAX_ENSEMBL_RELEASE)) # depends on [control=['if'], data=[]]
return release |
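A behaviour check with illustrative bounds; the real MIN/MAX constants are defined elsewhere in the package, so the values below are assumptions for the demo:

```python
MIN_ENSEMBL_RELEASE, MAX_ENSEMBL_RELEASE = 54, 95  # assumed bounds

print(check_release_number("75"))  # 75 -- strings are coerced to int
try:
    check_release_number(10)       # below the assumed minimum
except ValueError as exc:
    print(exc)
```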
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
        # grab the chunks we need
        out = [PNG_SIGN]
        # FIXME: it's tricky to define "other_chunks". HoneyView stops the
        # animation if it sees chunks other than fctl or idat, so we put other
        # chunks at the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out) | def function[to_bytes, parameter[self]]:
constant[Convert the entire image to bytes.
:rtype: bytes
]
variable[out] assign[=] list[[<ast.Name object at 0x7da18f721360>]]
variable[other_chunks] assign[=] list[[]]
variable[seq] assign[=] constant[0]
<ast.Tuple object at 0x7da18f721300> assign[=] call[name[self].frames][constant[0]]
call[name[out].append, parameter[name[png].hdr]]
call[name[out].append, parameter[call[name[make_chunk], parameter[constant[acTL], call[name[struct].pack, parameter[constant[!II], call[name[len], parameter[name[self].frames]], name[self].num_plays]]]]]]
if name[control] begin[:]
call[name[out].append, parameter[call[name[make_chunk], parameter[constant[fcTL], binary_operation[call[name[struct].pack, parameter[constant[!I], name[seq]]] + call[name[control].to_bytes, parameter[]]]]]]]
<ast.AugAssign object at 0x7da1b05368c0>
variable[idat_chunks] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0537460>, <ast.Name object at 0x7da1b05362f0>]]] in starred[name[png].chunks] begin[:]
if compare[name[type_] in tuple[[<ast.Constant object at 0x7da1b05377c0>, <ast.Constant object at 0x7da1b0536e90>]]] begin[:]
continue
if compare[name[type_] equal[==] constant[IDAT]] begin[:]
call[name[idat_chunks].append, parameter[name[data]]]
continue
call[name[out].append, parameter[name[data]]]
call[name[out].extend, parameter[name[idat_chunks]]]
for taget[tuple[[<ast.Name object at 0x7da1b0534160>, <ast.Name object at 0x7da1b0536440>]]] in starred[call[name[self].frames][<ast.Slice object at 0x7da1b0535b70>]] begin[:]
call[name[out].append, parameter[call[name[make_chunk], parameter[constant[fcTL], binary_operation[call[name[struct].pack, parameter[constant[!I], name[seq]]] + call[name[control].to_bytes, parameter[]]]]]]]
<ast.AugAssign object at 0x7da1b0535750>
for taget[tuple[[<ast.Name object at 0x7da1b0536ec0>, <ast.Name object at 0x7da1b0535a20>]]] in starred[name[png].chunks] begin[:]
if <ast.BoolOp object at 0x7da1b0536050> begin[:]
continue
call[name[out].extend, parameter[name[other_chunks]]]
call[name[out].append, parameter[name[png].end]]
return[call[constant[b''].join, parameter[name[out]]]] | keyword[def] identifier[to_bytes] ( identifier[self] ):
literal[string]
identifier[out] =[ identifier[PNG_SIGN] ]
identifier[other_chunks] =[]
identifier[seq] = literal[int]
identifier[png] , identifier[control] = identifier[self] . identifier[frames] [ literal[int] ]
identifier[out] . identifier[append] ( identifier[png] . identifier[hdr] )
identifier[out] . identifier[append] ( identifier[make_chunk] ( literal[string] , identifier[struct] . identifier[pack] ( literal[string] , identifier[len] ( identifier[self] . identifier[frames] ), identifier[self] . identifier[num_plays] )))
keyword[if] identifier[control] :
identifier[out] . identifier[append] ( identifier[make_chunk] ( literal[string] , identifier[struct] . identifier[pack] ( literal[string] , identifier[seq] )+ identifier[control] . identifier[to_bytes] ()))
identifier[seq] += literal[int]
identifier[idat_chunks] =[]
keyword[for] identifier[type_] , identifier[data] keyword[in] identifier[png] . identifier[chunks] :
keyword[if] identifier[type_] keyword[in] ( literal[string] , literal[string] ):
keyword[continue]
keyword[if] identifier[type_] == literal[string] :
identifier[idat_chunks] . identifier[append] ( identifier[data] )
keyword[continue]
identifier[out] . identifier[append] ( identifier[data] )
identifier[out] . identifier[extend] ( identifier[idat_chunks] )
keyword[for] identifier[png] , identifier[control] keyword[in] identifier[self] . identifier[frames] [ literal[int] :]:
identifier[out] . identifier[append] (
identifier[make_chunk] ( literal[string] , identifier[struct] . identifier[pack] ( literal[string] , identifier[seq] )+ identifier[control] . identifier[to_bytes] ())
)
identifier[seq] += literal[int]
keyword[for] identifier[type_] , identifier[data] keyword[in] identifier[png] . identifier[chunks] :
keyword[if] identifier[type_] keyword[in] ( literal[string] , literal[string] ) keyword[or] identifier[type_] keyword[in] identifier[CHUNK_BEFORE_IDAT] :
keyword[continue]
keyword[elif] identifier[type_] == literal[string] :
identifier[out] . identifier[append] (
identifier[make_chunk] ( literal[string] , identifier[struct] . identifier[pack] ( literal[string] , identifier[seq] )+ identifier[data] [ literal[int] :- literal[int] ])
)
identifier[seq] += literal[int]
keyword[else] :
identifier[other_chunks] . identifier[append] ( identifier[data] )
identifier[out] . identifier[extend] ( identifier[other_chunks] )
identifier[out] . identifier[append] ( identifier[png] . identifier[end] )
keyword[return] literal[string] . identifier[join] ( identifier[out] ) | def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
""" # grab the chunks we needs
out = [PNG_SIGN] # FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0 # for first frame
(png, control) = self.frames[0] # header
out.append(png.hdr) # acTL
out.append(make_chunk('acTL', struct.pack('!II', len(self.frames), self.num_plays))) # fcTL
if control:
out.append(make_chunk('fcTL', struct.pack('!I', seq) + control.to_bytes()))
seq += 1 # depends on [control=['if'], data=[]] # and others...
idat_chunks = []
for (type_, data) in png.chunks:
if type_ in ('IHDR', 'IEND'):
continue # depends on [control=['if'], data=[]]
if type_ == 'IDAT': # put at last
idat_chunks.append(data)
continue # depends on [control=['if'], data=[]]
out.append(data) # depends on [control=['for'], data=[]]
out.extend(idat_chunks) # FIXME: we should do some optimization to frames...
# for other frames
for (png, control) in self.frames[1:]: # fcTL
out.append(make_chunk('fcTL', struct.pack('!I', seq) + control.to_bytes()))
seq += 1 # and others...
for (type_, data) in png.chunks:
if type_ in ('IHDR', 'IEND') or type_ in CHUNK_BEFORE_IDAT:
continue # depends on [control=['if'], data=[]]
elif type_ == 'IDAT': # convert IDAT to fdAT
out.append(make_chunk('fdAT', struct.pack('!I', seq) + data[8:-4]))
seq += 1 # depends on [control=['if'], data=[]]
else:
other_chunks.append(data) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # end
out.extend(other_chunks)
out.append(png.end)
return b''.join(out) |
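The method leans on a make_chunk helper that is not shown. Per the PNG specification a chunk is a big-endian length, a 4-byte type, the payload, and a CRC-32 computed over type plus payload, so a plausible sketch (the signature is inferred from the calls above) looks like:

```python
import struct
import zlib

def make_chunk(chunk_type, data):
    """Assemble one PNG chunk: length + type + data + CRC32(type + data)."""
    body = chunk_type.encode("latin-1") + data
    return (struct.pack("!I", len(data)) + body
            + struct.pack("!I", zlib.crc32(body) & 0xFFFFFFFF))
```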
def hz2cents(freq_hz, base_frequency=10.0):
"""Convert an array of frequency values in Hz to cents.
0 values are left in place.
Parameters
----------
freq_hz : np.ndarray
Array of frequencies in Hz.
base_frequency : float
Base frequency for conversion.
(Default value = 10.0)
Returns
-------
cent : np.ndarray
Array of frequencies in cents, relative to base_frequency
"""
freq_cent = np.zeros(freq_hz.shape[0])
freq_nonz_ind = np.flatnonzero(freq_hz)
normalized_frequency = np.abs(freq_hz[freq_nonz_ind])/base_frequency
freq_cent[freq_nonz_ind] = 1200*np.log2(normalized_frequency)
return freq_cent | def function[hz2cents, parameter[freq_hz, base_frequency]]:
constant[Convert an array of frequency values in Hz to cents.
0 values are left in place.
Parameters
----------
freq_hz : np.ndarray
Array of frequencies in Hz.
base_frequency : float
Base frequency for conversion.
(Default value = 10.0)
Returns
-------
cent : np.ndarray
Array of frequencies in cents, relative to base_frequency
]
variable[freq_cent] assign[=] call[name[np].zeros, parameter[call[name[freq_hz].shape][constant[0]]]]
variable[freq_nonz_ind] assign[=] call[name[np].flatnonzero, parameter[name[freq_hz]]]
variable[normalized_frequency] assign[=] binary_operation[call[name[np].abs, parameter[call[name[freq_hz]][name[freq_nonz_ind]]]] / name[base_frequency]]
call[name[freq_cent]][name[freq_nonz_ind]] assign[=] binary_operation[constant[1200] * call[name[np].log2, parameter[name[normalized_frequency]]]]
return[name[freq_cent]] | keyword[def] identifier[hz2cents] ( identifier[freq_hz] , identifier[base_frequency] = literal[int] ):
literal[string]
identifier[freq_cent] = identifier[np] . identifier[zeros] ( identifier[freq_hz] . identifier[shape] [ literal[int] ])
identifier[freq_nonz_ind] = identifier[np] . identifier[flatnonzero] ( identifier[freq_hz] )
identifier[normalized_frequency] = identifier[np] . identifier[abs] ( identifier[freq_hz] [ identifier[freq_nonz_ind] ])/ identifier[base_frequency]
identifier[freq_cent] [ identifier[freq_nonz_ind] ]= literal[int] * identifier[np] . identifier[log2] ( identifier[normalized_frequency] )
keyword[return] identifier[freq_cent] | def hz2cents(freq_hz, base_frequency=10.0):
"""Convert an array of frequency values in Hz to cents.
0 values are left in place.
Parameters
----------
freq_hz : np.ndarray
Array of frequencies in Hz.
base_frequency : float
Base frequency for conversion.
(Default value = 10.0)
Returns
-------
cent : np.ndarray
Array of frequencies in cents, relative to base_frequency
"""
freq_cent = np.zeros(freq_hz.shape[0])
freq_nonz_ind = np.flatnonzero(freq_hz)
normalized_frequency = np.abs(freq_hz[freq_nonz_ind]) / base_frequency
freq_cent[freq_nonz_ind] = 1200 * np.log2(normalized_frequency)
return freq_cent |
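A numeric sanity check: a frequency equal to the base maps to 0 cents, one octave above maps to 1200, and zeros pass through untouched:

```python
import numpy as np

print(hz2cents(np.array([0.0, 10.0, 20.0])))
# [   0.    0. 1200.]   (10 Hz is the default base frequency)
```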
def parse_transdos(path_dir, efermi, dos_spin=1, trim_dos=False):
"""
Parses .transdos (total DOS) and .transdos_x_y (partial DOS) files
Args:
path_dir: (str) dir containing DOS files
efermi: (float) Fermi energy
dos_spin: (int) -1 for spin down, +1 for spin up
trim_dos: (bool) whether to post-process / trim DOS
Returns:
tuple - (DOS, dict of partial DOS)
"""
data_dos = {'total': [], 'partial': {}}
# parse the total DOS data
## format is energy, DOS, integrated DOS
with open(os.path.join(path_dir, "boltztrap.transdos"), 'r') as f:
count_series = 0 # TODO: why is count_series needed?
for line in f:
if line.lstrip().startswith("#"):
count_series += 1
if count_series > 1:
break
else:
data_dos['total'].append(
[Energy(float(line.split()[0]), "Ry").to("eV"),
float(line.split()[1])])
total_elec = float(line.split()[2])
lw_l = 0
hg_l = -len(data_dos['total'])
if trim_dos:
# Francesco knows what this does
# It has something to do with a trick of adding fake energies
# at the endpoints of the DOS, and then re-trimming it. This is
# to get the same energy scale for up and down spin DOS.
tmp_data = np.array(data_dos['total'])
tmp_den = np.trim_zeros(tmp_data[:, 1], 'f')[1:]
lw_l = len(tmp_data[:, 1]) - len(tmp_den)
tmp_ene = tmp_data[lw_l:, 0]
tmp_den = np.trim_zeros(tmp_den, 'b')[:-1]
hg_l = len(tmp_ene) - len(tmp_den)
tmp_ene = tmp_ene[:-hg_l]
tmp_data = np.vstack((tmp_ene, tmp_den)).T
data_dos['total'] = tmp_data.tolist()
# parse partial DOS data
for file_name in os.listdir(path_dir):
if file_name.endswith(
"transdos") and file_name != 'boltztrap.transdos':
tokens = file_name.split(".")[1].split("_")
site = tokens[1]
orb = '_'.join(tokens[2:])
with open(os.path.join(path_dir, file_name), 'r') as f:
for line in f:
if not line.lstrip().startswith(" #"):
if site not in data_dos['partial']:
data_dos['partial'][site] = {}
if orb not in data_dos['partial'][site]:
data_dos['partial'][site][orb] = []
data_dos['partial'][site][orb].append(
float(line.split()[1]))
data_dos['partial'][site][orb] = data_dos['partial'][site][
orb][lw_l:-hg_l]
dos_full = {'energy': [], 'density': []}
for t in data_dos['total']:
dos_full['energy'].append(t[0])
dos_full['density'].append(t[1])
dos = Dos(efermi, dos_full['energy'],
{Spin(dos_spin): dos_full['density']})
dos_partial = data_dos['partial'] # TODO: make this real DOS object?
return dos, dos_partial | def function[parse_transdos, parameter[path_dir, efermi, dos_spin, trim_dos]]:
constant[
Parses .transdos (total DOS) and .transdos_x_y (partial DOS) files
Args:
path_dir: (str) dir containing DOS files
efermi: (float) Fermi energy
dos_spin: (int) -1 for spin down, +1 for spin up
trim_dos: (bool) whether to post-process / trim DOS
Returns:
tuple - (DOS, dict of partial DOS)
]
variable[data_dos] assign[=] dictionary[[<ast.Constant object at 0x7da18fe910c0>, <ast.Constant object at 0x7da18fe917e0>], [<ast.List object at 0x7da18fe91c90>, <ast.Dict object at 0x7da18fe92f50>]]
with call[name[open], parameter[call[name[os].path.join, parameter[name[path_dir], constant[boltztrap.transdos]]], constant[r]]] begin[:]
variable[count_series] assign[=] constant[0]
for taget[name[line]] in starred[name[f]] begin[:]
if call[call[name[line].lstrip, parameter[]].startswith, parameter[constant[#]]] begin[:]
<ast.AugAssign object at 0x7da18fe901c0>
if compare[name[count_series] greater[>] constant[1]] begin[:]
break
variable[lw_l] assign[=] constant[0]
variable[hg_l] assign[=] <ast.UnaryOp object at 0x7da18fe90040>
if name[trim_dos] begin[:]
variable[tmp_data] assign[=] call[name[np].array, parameter[call[name[data_dos]][constant[total]]]]
variable[tmp_den] assign[=] call[call[name[np].trim_zeros, parameter[call[name[tmp_data]][tuple[[<ast.Slice object at 0x7da18fe92080>, <ast.Constant object at 0x7da18fe91d50>]]], constant[f]]]][<ast.Slice object at 0x7da18fe91e40>]
variable[lw_l] assign[=] binary_operation[call[name[len], parameter[call[name[tmp_data]][tuple[[<ast.Slice object at 0x7da18fe93940>, <ast.Constant object at 0x7da18fe91120>]]]]] - call[name[len], parameter[name[tmp_den]]]]
variable[tmp_ene] assign[=] call[name[tmp_data]][tuple[[<ast.Slice object at 0x7da18fe91690>, <ast.Constant object at 0x7da18fe93640>]]]
variable[tmp_den] assign[=] call[call[name[np].trim_zeros, parameter[name[tmp_den], constant[b]]]][<ast.Slice object at 0x7da18fe92560>]
variable[hg_l] assign[=] binary_operation[call[name[len], parameter[name[tmp_ene]]] - call[name[len], parameter[name[tmp_den]]]]
variable[tmp_ene] assign[=] call[name[tmp_ene]][<ast.Slice object at 0x7da1b1caef50>]
variable[tmp_data] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da1b1caed70>, <ast.Name object at 0x7da1b1caed40>]]]].T
call[name[data_dos]][constant[total]] assign[=] call[name[tmp_data].tolist, parameter[]]
for taget[name[file_name]] in starred[call[name[os].listdir, parameter[name[path_dir]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1caea70> begin[:]
variable[tokens] assign[=] call[call[call[name[file_name].split, parameter[constant[.]]]][constant[1]].split, parameter[constant[_]]]
variable[site] assign[=] call[name[tokens]][constant[1]]
variable[orb] assign[=] call[constant[_].join, parameter[call[name[tokens]][<ast.Slice object at 0x7da1b1cae470>]]]
with call[name[open], parameter[call[name[os].path.join, parameter[name[path_dir], name[file_name]]], constant[r]]] begin[:]
for taget[name[line]] in starred[name[f]] begin[:]
if <ast.UnaryOp object at 0x7da1b1cae0e0> begin[:]
if compare[name[site] <ast.NotIn object at 0x7da2590d7190> call[name[data_dos]][constant[partial]]] begin[:]
call[call[name[data_dos]][constant[partial]]][name[site]] assign[=] dictionary[[], []]
if compare[name[orb] <ast.NotIn object at 0x7da2590d7190> call[call[name[data_dos]][constant[partial]]][name[site]]] begin[:]
call[call[call[name[data_dos]][constant[partial]]][name[site]]][name[orb]] assign[=] list[[]]
call[call[call[call[name[data_dos]][constant[partial]]][name[site]]][name[orb]].append, parameter[call[name[float], parameter[call[call[name[line].split, parameter[]]][constant[1]]]]]]
call[call[call[name[data_dos]][constant[partial]]][name[site]]][name[orb]] assign[=] call[call[call[call[name[data_dos]][constant[partial]]][name[site]]][name[orb]]][<ast.Slice object at 0x7da1b1cacd30>]
variable[dos_full] assign[=] dictionary[[<ast.Constant object at 0x7da1b1cacbe0>, <ast.Constant object at 0x7da1b1cacbb0>], [<ast.List object at 0x7da1b1cacb80>, <ast.List object at 0x7da1b1cacb50>]]
for taget[name[t]] in starred[call[name[data_dos]][constant[total]]] begin[:]
call[call[name[dos_full]][constant[energy]].append, parameter[call[name[t]][constant[0]]]]
call[call[name[dos_full]][constant[density]].append, parameter[call[name[t]][constant[1]]]]
variable[dos] assign[=] call[name[Dos], parameter[name[efermi], call[name[dos_full]][constant[energy]], dictionary[[<ast.Call object at 0x7da1b1cac4c0>], [<ast.Subscript object at 0x7da1b1cac430>]]]]
variable[dos_partial] assign[=] call[name[data_dos]][constant[partial]]
return[tuple[[<ast.Name object at 0x7da1b1cac250>, <ast.Name object at 0x7da1b1cac220>]]] | keyword[def] identifier[parse_transdos] ( identifier[path_dir] , identifier[efermi] , identifier[dos_spin] = literal[int] , identifier[trim_dos] = keyword[False] ):
literal[string]
identifier[data_dos] ={ literal[string] :[], literal[string] :{}}
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path_dir] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[count_series] = literal[int]
keyword[for] identifier[line] keyword[in] identifier[f] :
keyword[if] identifier[line] . identifier[lstrip] (). identifier[startswith] ( literal[string] ):
identifier[count_series] += literal[int]
keyword[if] identifier[count_series] > literal[int] :
keyword[break]
keyword[else] :
identifier[data_dos] [ literal[string] ]. identifier[append] (
[ identifier[Energy] ( identifier[float] ( identifier[line] . identifier[split] ()[ literal[int] ]), literal[string] ). identifier[to] ( literal[string] ),
identifier[float] ( identifier[line] . identifier[split] ()[ literal[int] ])])
identifier[total_elec] = identifier[float] ( identifier[line] . identifier[split] ()[ literal[int] ])
identifier[lw_l] = literal[int]
identifier[hg_l] =- identifier[len] ( identifier[data_dos] [ literal[string] ])
keyword[if] identifier[trim_dos] :
identifier[tmp_data] = identifier[np] . identifier[array] ( identifier[data_dos] [ literal[string] ])
identifier[tmp_den] = identifier[np] . identifier[trim_zeros] ( identifier[tmp_data] [:, literal[int] ], literal[string] )[ literal[int] :]
identifier[lw_l] = identifier[len] ( identifier[tmp_data] [:, literal[int] ])- identifier[len] ( identifier[tmp_den] )
identifier[tmp_ene] = identifier[tmp_data] [ identifier[lw_l] :, literal[int] ]
identifier[tmp_den] = identifier[np] . identifier[trim_zeros] ( identifier[tmp_den] , literal[string] )[:- literal[int] ]
identifier[hg_l] = identifier[len] ( identifier[tmp_ene] )- identifier[len] ( identifier[tmp_den] )
identifier[tmp_ene] = identifier[tmp_ene] [:- identifier[hg_l] ]
identifier[tmp_data] = identifier[np] . identifier[vstack] (( identifier[tmp_ene] , identifier[tmp_den] )). identifier[T]
identifier[data_dos] [ literal[string] ]= identifier[tmp_data] . identifier[tolist] ()
keyword[for] identifier[file_name] keyword[in] identifier[os] . identifier[listdir] ( identifier[path_dir] ):
keyword[if] identifier[file_name] . identifier[endswith] (
literal[string] ) keyword[and] identifier[file_name] != literal[string] :
identifier[tokens] = identifier[file_name] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[site] = identifier[tokens] [ literal[int] ]
identifier[orb] = literal[string] . identifier[join] ( identifier[tokens] [ literal[int] :])
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path_dir] , identifier[file_name] ), literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[line] keyword[in] identifier[f] :
keyword[if] keyword[not] identifier[line] . identifier[lstrip] (). identifier[startswith] ( literal[string] ):
keyword[if] identifier[site] keyword[not] keyword[in] identifier[data_dos] [ literal[string] ]:
identifier[data_dos] [ literal[string] ][ identifier[site] ]={}
keyword[if] identifier[orb] keyword[not] keyword[in] identifier[data_dos] [ literal[string] ][ identifier[site] ]:
identifier[data_dos] [ literal[string] ][ identifier[site] ][ identifier[orb] ]=[]
identifier[data_dos] [ literal[string] ][ identifier[site] ][ identifier[orb] ]. identifier[append] (
identifier[float] ( identifier[line] . identifier[split] ()[ literal[int] ]))
identifier[data_dos] [ literal[string] ][ identifier[site] ][ identifier[orb] ]= identifier[data_dos] [ literal[string] ][ identifier[site] ][
identifier[orb] ][ identifier[lw_l] :- identifier[hg_l] ]
identifier[dos_full] ={ literal[string] :[], literal[string] :[]}
keyword[for] identifier[t] keyword[in] identifier[data_dos] [ literal[string] ]:
identifier[dos_full] [ literal[string] ]. identifier[append] ( identifier[t] [ literal[int] ])
identifier[dos_full] [ literal[string] ]. identifier[append] ( identifier[t] [ literal[int] ])
identifier[dos] = identifier[Dos] ( identifier[efermi] , identifier[dos_full] [ literal[string] ],
{ identifier[Spin] ( identifier[dos_spin] ): identifier[dos_full] [ literal[string] ]})
identifier[dos_partial] = identifier[data_dos] [ literal[string] ]
keyword[return] identifier[dos] , identifier[dos_partial] | def parse_transdos(path_dir, efermi, dos_spin=1, trim_dos=False):
"""
Parses .transdos (total DOS) and .transdos_x_y (partial DOS) files
Args:
path_dir: (str) dir containing DOS files
efermi: (float) Fermi energy
dos_spin: (int) -1 for spin down, +1 for spin up
trim_dos: (bool) whether to post-process / trim DOS
Returns:
tuple - (DOS, dict of partial DOS)
"""
data_dos = {'total': [], 'partial': {}}
# parse the total DOS data
## format is energy, DOS, integrated DOS
with open(os.path.join(path_dir, 'boltztrap.transdos'), 'r') as f:
count_series = 0 # TODO: why is count_series needed?
for line in f:
if line.lstrip().startswith('#'):
count_series += 1
if count_series > 1:
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
data_dos['total'].append([Energy(float(line.split()[0]), 'Ry').to('eV'), float(line.split()[1])])
total_elec = float(line.split()[2]) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
lw_l = 0
hg_l = -len(data_dos['total'])
if trim_dos:
# Francesco knows what this does
# It has something to do with a trick of adding fake energies
# at the endpoints of the DOS, and then re-trimming it. This is
# to get the same energy scale for up and down spin DOS.
tmp_data = np.array(data_dos['total'])
tmp_den = np.trim_zeros(tmp_data[:, 1], 'f')[1:]
lw_l = len(tmp_data[:, 1]) - len(tmp_den)
tmp_ene = tmp_data[lw_l:, 0]
tmp_den = np.trim_zeros(tmp_den, 'b')[:-1]
hg_l = len(tmp_ene) - len(tmp_den)
tmp_ene = tmp_ene[:-hg_l]
tmp_data = np.vstack((tmp_ene, tmp_den)).T
data_dos['total'] = tmp_data.tolist() # depends on [control=['if'], data=[]]
# parse partial DOS data
for file_name in os.listdir(path_dir):
if file_name.endswith('transdos') and file_name != 'boltztrap.transdos':
tokens = file_name.split('.')[1].split('_')
site = tokens[1]
orb = '_'.join(tokens[2:])
with open(os.path.join(path_dir, file_name), 'r') as f:
for line in f:
if not line.lstrip().startswith(' #'):
if site not in data_dos['partial']:
data_dos['partial'][site] = {} # depends on [control=['if'], data=['site']]
if orb not in data_dos['partial'][site]:
data_dos['partial'][site][orb] = [] # depends on [control=['if'], data=['orb']]
data_dos['partial'][site][orb].append(float(line.split()[1])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
data_dos['partial'][site][orb] = data_dos['partial'][site][orb][lw_l:-hg_l] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file_name']]
dos_full = {'energy': [], 'density': []}
for t in data_dos['total']:
dos_full['energy'].append(t[0])
dos_full['density'].append(t[1]) # depends on [control=['for'], data=['t']]
dos = Dos(efermi, dos_full['energy'], {Spin(dos_spin): dos_full['density']})
dos_partial = data_dos['partial'] # TODO: make this real DOS object?
return (dos, dos_partial) |
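The only unit handling happens at read time, where each energy is converted from Rydberg to electron-volts via pymatgen's Energy class. The same conversion with a plain constant, for reference (1 Ry is about 13.6057 eV):

```python
RY_TO_EV = 13.605693122994  # CODATA value for 1 Ry in eV

energy_ry = 0.5
print(energy_ry * RY_TO_EV)  # ~6.8028 eV
```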
def abort_transaction(self):
"""Abort a multi-statement transaction.
.. versionadded:: 3.7
"""
self._check_ended()
state = self._transaction.state
if state is _TxnState.NONE:
raise InvalidOperation("No transaction started")
elif state is _TxnState.STARTING:
# Server transaction was never started, no need to send a command.
self._transaction.state = _TxnState.ABORTED
return
elif state is _TxnState.ABORTED:
raise InvalidOperation("Cannot call abortTransaction twice")
elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY):
raise InvalidOperation(
"Cannot call abortTransaction after calling commitTransaction")
try:
self._finish_transaction_with_retry("abortTransaction", False)
except (OperationFailure, ConnectionFailure):
# The transactions spec says to ignore abortTransaction errors.
pass
finally:
self._transaction.state = _TxnState.ABORTED | def function[abort_transaction, parameter[self]]:
constant[Abort a multi-statement transaction.
.. versionadded:: 3.7
]
call[name[self]._check_ended, parameter[]]
variable[state] assign[=] name[self]._transaction.state
if compare[name[state] is name[_TxnState].NONE] begin[:]
<ast.Raise object at 0x7da20c6a94e0>
<ast.Try object at 0x7da20c6ab100> | keyword[def] identifier[abort_transaction] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_check_ended] ()
identifier[state] = identifier[self] . identifier[_transaction] . identifier[state]
keyword[if] identifier[state] keyword[is] identifier[_TxnState] . identifier[NONE] :
keyword[raise] identifier[InvalidOperation] ( literal[string] )
keyword[elif] identifier[state] keyword[is] identifier[_TxnState] . identifier[STARTING] :
identifier[self] . identifier[_transaction] . identifier[state] = identifier[_TxnState] . identifier[ABORTED]
keyword[return]
keyword[elif] identifier[state] keyword[is] identifier[_TxnState] . identifier[ABORTED] :
keyword[raise] identifier[InvalidOperation] ( literal[string] )
keyword[elif] identifier[state] keyword[in] ( identifier[_TxnState] . identifier[COMMITTED] , identifier[_TxnState] . identifier[COMMITTED_EMPTY] ):
keyword[raise] identifier[InvalidOperation] (
literal[string] )
keyword[try] :
identifier[self] . identifier[_finish_transaction_with_retry] ( literal[string] , keyword[False] )
keyword[except] ( identifier[OperationFailure] , identifier[ConnectionFailure] ):
keyword[pass]
keyword[finally] :
identifier[self] . identifier[_transaction] . identifier[state] = identifier[_TxnState] . identifier[ABORTED] | def abort_transaction(self):
"""Abort a multi-statement transaction.
.. versionadded:: 3.7
"""
self._check_ended()
state = self._transaction.state
if state is _TxnState.NONE:
raise InvalidOperation('No transaction started') # depends on [control=['if'], data=[]]
elif state is _TxnState.STARTING:
# Server transaction was never started, no need to send a command.
self._transaction.state = _TxnState.ABORTED
return # depends on [control=['if'], data=[]]
elif state is _TxnState.ABORTED:
raise InvalidOperation('Cannot call abortTransaction twice') # depends on [control=['if'], data=[]]
elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY):
raise InvalidOperation('Cannot call abortTransaction after calling commitTransaction') # depends on [control=['if'], data=[]]
try:
self._finish_transaction_with_retry('abortTransaction', False) # depends on [control=['try'], data=[]]
except (OperationFailure, ConnectionFailure):
# The transactions spec says to ignore abortTransaction errors.
pass # depends on [control=['except'], data=[]]
finally:
self._transaction.state = _TxnState.ABORTED |
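For context, the call sequence this state machine guards is the standard PyMongo session flow; a hedged sketch (it needs a live replica-set deployment, and the URI and collection below are assumptions):

```python
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017/?replicaSet=rs0")  # assumed URI
coll = client.testdb.items

with client.start_session() as session:
    session.start_transaction()
    try:
        coll.insert_one({"x": 1}, session=session)
        session.commit_transaction()
    except Exception:
        session.abort_transaction()  # the method traced above
        raise
```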
def to_pngs(pdf_path):
    ''' Converts a multi-page pdf to a list of pngs via the `sips` command
:returns: A list of converted pngs
'''
pdf_list = split_pdf(pdf_path)
pngs = []
for pdf in pdf_list:
pngs.append(to_png(pdf))
os.remove(pdf) # Clean up
return pngs | def function[to_pngs, parameter[pdf_path]]:
    constant[ Converts a multi-page pdf to a list of pngs via the `sips` command
:returns: A list of converted pngs
]
variable[pdf_list] assign[=] call[name[split_pdf], parameter[name[pdf_path]]]
variable[pngs] assign[=] list[[]]
for taget[name[pdf]] in starred[name[pdf_list]] begin[:]
call[name[pngs].append, parameter[call[name[to_png], parameter[name[pdf]]]]]
call[name[os].remove, parameter[name[pdf]]]
return[name[pngs]] | keyword[def] identifier[to_pngs] ( identifier[pdf_path] ):
literal[string]
identifier[pdf_list] = identifier[split_pdf] ( identifier[pdf_path] )
identifier[pngs] =[]
keyword[for] identifier[pdf] keyword[in] identifier[pdf_list] :
identifier[pngs] . identifier[append] ( identifier[to_png] ( identifier[pdf] ))
identifier[os] . identifier[remove] ( identifier[pdf] )
keyword[return] identifier[pngs] | def to_pngs(pdf_path):
""" Converts a multi-page pdfs to a list of pngs via the `sips` command
:returns: A list of converted pngs
"""
pdf_list = split_pdf(pdf_path)
pngs = []
for pdf in pdf_list:
pngs.append(to_png(pdf))
os.remove(pdf) # Clean up # depends on [control=['for'], data=['pdf']]
return pngs |
def float2json(value):
"""
CONVERT NUMBER TO JSON STRING, WITH BETTER CONTROL OVER ACCURACY
:param value: float, int, long, Decimal
:return: unicode
"""
if value == 0:
return u'0'
try:
sign = "-" if value < 0 else ""
value = abs(value)
sci = value.__format__(".15e")
mantissa, str_exp = sci.split("e")
digits, more_digits = _snap_to_base_10(mantissa)
int_exp = int(str_exp) + more_digits
if int_exp > 15:
return sign + digits[0] + '.' + (digits[1:].rstrip('0') or '0') + u"e" + text_type(int_exp)
elif int_exp >= 0:
return sign + (digits[:1 + int_exp] + '.' + digits[1 + int_exp:].rstrip('0')).rstrip('.')
elif -4 < int_exp:
digits = ("0" * (-int_exp)) + digits
return sign + (digits[:1] + '.' + digits[1:].rstrip('0')).rstrip('.')
else:
return sign + digits[0] + '.' + (digits[1:].rstrip('0') or '0') + u"e" + text_type(int_exp)
except Exception as e:
from mo_logs import Log
Log.error("not expected", e) | def function[float2json, parameter[value]]:
constant[
CONVERT NUMBER TO JSON STRING, WITH BETTER CONTROL OVER ACCURACY
:param value: float, int, long, Decimal
:return: unicode
]
if compare[name[value] equal[==] constant[0]] begin[:]
return[constant[0]]
<ast.Try object at 0x7da1b1f9e3b0> | keyword[def] identifier[float2json] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] == literal[int] :
keyword[return] literal[string]
keyword[try] :
identifier[sign] = literal[string] keyword[if] identifier[value] < literal[int] keyword[else] literal[string]
identifier[value] = identifier[abs] ( identifier[value] )
identifier[sci] = identifier[value] . identifier[__format__] ( literal[string] )
identifier[mantissa] , identifier[str_exp] = identifier[sci] . identifier[split] ( literal[string] )
identifier[digits] , identifier[more_digits] = identifier[_snap_to_base_10] ( identifier[mantissa] )
identifier[int_exp] = identifier[int] ( identifier[str_exp] )+ identifier[more_digits]
keyword[if] identifier[int_exp] > literal[int] :
keyword[return] identifier[sign] + identifier[digits] [ literal[int] ]+ literal[string] +( identifier[digits] [ literal[int] :]. identifier[rstrip] ( literal[string] ) keyword[or] literal[string] )+ literal[string] + identifier[text_type] ( identifier[int_exp] )
keyword[elif] identifier[int_exp] >= literal[int] :
keyword[return] identifier[sign] +( identifier[digits] [: literal[int] + identifier[int_exp] ]+ literal[string] + identifier[digits] [ literal[int] + identifier[int_exp] :]. identifier[rstrip] ( literal[string] )). identifier[rstrip] ( literal[string] )
keyword[elif] - literal[int] < identifier[int_exp] :
identifier[digits] =( literal[string] *(- identifier[int_exp] ))+ identifier[digits]
keyword[return] identifier[sign] +( identifier[digits] [: literal[int] ]+ literal[string] + identifier[digits] [ literal[int] :]. identifier[rstrip] ( literal[string] )). identifier[rstrip] ( literal[string] )
keyword[else] :
keyword[return] identifier[sign] + identifier[digits] [ literal[int] ]+ literal[string] +( identifier[digits] [ literal[int] :]. identifier[rstrip] ( literal[string] ) keyword[or] literal[string] )+ literal[string] + identifier[text_type] ( identifier[int_exp] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[from] identifier[mo_logs] keyword[import] identifier[Log]
identifier[Log] . identifier[error] ( literal[string] , identifier[e] ) | def float2json(value):
"""
CONVERT NUMBER TO JSON STRING, WITH BETTER CONTROL OVER ACCURACY
:param value: float, int, long, Decimal
:return: unicode
"""
if value == 0:
return u'0' # depends on [control=['if'], data=[]]
try:
sign = '-' if value < 0 else ''
value = abs(value)
sci = value.__format__('.15e')
(mantissa, str_exp) = sci.split('e')
(digits, more_digits) = _snap_to_base_10(mantissa)
int_exp = int(str_exp) + more_digits
if int_exp > 15:
return sign + digits[0] + '.' + (digits[1:].rstrip('0') or '0') + u'e' + text_type(int_exp) # depends on [control=['if'], data=['int_exp']]
elif int_exp >= 0:
return sign + (digits[:1 + int_exp] + '.' + digits[1 + int_exp:].rstrip('0')).rstrip('.') # depends on [control=['if'], data=['int_exp']]
elif -4 < int_exp:
digits = '0' * -int_exp + digits
return sign + (digits[:1] + '.' + digits[1:].rstrip('0')).rstrip('.') # depends on [control=['if'], data=['int_exp']]
else:
return sign + digits[0] + '.' + (digits[1:].rstrip('0') or '0') + u'e' + text_type(int_exp) # depends on [control=['try'], data=[]]
except Exception as e:
from mo_logs import Log
Log.error('not expected', e) # depends on [control=['except'], data=['e']] |
def pull(image, tag=None):
""" pull a docker image """
if tag:
image = ":".join([image, tag])
utils.xrun("docker pull", [image]) | def function[pull, parameter[image, tag]]:
constant[ pull a docker image ]
if name[tag] begin[:]
variable[image] assign[=] call[constant[:].join, parameter[list[[<ast.Name object at 0x7da1b0c21120>, <ast.Name object at 0x7da1b0c21150>]]]]
call[name[utils].xrun, parameter[constant[docker pull], list[[<ast.Name object at 0x7da1b0c212a0>]]]] | keyword[def] identifier[pull] ( identifier[image] , identifier[tag] = keyword[None] ):
literal[string]
keyword[if] identifier[tag] :
identifier[image] = literal[string] . identifier[join] ([ identifier[image] , identifier[tag] ])
identifier[utils] . identifier[xrun] ( literal[string] ,[ identifier[image] ]) | def pull(image, tag=None):
""" pull a docker image """
if tag:
image = ':'.join([image, tag]) # depends on [control=['if'], data=[]]
utils.xrun('docker pull', [image]) |
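Usage is straightforward: when a tag is given it is joined with a colon before shelling out through the module's xrun wrapper:

```python
pull("ubuntu")              # runs: docker pull ubuntu
pull("python", tag="3.11")  # runs: docker pull python:3.11
```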
def download_all_inputs(exclude=None, parallel=False, max_threads=8):
'''
:param exclude: List of input variables that should not be downloaded.
:type exclude: Array of strings
:param parallel: Should we download multiple files in parallel? (default: False)
    :type parallel: boolean
:param max_threads: If parallel is True, how many threads should be used
to download files? (default: 8)
    :type max_threads: int
:returns: dict of lists of strings where each key is the input variable
and each list element is the full path to the file that has
been downloaded.
This function downloads all files that were supplied as inputs to the app.
By convention, if an input parameter "FOO" has value
{"$dnanexus_link": "file-xxxx"}
and filename INPUT.TXT, then the linked file will be downloaded into the
path:
$HOME/in/FOO/INPUT.TXT
If an input is an array of files, then all files will be placed into
numbered subdirectories under a parent directory named for the
input. For example, if the input key is FOO, and the inputs are {A, B,
C}.vcf then, the directory structure will be:
$HOME/in/FOO/0/A.vcf
1/B.vcf
2/C.vcf
Zero padding is used to ensure argument order. For example, if there are
12 input files {A, B, C, D, E, F, G, H, I, J, K, L}.txt, the directory
structure will be:
$HOME/in/FOO/00/A.vcf
...
11/L.vcf
This allows using shell globbing (FOO/*/*.vcf) to get all the files in the input
order and prevents issues with files which have the same filename.'''
# Input directory, where all inputs are downloaded
idir = file_load_utils.get_input_dir()
try:
job_input_file = file_load_utils.get_input_json_file()
dirs, inputs, rest = file_load_utils.get_job_input_filenames(job_input_file)
except IOError:
msg = 'Error: Could not find the input json file: {0}.\n'.format(job_input_file)
msg += ' This function should only be called from within a running job.'
print(msg)
raise
# Exclude directories
# dirs contain all folders (e.g. $HOME/in/FOO) and their sub folders (e.g. $HOME/in/FOO/1, $HOME/in/FOO/2, etc.)
# If the main folder is excluded, its sub-folder would also be excluded from dirs_to_create
dirs_to_create = []
for d in dirs:
keep = True
if (exclude is not None) and (d is not None):
if (d.split('/')[0] in exclude):
keep = False
if keep:
dirs_to_create.append(d)
# Create the directory structure, in preparation for download.
# Allows performing the download in parallel.
_create_dirs(idir, dirs_to_create)
# Remove excluded inputs
if exclude:
inputs = file_load_utils.filter_dict(inputs, exclude)
# Convert to a flat list of elements to download
to_download = []
for ival_list in inputs.values():
to_download.extend(ival_list)
# Download the files
if parallel:
total_mem = psutil.virtual_memory().total >> 20 # Total RAM in MB
num_cores = multiprocessing.cpu_count()
max_num_parallel_downloads = _get_num_parallel_threads(max_threads, num_cores, total_mem)
sys.stderr.write("Downloading files using {} threads".format(max_num_parallel_downloads))
_parallel_file_download(to_download, idir, max_num_parallel_downloads)
else:
_sequential_file_download(to_download, idir)
helper_vars = _gen_helper_dict(inputs)
return helper_vars | def function[download_all_inputs, parameter[exclude, parallel, max_threads]]:
constant[
:param exclude: List of input variables that should not be downloaded.
:type exclude: Array of strings
:param parallel: Should we download multiple files in parallel? (default: False)
    :type parallel: boolean
:param max_threads: If parallel is True, how many threads should be used
to download files? (default: 8)
    :type max_threads: int
:returns: dict of lists of strings where each key is the input variable
and each list element is the full path to the file that has
been downloaded.
This function downloads all files that were supplied as inputs to the app.
By convention, if an input parameter "FOO" has value
{"$dnanexus_link": "file-xxxx"}
and filename INPUT.TXT, then the linked file will be downloaded into the
path:
$HOME/in/FOO/INPUT.TXT
If an input is an array of files, then all files will be placed into
numbered subdirectories under a parent directory named for the
input. For example, if the input key is FOO, and the inputs are {A, B,
C}.vcf then, the directory structure will be:
$HOME/in/FOO/0/A.vcf
1/B.vcf
2/C.vcf
Zero padding is used to ensure argument order. For example, if there are
12 input files {A, B, C, D, E, F, G, H, I, J, K, L}.txt, the directory
structure will be:
$HOME/in/FOO/00/A.vcf
...
11/L.vcf
This allows using shell globbing (FOO/*/*.vcf) to get all the files in the input
order and prevents issues with files which have the same filename.]
variable[idir] assign[=] call[name[file_load_utils].get_input_dir, parameter[]]
<ast.Try object at 0x7da18ede66b0>
variable[dirs_to_create] assign[=] list[[]]
for taget[name[d]] in starred[name[dirs]] begin[:]
variable[keep] assign[=] constant[True]
if <ast.BoolOp object at 0x7da18ede4e20> begin[:]
if compare[call[call[name[d].split, parameter[constant[/]]]][constant[0]] in name[exclude]] begin[:]
variable[keep] assign[=] constant[False]
if name[keep] begin[:]
call[name[dirs_to_create].append, parameter[name[d]]]
call[name[_create_dirs], parameter[name[idir], name[dirs_to_create]]]
if name[exclude] begin[:]
variable[inputs] assign[=] call[name[file_load_utils].filter_dict, parameter[name[inputs], name[exclude]]]
variable[to_download] assign[=] list[[]]
for taget[name[ival_list]] in starred[call[name[inputs].values, parameter[]]] begin[:]
call[name[to_download].extend, parameter[name[ival_list]]]
if name[parallel] begin[:]
variable[total_mem] assign[=] binary_operation[call[name[psutil].virtual_memory, parameter[]].total <ast.RShift object at 0x7da2590d6a40> constant[20]]
variable[num_cores] assign[=] call[name[multiprocessing].cpu_count, parameter[]]
variable[max_num_parallel_downloads] assign[=] call[name[_get_num_parallel_threads], parameter[name[max_threads], name[num_cores], name[total_mem]]]
call[name[sys].stderr.write, parameter[call[constant[Downloading files using {} threads].format, parameter[name[max_num_parallel_downloads]]]]]
call[name[_parallel_file_download], parameter[name[to_download], name[idir], name[max_num_parallel_downloads]]]
variable[helper_vars] assign[=] call[name[_gen_helper_dict], parameter[name[inputs]]]
return[name[helper_vars]] | keyword[def] identifier[download_all_inputs] ( identifier[exclude] = keyword[None] , identifier[parallel] = keyword[False] , identifier[max_threads] = literal[int] ):
literal[string]
identifier[idir] = identifier[file_load_utils] . identifier[get_input_dir] ()
keyword[try] :
identifier[job_input_file] = identifier[file_load_utils] . identifier[get_input_json_file] ()
identifier[dirs] , identifier[inputs] , identifier[rest] = identifier[file_load_utils] . identifier[get_job_input_filenames] ( identifier[job_input_file] )
keyword[except] identifier[IOError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[job_input_file] )
identifier[msg] += literal[string]
identifier[print] ( identifier[msg] )
keyword[raise]
identifier[dirs_to_create] =[]
keyword[for] identifier[d] keyword[in] identifier[dirs] :
identifier[keep] = keyword[True]
keyword[if] ( identifier[exclude] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[d] keyword[is] keyword[not] keyword[None] ):
keyword[if] ( identifier[d] . identifier[split] ( literal[string] )[ literal[int] ] keyword[in] identifier[exclude] ):
identifier[keep] = keyword[False]
keyword[if] identifier[keep] :
identifier[dirs_to_create] . identifier[append] ( identifier[d] )
identifier[_create_dirs] ( identifier[idir] , identifier[dirs_to_create] )
keyword[if] identifier[exclude] :
identifier[inputs] = identifier[file_load_utils] . identifier[filter_dict] ( identifier[inputs] , identifier[exclude] )
identifier[to_download] =[]
keyword[for] identifier[ival_list] keyword[in] identifier[inputs] . identifier[values] ():
identifier[to_download] . identifier[extend] ( identifier[ival_list] )
keyword[if] identifier[parallel] :
identifier[total_mem] = identifier[psutil] . identifier[virtual_memory] (). identifier[total] >> literal[int]
identifier[num_cores] = identifier[multiprocessing] . identifier[cpu_count] ()
identifier[max_num_parallel_downloads] = identifier[_get_num_parallel_threads] ( identifier[max_threads] , identifier[num_cores] , identifier[total_mem] )
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[max_num_parallel_downloads] ))
identifier[_parallel_file_download] ( identifier[to_download] , identifier[idir] , identifier[max_num_parallel_downloads] )
keyword[else] :
identifier[_sequential_file_download] ( identifier[to_download] , identifier[idir] )
identifier[helper_vars] = identifier[_gen_helper_dict] ( identifier[inputs] )
keyword[return] identifier[helper_vars] | def download_all_inputs(exclude=None, parallel=False, max_threads=8):
"""
:param exclude: List of input variables that should not be downloaded.
:type exclude: Array of strings
:param parallel: Should we download multiple files in parallel? (default: False)
    :type parallel: boolean
:param max_threads: If parallel is True, how many threads should be used
to download files? (default: 8)
    :type max_threads: int
:returns: dict of lists of strings where each key is the input variable
and each list element is the full path to the file that has
been downloaded.
This function downloads all files that were supplied as inputs to the app.
By convention, if an input parameter "FOO" has value
{"$dnanexus_link": "file-xxxx"}
and filename INPUT.TXT, then the linked file will be downloaded into the
path:
$HOME/in/FOO/INPUT.TXT
If an input is an array of files, then all files will be placed into
numbered subdirectories under a parent directory named for the
input. For example, if the input key is FOO, and the inputs are {A, B,
C}.vcf then, the directory structure will be:
$HOME/in/FOO/0/A.vcf
1/B.vcf
2/C.vcf
Zero padding is used to ensure argument order. For example, if there are
12 input files {A, B, C, D, E, F, G, H, I, J, K, L}.txt, the directory
structure will be:
$HOME/in/FOO/00/A.vcf
...
11/L.vcf
This allows using shell globbing (FOO/*/*.vcf) to get all the files in the input
order and prevents issues with files which have the same filename."""
# Input directory, where all inputs are downloaded
idir = file_load_utils.get_input_dir()
try:
job_input_file = file_load_utils.get_input_json_file()
(dirs, inputs, rest) = file_load_utils.get_job_input_filenames(job_input_file) # depends on [control=['try'], data=[]]
except IOError:
msg = 'Error: Could not find the input json file: {0}.\n'.format(job_input_file)
msg += ' This function should only be called from within a running job.'
print(msg)
raise # depends on [control=['except'], data=[]]
# Exclude directories
# dirs contain all folders (e.g. $HOME/in/FOO) and their sub folders (e.g. $HOME/in/FOO/1, $HOME/in/FOO/2, etc.)
# If the main folder is excluded, its sub-folder would also be excluded from dirs_to_create
dirs_to_create = []
for d in dirs:
keep = True
if exclude is not None and d is not None:
if d.split('/')[0] in exclude:
keep = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if keep:
dirs_to_create.append(d) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
# Create the directory structure, in preparation for download.
# Allows performing the download in parallel.
_create_dirs(idir, dirs_to_create)
# Remove excluded inputs
if exclude:
inputs = file_load_utils.filter_dict(inputs, exclude) # depends on [control=['if'], data=[]]
# Convert to a flat list of elements to download
to_download = []
for ival_list in inputs.values():
to_download.extend(ival_list) # depends on [control=['for'], data=['ival_list']]
# Download the files
if parallel:
total_mem = psutil.virtual_memory().total >> 20 # Total RAM in MB
num_cores = multiprocessing.cpu_count()
max_num_parallel_downloads = _get_num_parallel_threads(max_threads, num_cores, total_mem)
sys.stderr.write('Downloading files using {} threads'.format(max_num_parallel_downloads))
_parallel_file_download(to_download, idir, max_num_parallel_downloads) # depends on [control=['if'], data=[]]
else:
_sequential_file_download(to_download, idir)
helper_vars = _gen_helper_dict(inputs)
return helper_vars |
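Illustrative sketch of the zero-padded directory layout the docstring describes, for an array input named FOO; numbered_subdirs is a hypothetical helper written here only to show the naming rule (POSIX paths shown):

import os

def numbered_subdirs(input_name, filenames):
    width = len(str(len(filenames) - 1))  # 12 files -> width 2: 00..11
    return [os.path.join('in', input_name, str(i).zfill(width), name)
            for i, name in enumerate(filenames)]

print(numbered_subdirs('FOO', ['A.vcf', 'B.vcf', 'C.vcf']))
# ['in/FOO/0/A.vcf', 'in/FOO/1/B.vcf', 'in/FOO/2/C.vcf']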
def PushEventSource(self, event_source):
"""Pushes an event source onto the heap.
Args:
event_source (EventSource): event source.
"""
if event_source.file_entry_type == (
dfvfs_definitions.FILE_ENTRY_TYPE_DIRECTORY):
weight = 1
else:
weight = 100
heap_values = (weight, time.time(), event_source)
heapq.heappush(self._heap, heap_values) | def function[PushEventSource, parameter[self, event_source]]:
constant[Pushes an event source onto the heap.
Args:
event_source (EventSource): event source.
]
if compare[name[event_source].file_entry_type equal[==] name[dfvfs_definitions].FILE_ENTRY_TYPE_DIRECTORY] begin[:]
variable[weight] assign[=] constant[1]
variable[heap_values] assign[=] tuple[[<ast.Name object at 0x7da20c796aa0>, <ast.Call object at 0x7da20c795a50>, <ast.Name object at 0x7da20c795720>]]
call[name[heapq].heappush, parameter[name[self]._heap, name[heap_values]]] | keyword[def] identifier[PushEventSource] ( identifier[self] , identifier[event_source] ):
literal[string]
keyword[if] identifier[event_source] . identifier[file_entry_type] ==(
identifier[dfvfs_definitions] . identifier[FILE_ENTRY_TYPE_DIRECTORY] ):
identifier[weight] = literal[int]
keyword[else] :
identifier[weight] = literal[int]
identifier[heap_values] =( identifier[weight] , identifier[time] . identifier[time] (), identifier[event_source] )
identifier[heapq] . identifier[heappush] ( identifier[self] . identifier[_heap] , identifier[heap_values] ) | def PushEventSource(self, event_source):
"""Pushes an event source onto the heap.
Args:
event_source (EventSource): event source.
"""
if event_source.file_entry_type == dfvfs_definitions.FILE_ENTRY_TYPE_DIRECTORY:
weight = 1 # depends on [control=['if'], data=[]]
else:
weight = 100
heap_values = (weight, time.time(), event_source)
heapq.heappush(self._heap, heap_values) |
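Minimal sketch of the ordering the (weight, timestamp, event_source) tuples produce on the heap: directory entries (weight 1) pop before file entries (weight 100), and insertion time breaks ties within a weight class.

import heapq
import time

heap = []
heapq.heappush(heap, (100, time.time(), 'file_entry_a'))
heapq.heappush(heap, (1, time.time(), 'directory_entry'))
heapq.heappush(heap, (100, time.time(), 'file_entry_b'))
print([heapq.heappop(heap)[2] for _ in range(3)])
# ['directory_entry', 'file_entry_a', 'file_entry_b']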
def get_options(argv=None):
"""
    Parse the command line options.
    Return the parsed options and the list of file arguments.
"""
parser = argparse.ArgumentParser(usage="spyder [options] files")
parser.add_argument('--new-instance', action='store_true', default=False,
help="Run a new instance of Spyder, even if the single "
"instance mode has been turned on (default)")
parser.add_argument('--defaults', dest="reset_to_defaults",
action='store_true', default=False,
help="Reset configuration settings to defaults")
parser.add_argument('--reset', dest="reset_config_files",
action='store_true', default=False,
help="Remove all configuration files!")
parser.add_argument('--optimize', action='store_true', default=False,
help="Optimize Spyder bytecode (this may require "
"administrative privileges)")
parser.add_argument('-w', '--workdir', dest="working_directory", default=None,
help="Default working directory")
parser.add_argument('--hide-console', action='store_true', default=False,
help="Hide parent console window (Windows)")
parser.add_argument('--show-console', action='store_true', default=False,
help="(Deprecated) Does nothing, now the default behavior "
"is to show the console")
parser.add_argument('--multithread', dest="multithreaded",
action='store_true', default=False,
help="Internal console is executed in another thread "
"(separate from main application thread)")
parser.add_argument('--profile', action='store_true', default=False,
help="Profile mode (internal test, "
"not related with Python profiling)")
parser.add_argument('--window-title', type=str, default=None,
help="String to show in the main window title")
parser.add_argument('-p', '--project', default=None, type=str,
dest="project",
help="Path that contains an Spyder project")
parser.add_argument('--opengl', default=None,
dest="opengl_implementation",
choices=['software', 'desktop', 'gles'],
help=("OpenGL implementation to pass to Qt")
)
parser.add_argument('--debug-info', default=None,
dest="debug_info",
choices=['minimal', 'verbose'],
help=("Level of internal debugging info to give. "
"'minimal' only logs a small amount of "
"confirmation messages and 'verbose' logs a "
"lot of detailed information.")
)
parser.add_argument('--debug-output', default='terminal',
dest="debug_output",
choices=['terminal', 'file'],
help=("Print internal debugging info either to the "
"terminal or to a file called spyder-debug.log "
"in your current working directory. Default is "
"'terminal'.")
)
parser.add_argument('files', nargs='*')
options = parser.parse_args(argv)
args = options.files
return options, args | def function[get_options, parameter[argv]]:
constant[
    Parse the command line options.
    Return the parsed options and the list of file arguments.
]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--new-instance]]]
call[name[parser].add_argument, parameter[constant[--defaults]]]
call[name[parser].add_argument, parameter[constant[--reset]]]
call[name[parser].add_argument, parameter[constant[--optimize]]]
call[name[parser].add_argument, parameter[constant[-w], constant[--workdir]]]
call[name[parser].add_argument, parameter[constant[--hide-console]]]
call[name[parser].add_argument, parameter[constant[--show-console]]]
call[name[parser].add_argument, parameter[constant[--multithread]]]
call[name[parser].add_argument, parameter[constant[--profile]]]
call[name[parser].add_argument, parameter[constant[--window-title]]]
call[name[parser].add_argument, parameter[constant[-p], constant[--project]]]
call[name[parser].add_argument, parameter[constant[--opengl]]]
call[name[parser].add_argument, parameter[constant[--debug-info]]]
call[name[parser].add_argument, parameter[constant[--debug-output]]]
call[name[parser].add_argument, parameter[constant[files]]]
variable[options] assign[=] call[name[parser].parse_args, parameter[name[argv]]]
variable[args] assign[=] name[options].files
return[tuple[[<ast.Name object at 0x7da18bcc96c0>, <ast.Name object at 0x7da18bcca1a0>]]] | keyword[def] identifier[get_options] ( identifier[argv] = keyword[None] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[usage] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] ,
identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] ,
identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[dest] = literal[string] ,
identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] ,
identifier[help] = literal[string]
literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[default] = keyword[None] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = keyword[None] , identifier[type] = identifier[str] ,
identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[None] ,
identifier[dest] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] , literal[string] ],
identifier[help] =( literal[string] )
)
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[None] ,
identifier[dest] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] ],
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string] )
)
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = literal[string] ,
identifier[dest] = literal[string] ,
identifier[choices] =[ literal[string] , literal[string] ],
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string] )
)
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] )
identifier[options] = identifier[parser] . identifier[parse_args] ( identifier[argv] )
identifier[args] = identifier[options] . identifier[files]
keyword[return] identifier[options] , identifier[args] | def get_options(argv=None):
"""
    Parse the command line options.
    Return the parsed options and the list of file arguments.
"""
parser = argparse.ArgumentParser(usage='spyder [options] files')
parser.add_argument('--new-instance', action='store_true', default=False, help='Run a new instance of Spyder, even if the single instance mode has been turned on (default)')
parser.add_argument('--defaults', dest='reset_to_defaults', action='store_true', default=False, help='Reset configuration settings to defaults')
parser.add_argument('--reset', dest='reset_config_files', action='store_true', default=False, help='Remove all configuration files!')
parser.add_argument('--optimize', action='store_true', default=False, help='Optimize Spyder bytecode (this may require administrative privileges)')
parser.add_argument('-w', '--workdir', dest='working_directory', default=None, help='Default working directory')
parser.add_argument('--hide-console', action='store_true', default=False, help='Hide parent console window (Windows)')
parser.add_argument('--show-console', action='store_true', default=False, help='(Deprecated) Does nothing, now the default behavior is to show the console')
parser.add_argument('--multithread', dest='multithreaded', action='store_true', default=False, help='Internal console is executed in another thread (separate from main application thread)')
parser.add_argument('--profile', action='store_true', default=False, help='Profile mode (internal test, not related with Python profiling)')
parser.add_argument('--window-title', type=str, default=None, help='String to show in the main window title')
    parser.add_argument('-p', '--project', default=None, type=str, dest='project', help='Path that contains a Spyder project')
parser.add_argument('--opengl', default=None, dest='opengl_implementation', choices=['software', 'desktop', 'gles'], help='OpenGL implementation to pass to Qt')
parser.add_argument('--debug-info', default=None, dest='debug_info', choices=['minimal', 'verbose'], help="Level of internal debugging info to give. 'minimal' only logs a small amount of confirmation messages and 'verbose' logs a lot of detailed information.")
parser.add_argument('--debug-output', default='terminal', dest='debug_output', choices=['terminal', 'file'], help="Print internal debugging info either to the terminal or to a file called spyder-debug.log in your current working directory. Default is 'terminal'.")
parser.add_argument('files', nargs='*')
options = parser.parse_args(argv)
args = options.files
return (options, args) |
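Example invocation of the parser above with an illustrative argv; it assumes argparse has been imported, as the function itself requires:

options, args = get_options(['--new-instance', '-w', '/tmp', 'script.py'])
print(options.new_instance, options.working_directory, args)
# True /tmp ['script.py']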
def _internal_verify_cas(ticket, service, suffix):
"""Verifies CAS 2.0 and 3.0 XML-based authentication ticket.
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
if settings.CAS_PROXY_CALLBACK:
params['pgtUrl'] = settings.CAS_PROXY_CALLBACK
url = (urljoin(settings.CAS_SERVER_URL, suffix) + '?' +
urlencode(params))
page = urlopen(url)
username = None
try:
response = page.read()
tree = ElementTree.fromstring(response)
document = minidom.parseString(response)
if tree[0].tag.endswith('authenticationSuccess'):
if settings.CAS_RESPONSE_CALLBACKS:
cas_response_callbacks(tree)
username = tree[0][0].text
pgt_el = document.getElementsByTagName('cas:proxyGrantingTicket')
if pgt_el:
pgt = pgt_el[0].firstChild.nodeValue
try:
pgtIou = _get_pgtiou(pgt)
tgt = Tgt.objects.get(username=username)
tgt.tgt = pgtIou.tgt
tgt.save()
pgtIou.delete()
except Tgt.DoesNotExist:
Tgt.objects.create(username=username, tgt=pgtIou.tgt)
logger.info('Creating TGT ticket for {user}'.format(
user=username
))
pgtIou.delete()
except Exception as e:
logger.warning('Failed to do proxy authentication. {message}'.format(
message=e
))
else:
failure = document.getElementsByTagName('cas:authenticationFailure')
if failure:
logger.warn('Authentication failed from CAS server: %s',
failure[0].firstChild.nodeValue)
except Exception as e:
logger.error('Failed to verify CAS authentication: {message}'.format(
message=e
))
finally:
page.close()
return username | def function[_internal_verify_cas, parameter[ticket, service, suffix]]:
constant[Verifies CAS 2.0 and 3.0 XML-based authentication ticket.
Returns username on success and None on failure.
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b101a4d0>, <ast.Constant object at 0x7da1b1018370>], [<ast.Name object at 0x7da1b1018850>, <ast.Name object at 0x7da1b101abc0>]]
if name[settings].CAS_PROXY_CALLBACK begin[:]
call[name[params]][constant[pgtUrl]] assign[=] name[settings].CAS_PROXY_CALLBACK
variable[url] assign[=] binary_operation[binary_operation[call[name[urljoin], parameter[name[settings].CAS_SERVER_URL, name[suffix]]] + constant[?]] + call[name[urlencode], parameter[name[params]]]]
variable[page] assign[=] call[name[urlopen], parameter[name[url]]]
variable[username] assign[=] constant[None]
<ast.Try object at 0x7da1b1019c00>
return[name[username]] | keyword[def] identifier[_internal_verify_cas] ( identifier[ticket] , identifier[service] , identifier[suffix] ):
literal[string]
identifier[params] ={ literal[string] : identifier[ticket] , literal[string] : identifier[service] }
keyword[if] identifier[settings] . identifier[CAS_PROXY_CALLBACK] :
identifier[params] [ literal[string] ]= identifier[settings] . identifier[CAS_PROXY_CALLBACK]
identifier[url] =( identifier[urljoin] ( identifier[settings] . identifier[CAS_SERVER_URL] , identifier[suffix] )+ literal[string] +
identifier[urlencode] ( identifier[params] ))
identifier[page] = identifier[urlopen] ( identifier[url] )
identifier[username] = keyword[None]
keyword[try] :
identifier[response] = identifier[page] . identifier[read] ()
identifier[tree] = identifier[ElementTree] . identifier[fromstring] ( identifier[response] )
identifier[document] = identifier[minidom] . identifier[parseString] ( identifier[response] )
keyword[if] identifier[tree] [ literal[int] ]. identifier[tag] . identifier[endswith] ( literal[string] ):
keyword[if] identifier[settings] . identifier[CAS_RESPONSE_CALLBACKS] :
identifier[cas_response_callbacks] ( identifier[tree] )
identifier[username] = identifier[tree] [ literal[int] ][ literal[int] ]. identifier[text]
identifier[pgt_el] = identifier[document] . identifier[getElementsByTagName] ( literal[string] )
keyword[if] identifier[pgt_el] :
identifier[pgt] = identifier[pgt_el] [ literal[int] ]. identifier[firstChild] . identifier[nodeValue]
keyword[try] :
identifier[pgtIou] = identifier[_get_pgtiou] ( identifier[pgt] )
identifier[tgt] = identifier[Tgt] . identifier[objects] . identifier[get] ( identifier[username] = identifier[username] )
identifier[tgt] . identifier[tgt] = identifier[pgtIou] . identifier[tgt]
identifier[tgt] . identifier[save] ()
identifier[pgtIou] . identifier[delete] ()
keyword[except] identifier[Tgt] . identifier[DoesNotExist] :
identifier[Tgt] . identifier[objects] . identifier[create] ( identifier[username] = identifier[username] , identifier[tgt] = identifier[pgtIou] . identifier[tgt] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] (
identifier[user] = identifier[username]
))
identifier[pgtIou] . identifier[delete] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] (
identifier[message] = identifier[e]
))
keyword[else] :
identifier[failure] = identifier[document] . identifier[getElementsByTagName] ( literal[string] )
keyword[if] identifier[failure] :
identifier[logger] . identifier[warn] ( literal[string] ,
identifier[failure] [ literal[int] ]. identifier[firstChild] . identifier[nodeValue] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] (
identifier[message] = identifier[e]
))
keyword[finally] :
identifier[page] . identifier[close] ()
keyword[return] identifier[username] | def _internal_verify_cas(ticket, service, suffix):
"""Verifies CAS 2.0 and 3.0 XML-based authentication ticket.
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
if settings.CAS_PROXY_CALLBACK:
params['pgtUrl'] = settings.CAS_PROXY_CALLBACK # depends on [control=['if'], data=[]]
url = urljoin(settings.CAS_SERVER_URL, suffix) + '?' + urlencode(params)
page = urlopen(url)
username = None
try:
response = page.read()
tree = ElementTree.fromstring(response)
document = minidom.parseString(response)
if tree[0].tag.endswith('authenticationSuccess'):
if settings.CAS_RESPONSE_CALLBACKS:
cas_response_callbacks(tree) # depends on [control=['if'], data=[]]
username = tree[0][0].text
pgt_el = document.getElementsByTagName('cas:proxyGrantingTicket')
if pgt_el:
pgt = pgt_el[0].firstChild.nodeValue
try:
pgtIou = _get_pgtiou(pgt)
tgt = Tgt.objects.get(username=username)
tgt.tgt = pgtIou.tgt
tgt.save()
pgtIou.delete() # depends on [control=['try'], data=[]]
except Tgt.DoesNotExist:
Tgt.objects.create(username=username, tgt=pgtIou.tgt)
logger.info('Creating TGT ticket for {user}'.format(user=username))
pgtIou.delete() # depends on [control=['except'], data=[]]
except Exception as e:
logger.warning('Failed to do proxy authentication. {message}'.format(message=e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
failure = document.getElementsByTagName('cas:authenticationFailure')
if failure:
logger.warn('Authentication failed from CAS server: %s', failure[0].firstChild.nodeValue) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
logger.error('Failed to verify CAS authentication: {message}'.format(message=e)) # depends on [control=['except'], data=['e']]
finally:
page.close()
return username |
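Sketch of the success-branch XML handling with a canned CAS 2.0 response standing in for the live server round-trip (the response body and user are illustrative):

from xml.etree import ElementTree

response = (b'<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">'
            b'<cas:authenticationSuccess><cas:user>jdoe</cas:user>'
            b'</cas:authenticationSuccess></cas:serviceResponse>')
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('authenticationSuccess'):
    print(tree[0][0].text)  # jdoe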
def vesting_balance_withdraw(self, vesting_id, amount=None, account=None, **kwargs):
""" Withdraw vesting balance
:param str vesting_id: Id of the vesting object
            :param bitshares.amount.Amount amount: Amount to withdraw ("all"
            if not provided)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
if not amount:
obj = Vesting(vesting_id, blockchain_instance=self)
amount = obj.claimable
op = operations.Vesting_balance_withdraw(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"vesting_balance": vesting_id,
"owner": account["id"],
"amount": {"amount": int(amount), "asset_id": amount["asset"]["id"]},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active") | def function[vesting_balance_withdraw, parameter[self, vesting_id, amount, account]]:
constant[ Withdraw vesting balance
:param str vesting_id: Id of the vesting object
            :param bitshares.amount.Amount amount: Amount to withdraw ("all"
            if not provided)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
]
if <ast.UnaryOp object at 0x7da2049606a0> begin[:]
if compare[constant[default_account] in name[self].config] begin[:]
variable[account] assign[=] call[name[self].config][constant[default_account]]
if <ast.UnaryOp object at 0x7da204963fa0> begin[:]
<ast.Raise object at 0x7da204960940>
variable[account] assign[=] call[name[Account], parameter[name[account]]]
if <ast.UnaryOp object at 0x7da18eb54f10> begin[:]
variable[obj] assign[=] call[name[Vesting], parameter[name[vesting_id]]]
variable[amount] assign[=] name[obj].claimable
variable[op] assign[=] call[name[operations].Vesting_balance_withdraw, parameter[]]
return[call[name[self].finalizeOp, parameter[name[op], call[name[account]][constant[name]], constant[active]]]] | keyword[def] identifier[vesting_balance_withdraw] ( identifier[self] , identifier[vesting_id] , identifier[amount] = keyword[None] , identifier[account] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[account] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[config] :
identifier[account] = identifier[self] . identifier[config] [ literal[string] ]
keyword[if] keyword[not] identifier[account] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[account] = identifier[Account] ( identifier[account] , identifier[blockchain_instance] = identifier[self] )
keyword[if] keyword[not] identifier[amount] :
identifier[obj] = identifier[Vesting] ( identifier[vesting_id] , identifier[blockchain_instance] = identifier[self] )
identifier[amount] = identifier[obj] . identifier[claimable]
identifier[op] = identifier[operations] . identifier[Vesting_balance_withdraw] (
**{
literal[string] :{ literal[string] : literal[int] , literal[string] : literal[string] },
literal[string] : identifier[vesting_id] ,
literal[string] : identifier[account] [ literal[string] ],
literal[string] :{ literal[string] : identifier[int] ( identifier[amount] ), literal[string] : identifier[amount] [ literal[string] ][ literal[string] ]},
literal[string] : identifier[self] . identifier[prefix] ,
}
)
keyword[return] identifier[self] . identifier[finalizeOp] ( identifier[op] , identifier[account] [ literal[string] ], literal[string] ) | def vesting_balance_withdraw(self, vesting_id, amount=None, account=None, **kwargs):
""" Withdraw vesting balance
:param str vesting_id: Id of the vesting object
            :param bitshares.amount.Amount amount: Amount to withdraw ("all"
            if not provided)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if 'default_account' in self.config:
account = self.config['default_account'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not account:
raise ValueError('You need to provide an account') # depends on [control=['if'], data=[]]
account = Account(account, blockchain_instance=self)
if not amount:
obj = Vesting(vesting_id, blockchain_instance=self)
amount = obj.claimable # depends on [control=['if'], data=[]]
op = operations.Vesting_balance_withdraw(**{'fee': {'amount': 0, 'asset_id': '1.3.0'}, 'vesting_balance': vesting_id, 'owner': account['id'], 'amount': {'amount': int(amount), 'asset_id': amount['asset']['id']}, 'prefix': self.prefix})
return self.finalizeOp(op, account['name'], 'active') |
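For reference, the Vesting_balance_withdraw payload assembled above has this shape; every identifier below is an illustrative placeholder, not a real chain object:

op_payload = {
    'fee': {'amount': 0, 'asset_id': '1.3.0'},
    'vesting_balance': '1.13.0',   # vesting_id
    'owner': '1.2.100',            # account['id']
    'amount': {'amount': 10000, 'asset_id': '1.3.0'},
    'prefix': 'BTS',
}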
def process_default(self, event):
"""
Writes event string representation to file object provided to
my_init().
@param event: Event to be processed. Can be of any type of events but
IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
@type event: Event instance
"""
self._out.write(str(event))
self._out.write('\n')
self._out.flush() | def function[process_default, parameter[self, event]]:
constant[
Writes event string representation to file object provided to
my_init().
@param event: Event to be processed. Can be of any type of events but
IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
@type event: Event instance
]
call[name[self]._out.write, parameter[call[name[str], parameter[name[event]]]]]
call[name[self]._out.write, parameter[constant[
]]]
call[name[self]._out.flush, parameter[]] | keyword[def] identifier[process_default] ( identifier[self] , identifier[event] ):
literal[string]
identifier[self] . identifier[_out] . identifier[write] ( identifier[str] ( identifier[event] ))
identifier[self] . identifier[_out] . identifier[write] ( literal[string] )
identifier[self] . identifier[_out] . identifier[flush] () | def process_default(self, event):
"""
Writes event string representation to file object provided to
my_init().
@param event: Event to be processed. Can be of any type of events but
IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
@type event: Event instance
"""
self._out.write(str(event))
self._out.write('\n')
self._out.flush() |
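Quick sketch exercising the handler's write path, with an in-memory buffer standing in for the file object my_init() would have stored on self._out; the method is called with an explicit self for brevity:

import io

class _Demo:
    def __init__(self, out):
        self._out = out

buf = io.StringIO()
process_default(_Demo(buf), 'IN_CREATE /tmp/x')
print(repr(buf.getvalue()))  # 'IN_CREATE /tmp/x\n'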
def cli(env, identifier):
"""Cancel a dedicated host server immediately"""
mgr = SoftLayer.DedicatedHostManager(env.client)
host_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'dedicated host')
if not (env.skip_confirmations or formatting.no_going_back(host_id)):
raise exceptions.CLIAbort('Aborted')
mgr.cancel_host(host_id)
click.secho('Dedicated Host %s was cancelled' % host_id, fg='green') | def function[cli, parameter[env, identifier]]:
constant[Cancel a dedicated host server immediately]
variable[mgr] assign[=] call[name[SoftLayer].DedicatedHostManager, parameter[name[env].client]]
variable[host_id] assign[=] call[name[helpers].resolve_id, parameter[name[mgr].resolve_ids, name[identifier], constant[dedicated host]]]
if <ast.UnaryOp object at 0x7da18ede78e0> begin[:]
<ast.Raise object at 0x7da20e9b1180>
call[name[mgr].cancel_host, parameter[name[host_id]]]
call[name[click].secho, parameter[binary_operation[constant[Dedicated Host %s was cancelled] <ast.Mod object at 0x7da2590d6920> name[host_id]]]] | keyword[def] identifier[cli] ( identifier[env] , identifier[identifier] ):
literal[string]
identifier[mgr] = identifier[SoftLayer] . identifier[DedicatedHostManager] ( identifier[env] . identifier[client] )
identifier[host_id] = identifier[helpers] . identifier[resolve_id] ( identifier[mgr] . identifier[resolve_ids] , identifier[identifier] , literal[string] )
keyword[if] keyword[not] ( identifier[env] . identifier[skip_confirmations] keyword[or] identifier[formatting] . identifier[no_going_back] ( identifier[host_id] )):
keyword[raise] identifier[exceptions] . identifier[CLIAbort] ( literal[string] )
identifier[mgr] . identifier[cancel_host] ( identifier[host_id] )
identifier[click] . identifier[secho] ( literal[string] % identifier[host_id] , identifier[fg] = literal[string] ) | def cli(env, identifier):
"""Cancel a dedicated host server immediately"""
mgr = SoftLayer.DedicatedHostManager(env.client)
host_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'dedicated host')
if not (env.skip_confirmations or formatting.no_going_back(host_id)):
raise exceptions.CLIAbort('Aborted') # depends on [control=['if'], data=[]]
mgr.cancel_host(host_id)
click.secho('Dedicated Host %s was cancelled' % host_id, fg='green') |
def get_initial_broks(self, broker_name):
"""Send a HTTP request to the satellite (GET /_initial_broks)
Used to build the initial broks for a broker connecting to a scheduler
:param broker_name: the concerned broker name
:type broker_name: str
    :return: the initial broks fetched from the scheduler
    :rtype: object
"""
logger.debug("Getting initial broks for %s, %s %s", self.name, self.alive, self.reachable)
return self.con.get('_initial_broks', {'broker_name': broker_name}, wait=True) | def function[get_initial_broks, parameter[self, broker_name]]:
constant[Send a HTTP request to the satellite (GET /_initial_broks)
Used to build the initial broks for a broker connecting to a scheduler
:param broker_name: the concerned broker name
:type broker_name: str
:return: the initial broks fetched from the scheduler
:rtype: object
]
call[name[logger].debug, parameter[constant[Getting initial broks for %s, %s %s], name[self].name, name[self].alive, name[self].reachable]]
return[call[name[self].con.get, parameter[constant[_initial_broks], dictionary[[<ast.Constant object at 0x7da18dc056c0>], [<ast.Name object at 0x7da18dc06080>]]]]] | keyword[def] identifier[get_initial_broks] ( identifier[self] , identifier[broker_name] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[alive] , identifier[self] . identifier[reachable] )
keyword[return] identifier[self] . identifier[con] . identifier[get] ( literal[string] ,{ literal[string] : identifier[broker_name] }, identifier[wait] = keyword[True] ) | def get_initial_broks(self, broker_name):
"""Send a HTTP request to the satellite (GET /_initial_broks)
Used to build the initial broks for a broker connecting to a scheduler
:param broker_name: the concerned broker name
:type broker_name: str
    :return: the initial broks fetched from the scheduler
    :rtype: object
"""
logger.debug('Getting initial broks for %s, %s %s', self.name, self.alive, self.reachable)
return self.con.get('_initial_broks', {'broker_name': broker_name}, wait=True) |
def _toolkit_serialize_summary_struct(model, sections, section_titles):
"""
Serialize model summary into a dict with ordered lists of sections and section titles
Parameters
----------
model : Model object
sections : Ordered list of lists (sections) of tuples (field,value)
[
[(field1, value1), (field2, value2)],
[(field3, value3), (field4, value4)],
]
section_titles : Ordered list of section titles
Returns
-------
output_dict : A dict with two entries:
'sections' : ordered list with tuples of the form ('label',value)
'section_titles' : ordered list of section labels
"""
output_dict = dict()
output_dict['sections'] = [ [ ( field[0], __extract_model_summary_value(model, field[1]) ) \
for field in section ]
for section in sections ]
output_dict['section_titles'] = section_titles
return output_dict | def function[_toolkit_serialize_summary_struct, parameter[model, sections, section_titles]]:
constant[
Serialize model summary into a dict with ordered lists of sections and section titles
Parameters
----------
model : Model object
sections : Ordered list of lists (sections) of tuples (field,value)
[
[(field1, value1), (field2, value2)],
[(field3, value3), (field4, value4)],
]
section_titles : Ordered list of section titles
Returns
-------
output_dict : A dict with two entries:
'sections' : ordered list with tuples of the form ('label',value)
'section_titles' : ordered list of section labels
]
variable[output_dict] assign[=] call[name[dict], parameter[]]
call[name[output_dict]][constant[sections]] assign[=] <ast.ListComp object at 0x7da1b1fa5870>
call[name[output_dict]][constant[section_titles]] assign[=] name[section_titles]
return[name[output_dict]] | keyword[def] identifier[_toolkit_serialize_summary_struct] ( identifier[model] , identifier[sections] , identifier[section_titles] ):
literal[string]
identifier[output_dict] = identifier[dict] ()
identifier[output_dict] [ literal[string] ]=[[( identifier[field] [ literal[int] ], identifier[__extract_model_summary_value] ( identifier[model] , identifier[field] [ literal[int] ])) keyword[for] identifier[field] keyword[in] identifier[section] ]
keyword[for] identifier[section] keyword[in] identifier[sections] ]
identifier[output_dict] [ literal[string] ]= identifier[section_titles]
keyword[return] identifier[output_dict] | def _toolkit_serialize_summary_struct(model, sections, section_titles):
"""
Serialize model summary into a dict with ordered lists of sections and section titles
Parameters
----------
model : Model object
sections : Ordered list of lists (sections) of tuples (field,value)
[
[(field1, value1), (field2, value2)],
[(field3, value3), (field4, value4)],
]
section_titles : Ordered list of section titles
Returns
-------
output_dict : A dict with two entries:
'sections' : ordered list with tuples of the form ('label',value)
'section_titles' : ordered list of section labels
"""
output_dict = dict()
output_dict['sections'] = [[(field[0], __extract_model_summary_value(model, field[1])) for field in section] for section in sections]
output_dict['section_titles'] = section_titles
return output_dict |
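Worked example of the structure produced, with a stand-in for __extract_model_summary_value (the real helper lives elsewhere in the source module) so the sketch runs on a plain dict:

def __extract_model_summary_value(model, value):
    return model.get(value, value)  # stand-in for the real extractor

model = {'num_trees': 10, 'max_depth': 6}
sections = [[('Trees', 'num_trees')], [('Depth', 'max_depth')]]
print(_toolkit_serialize_summary_struct(model, sections, ['Model', 'Training']))
# {'sections': [[('Trees', 10)], [('Depth', 6)]],
#  'section_titles': ['Model', 'Training']}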
def cp_cmd(argv):
"""Duplicate the named virtualenv to make a new one."""
parser = argparse.ArgumentParser()
parser.add_argument('source')
parser.add_argument('target', nargs='?')
parser.add_argument('-d', '--dont-activate', action='store_false',
default=True, dest='activate', help="After \
creation, continue with the existing shell (don't \
activate the new environment).")
args = parser.parse_args(argv)
target_name = copy_virtualenv_project(args.source, args.target)
if args.activate:
shell(target_name) | def function[cp_cmd, parameter[argv]]:
constant[Duplicate the named virtualenv to make a new one.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[source]]]
call[name[parser].add_argument, parameter[constant[target]]]
call[name[parser].add_argument, parameter[constant[-d], constant[--dont-activate]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[argv]]]
variable[target_name] assign[=] call[name[copy_virtualenv_project], parameter[name[args].source, name[args].target]]
if name[args].activate begin[:]
call[name[shell], parameter[name[target_name]]] | keyword[def] identifier[cp_cmd] ( identifier[argv] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[parser] . identifier[add_argument] ( literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[default] = keyword[True] , identifier[dest] = literal[string] , identifier[help] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[argv] )
identifier[target_name] = identifier[copy_virtualenv_project] ( identifier[args] . identifier[source] , identifier[args] . identifier[target] )
keyword[if] identifier[args] . identifier[activate] :
identifier[shell] ( identifier[target_name] ) | def cp_cmd(argv):
"""Duplicate the named virtualenv to make a new one."""
parser = argparse.ArgumentParser()
parser.add_argument('source')
parser.add_argument('target', nargs='?')
parser.add_argument('-d', '--dont-activate', action='store_false', default=True, dest='activate', help="After creation, continue with the existing shell (don't activate the new environment).")
args = parser.parse_args(argv)
target_name = copy_virtualenv_project(args.source, args.target)
if args.activate:
shell(target_name) # depends on [control=['if'], data=[]] |
def dematerialize(parent_name, parent_node): # FIXME we need to demat more than just leaves!
#FIXME still an issue: Fornix, Striatum, Diagonal Band
""" Remove nodes higher in the tree that occur further down the
    SAME branch. If they occur down OTHER branches, leave them alone.
NOTE: modifies in place!
"""
lleaves = {}
children = parent_node[parent_name]
if not children: # children could be empty ? i think this only happens @ root?
#print('at bottom', parent_name)
lleaves[parent_name] = None
return lleaves
children_ord = reversed(sorted(sorted(((k, v)
for k, v in children.items()),
key=alphasortkey),
#key=lambda a: f'{a[0]}'.split('>')[1] if '>' in f'{a[0]}' else f'a[0]'),
#key=lambda a: a[0].split('>') if '>' in a[0] else a[0]),
key=tcsort)) # make sure we hit deepest first
for child_name, _ in children_ord: # get list so we can go ahead and pop
#print(child_name)
new_lleaves = dematerialize(child_name, children)
if child_name == 'magnetic resonance imaging': # debugging failing demat
pass
#embed()
if child_name in new_lleaves or all(l in lleaves for l in new_lleaves):
# if it is a leaf or all childs are leaves as well
if child_name in lleaves: # if it has previously been identified as a leaf!
#print('MATERIALIZATION DETECTED! LOWER PARENT:',
#lleaves[child_name],'ZAPPING!:', child_name,
#'OF PARENT:', parent_name)
children.pop(child_name)
#print('cn', child_name, 'pn', parent_name, 'BOTTOM')
#else: # if it has NOT previously been identified as a leaf, add the parent!
#new_lleaves[child_name] = parent_name # pass it back up to nodes above
#print('cn', child_name, 'pn', parent_name)
            #else: # it is a node but we want to dematerialize them too!
lleaves[child_name] = parent_name
lleaves.update(new_lleaves)
return lleaves | def function[dematerialize, parameter[parent_name, parent_node]]:
constant[ Remove nodes higher in the tree that occur further down the
SAME branch. If they occur down OTHER branches, leave them alone.
NOTE: modifies in place!
]
variable[lleaves] assign[=] dictionary[[], []]
variable[children] assign[=] call[name[parent_node]][name[parent_name]]
if <ast.UnaryOp object at 0x7da1b1be5360> begin[:]
call[name[lleaves]][name[parent_name]] assign[=] constant[None]
return[name[lleaves]]
variable[children_ord] assign[=] call[name[reversed], parameter[call[name[sorted], parameter[call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b1be5930>]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1be60e0>, <ast.Name object at 0x7da1b1be6230>]]] in starred[name[children_ord]] begin[:]
variable[new_lleaves] assign[=] call[name[dematerialize], parameter[name[child_name], name[children]]]
if compare[name[child_name] equal[==] constant[magnetic resonance imaging]] begin[:]
pass
if <ast.BoolOp object at 0x7da1b1be6da0> begin[:]
if compare[name[child_name] in name[lleaves]] begin[:]
call[name[children].pop, parameter[name[child_name]]]
call[name[lleaves]][name[child_name]] assign[=] name[parent_name]
call[name[lleaves].update, parameter[name[new_lleaves]]]
return[name[lleaves]] | keyword[def] identifier[dematerialize] ( identifier[parent_name] , identifier[parent_node] ):
literal[string]
identifier[lleaves] ={}
identifier[children] = identifier[parent_node] [ identifier[parent_name] ]
keyword[if] keyword[not] identifier[children] :
identifier[lleaves] [ identifier[parent_name] ]= keyword[None]
keyword[return] identifier[lleaves]
identifier[children_ord] = identifier[reversed] ( identifier[sorted] ( identifier[sorted] ((( identifier[k] , identifier[v] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[children] . identifier[items] ()),
identifier[key] = identifier[alphasortkey] ),
identifier[key] = identifier[tcsort] ))
keyword[for] identifier[child_name] , identifier[_] keyword[in] identifier[children_ord] :
identifier[new_lleaves] = identifier[dematerialize] ( identifier[child_name] , identifier[children] )
keyword[if] identifier[child_name] == literal[string] :
keyword[pass]
keyword[if] identifier[child_name] keyword[in] identifier[new_lleaves] keyword[or] identifier[all] ( identifier[l] keyword[in] identifier[lleaves] keyword[for] identifier[l] keyword[in] identifier[new_lleaves] ):
keyword[if] identifier[child_name] keyword[in] identifier[lleaves] :
identifier[children] . identifier[pop] ( identifier[child_name] )
identifier[lleaves] [ identifier[child_name] ]= identifier[parent_name]
identifier[lleaves] . identifier[update] ( identifier[new_lleaves] )
keyword[return] identifier[lleaves] | def dematerialize(parent_name, parent_node): # FIXME we need to demat more than just leaves!
#FIXME still an issue: Fornix, Striatum, Diagonal Band
' Remove nodes higher in the tree that occur further down the\n SAME branch. If they occur down OTHER branches, leave them alone.\n\n NOTE: modifies in place!\n '
lleaves = {}
children = parent_node[parent_name]
if not children: # children could be empty ? i think this only happens @ root?
#print('at bottom', parent_name)
lleaves[parent_name] = None
return lleaves # depends on [control=['if'], data=[]]
#key=lambda a: f'{a[0]}'.split('>')[1] if '>' in f'{a[0]}' else f'a[0]'),
#key=lambda a: a[0].split('>') if '>' in a[0] else a[0]),
children_ord = reversed(sorted(sorted(((k, v) for (k, v) in children.items()), key=alphasortkey), key=tcsort)) # make sure we hit deepest first
for (child_name, _) in children_ord: # get list so we can go ahead and pop
#print(child_name)
new_lleaves = dematerialize(child_name, children)
if child_name == 'magnetic resonance imaging': # debugging failing demat
pass # depends on [control=['if'], data=[]]
#embed()
if child_name in new_lleaves or all((l in lleaves for l in new_lleaves)):
# if it is a leaf or all childs are leaves as well
if child_name in lleaves: # if it has previously been identified as a leaf!
#print('MATERIALIZATION DETECTED! LOWER PARENT:',
#lleaves[child_name],'ZAPPING!:', child_name,
#'OF PARENT:', parent_name)
children.pop(child_name) # depends on [control=['if'], data=['child_name']] # depends on [control=['if'], data=[]]
#print('cn', child_name, 'pn', parent_name, 'BOTTOM')
#else: # if it has NOT previously been identified as a leaf, add the parent!
#new_lleaves[child_name] = parent_name # pass it back up to nodes above
#print('cn', child_name, 'pn', parent_name)
    #else: # it is a node but we want to dematerialize them too!
lleaves[child_name] = parent_name
lleaves.update(new_lleaves) # depends on [control=['for'], data=[]]
return lleaves |
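Tiny demonstration on a nested-dict tree. alphasortkey and tcsort come from elsewhere in the source module, so simple stand-ins are supplied; with them, the alphabetical tie-break happens to visit the deeper branch first, which is what the real tcsort guarantees:

def alphasortkey(kv):
    return str(kv[0])

def tcsort(kv):
    return 0  # stand-in; ordering falls back to the alphabetical sort

tree = {'root': {'B': {'A': {}}, 'A': {}}}
dematerialize('root', tree)
print(tree)  # {'root': {'B': {'A': {}}}} -- the shallow duplicate 'A' is gone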
def get_file_size(self, path):
"""
Returns size of the file at given ``path``.
"""
id = self._get_id_for_path(path)
blob = self.repository._repo[id]
return blob.raw_length() | def function[get_file_size, parameter[self, path]]:
constant[
Returns size of the file at given ``path``.
]
variable[id] assign[=] call[name[self]._get_id_for_path, parameter[name[path]]]
variable[blob] assign[=] call[name[self].repository._repo][name[id]]
return[call[name[blob].raw_length, parameter[]]] | keyword[def] identifier[get_file_size] ( identifier[self] , identifier[path] ):
literal[string]
identifier[id] = identifier[self] . identifier[_get_id_for_path] ( identifier[path] )
identifier[blob] = identifier[self] . identifier[repository] . identifier[_repo] [ identifier[id] ]
keyword[return] identifier[blob] . identifier[raw_length] () | def get_file_size(self, path):
"""
Returns size of the file at given ``path``.
"""
id = self._get_id_for_path(path)
blob = self.repository._repo[id]
return blob.raw_length() |
def draw_color(self):
"""Tuple[int, int, int, int]: The color used for drawing operations in (red, green, blue, alpha) format."""
rgba = ffi.new('Uint8[]', 4)
check_int_err(lib.SDL_GetRenderDrawColor(self._ptr, rgba + 0, rgba + 1, rgba + 2, rgba + 3))
return (rgba[0], rgba[1], rgba[2], rgba[3]) | def function[draw_color, parameter[self]]:
constant[Tuple[int, int, int, int]: The color used for drawing operations in (red, green, blue, alpha) format.]
variable[rgba] assign[=] call[name[ffi].new, parameter[constant[Uint8[]], constant[4]]]
call[name[check_int_err], parameter[call[name[lib].SDL_GetRenderDrawColor, parameter[name[self]._ptr, binary_operation[name[rgba] + constant[0]], binary_operation[name[rgba] + constant[1]], binary_operation[name[rgba] + constant[2]], binary_operation[name[rgba] + constant[3]]]]]]
return[tuple[[<ast.Subscript object at 0x7da1b09bbf70>, <ast.Subscript object at 0x7da1b09b9a80>, <ast.Subscript object at 0x7da1b09b9450>, <ast.Subscript object at 0x7da1b09ba440>]]] | keyword[def] identifier[draw_color] ( identifier[self] ):
literal[string]
identifier[rgba] = identifier[ffi] . identifier[new] ( literal[string] , literal[int] )
identifier[check_int_err] ( identifier[lib] . identifier[SDL_GetRenderDrawColor] ( identifier[self] . identifier[_ptr] , identifier[rgba] + literal[int] , identifier[rgba] + literal[int] , identifier[rgba] + literal[int] , identifier[rgba] + literal[int] ))
keyword[return] ( identifier[rgba] [ literal[int] ], identifier[rgba] [ literal[int] ], identifier[rgba] [ literal[int] ], identifier[rgba] [ literal[int] ]) | def draw_color(self):
"""Tuple[int, int, int, int]: The color used for drawing operations in (red, green, blue, alpha) format."""
rgba = ffi.new('Uint8[]', 4)
check_int_err(lib.SDL_GetRenderDrawColor(self._ptr, rgba + 0, rgba + 1, rgba + 2, rgba + 3))
return (rgba[0], rgba[1], rgba[2], rgba[3]) |
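Standalone sketch of the cffi out-buffer pattern used above, without SDL; it needs only the cffi package, and the actual SDL call is left as a comment since it requires a live renderer:

import cffi

ffi = cffi.FFI()
rgba = ffi.new('uint8_t[]', 4)  # same allocation pattern as the property
# lib.SDL_GetRenderDrawColor(ptr, rgba + 0, rgba + 1, rgba + 2, rgba + 3)
for i in range(4):              # filled by hand for the demo
    rgba[i] = 255 - i
print(tuple(rgba[i] for i in range(4)))  # (255, 254, 253, 252)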
def readline(self, size=-1):
"""Receive message of a size from the socket.
Matches the following interface:
https://docs.python.org/3/library/io.html#io.IOBase.readline
"""
return self._safe_call(
True,
super(SSLFileobjectMixin, self).readline,
size,
) | def function[readline, parameter[self, size]]:
constant[Receive message of a size from the socket.
Matches the following interface:
https://docs.python.org/3/library/io.html#io.IOBase.readline
]
return[call[name[self]._safe_call, parameter[constant[True], call[name[super], parameter[name[SSLFileobjectMixin], name[self]]].readline, name[size]]]] | keyword[def] identifier[readline] ( identifier[self] , identifier[size] =- literal[int] ):
literal[string]
keyword[return] identifier[self] . identifier[_safe_call] (
keyword[True] ,
identifier[super] ( identifier[SSLFileobjectMixin] , identifier[self] ). identifier[readline] ,
identifier[size] ,
) | def readline(self, size=-1):
"""Receive message of a size from the socket.
Matches the following interface:
https://docs.python.org/3/library/io.html#io.IOBase.readline
"""
return self._safe_call(True, super(SSLFileobjectMixin, self).readline, size) |
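
A sketch of the guard-then-delegate shape used above. The real _safe_call translates SSL errors for the server loop; the generic OSError mapping here is an assumption, not the library's behaviour.

import io

class GuardedReader(io.BufferedReader):
    def _safe_call(self, is_reader, call, *args, **kwargs):
        try:
            return call(*args, **kwargs)
        except OSError as exc:  # assumed stand-in for SSL error translation
            raise RuntimeError('guarded read failed') from exc

    def readline(self, size=-1):
        return self._safe_call(True, super().readline, size)

reader = GuardedReader(io.BytesIO(b'first line\nsecond line\n'))
print(reader.readline())  # b'first line\n'
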
def get_eval_metrics(logits, labels, params):
"""Return dictionary of model evaluation metrics."""
metrics = {
"accuracy": _convert_to_eval_metric(padded_accuracy)(logits, labels),
"accuracy_top5": _convert_to_eval_metric(padded_accuracy_top5)(
logits, labels),
"accuracy_per_sequence": _convert_to_eval_metric(
padded_sequence_accuracy)(logits, labels),
"neg_log_perplexity": _convert_to_eval_metric(padded_neg_log_perplexity)(
logits, labels, params.vocab_size),
"approx_bleu_score": _convert_to_eval_metric(bleu_score)(logits, labels),
"rouge_2_fscore": _convert_to_eval_metric(rouge_2_fscore)(logits, labels),
"rouge_L_fscore": _convert_to_eval_metric(rouge_l_fscore)(logits, labels),
}
# Prefix each of the metric names with "metrics/". This allows the metric
# graphs to display under the "metrics" category in TensorBoard.
metrics = {"metrics/%s" % k: v for k, v in six.iteritems(metrics)}
return metrics | def function[get_eval_metrics, parameter[logits, labels, params]]:
constant[Return dictionary of model evaluation metrics.]
variable[metrics] assign[=] dictionary[[<ast.Constant object at 0x7da2054a69b0>, <ast.Constant object at 0x7da2054a4760>, <ast.Constant object at 0x7da2054a4970>, <ast.Constant object at 0x7da2054a7c70>, <ast.Constant object at 0x7da2054a7b80>, <ast.Constant object at 0x7da2054a61a0>, <ast.Constant object at 0x7da2054a4df0>], [<ast.Call object at 0x7da2054a7d60>, <ast.Call object at 0x7da2054a65c0>, <ast.Call object at 0x7da2054a5f60>, <ast.Call object at 0x7da2054a4190>, <ast.Call object at 0x7da2054a49a0>, <ast.Call object at 0x7da2054a52a0>, <ast.Call object at 0x7da2054a44c0>]]
variable[metrics] assign[=] <ast.DictComp object at 0x7da2054a5ba0>
return[name[metrics]] | keyword[def] identifier[get_eval_metrics] ( identifier[logits] , identifier[labels] , identifier[params] ):
literal[string]
identifier[metrics] ={
literal[string] : identifier[_convert_to_eval_metric] ( identifier[padded_accuracy] )( identifier[logits] , identifier[labels] ),
literal[string] : identifier[_convert_to_eval_metric] ( identifier[padded_accuracy_top5] )(
identifier[logits] , identifier[labels] ),
literal[string] : identifier[_convert_to_eval_metric] (
identifier[padded_sequence_accuracy] )( identifier[logits] , identifier[labels] ),
literal[string] : identifier[_convert_to_eval_metric] ( identifier[padded_neg_log_perplexity] )(
identifier[logits] , identifier[labels] , identifier[params] . identifier[vocab_size] ),
literal[string] : identifier[_convert_to_eval_metric] ( identifier[bleu_score] )( identifier[logits] , identifier[labels] ),
literal[string] : identifier[_convert_to_eval_metric] ( identifier[rouge_2_fscore] )( identifier[logits] , identifier[labels] ),
literal[string] : identifier[_convert_to_eval_metric] ( identifier[rouge_l_fscore] )( identifier[logits] , identifier[labels] ),
}
identifier[metrics] ={ literal[string] % identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[metrics] )}
keyword[return] identifier[metrics] | def get_eval_metrics(logits, labels, params):
"""Return dictionary of model evaluation metrics."""
metrics = {'accuracy': _convert_to_eval_metric(padded_accuracy)(logits, labels), 'accuracy_top5': _convert_to_eval_metric(padded_accuracy_top5)(logits, labels), 'accuracy_per_sequence': _convert_to_eval_metric(padded_sequence_accuracy)(logits, labels), 'neg_log_perplexity': _convert_to_eval_metric(padded_neg_log_perplexity)(logits, labels, params.vocab_size), 'approx_bleu_score': _convert_to_eval_metric(bleu_score)(logits, labels), 'rouge_2_fscore': _convert_to_eval_metric(rouge_2_fscore)(logits, labels), 'rouge_L_fscore': _convert_to_eval_metric(rouge_l_fscore)(logits, labels)}
# Prefix each of the metric names with "metrics/". This allows the metric
# graphs to display under the "metrics" category in TensorBoard.
metrics = {'metrics/%s' % k: v for (k, v) in six.iteritems(metrics)}
return metrics |
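
The closing dict comprehension is the reusable trick here: namespacing keys with a "metrics/" prefix so TensorBoard groups them under one category. A self-contained illustration with dummy values:

metrics = {"accuracy": 0.91, "approx_bleu_score": 27.3}
metrics = {"metrics/%s" % k: v for k, v in metrics.items()}
print(metrics)  # {'metrics/accuracy': 0.91, 'metrics/approx_bleu_score': 27.3}
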
def inherit_handlers(self, excluded_handlers):
# type: (Iterable[str]) -> None
"""
Merges the inherited configuration with the current ones
:param excluded_handlers: Excluded handlers
"""
if not excluded_handlers:
excluded_handlers = tuple()
for handler, configuration in self.__inherited_configuration.items():
if handler in excluded_handlers:
# Excluded handler
continue
elif handler not in self.__handlers:
# Fully inherited configuration
self.__handlers[handler] = configuration
# Merge configuration...
elif isinstance(configuration, dict):
# Dictionary
self.__handlers.setdefault(handler, {}).update(configuration)
elif isinstance(configuration, list):
# List
handler_conf = self.__handlers.setdefault(handler, [])
for item in configuration:
if item not in handler_conf:
handler_conf.append(item)
# Clear the inherited configuration dictionary
self.__inherited_configuration.clear() | def function[inherit_handlers, parameter[self, excluded_handlers]]:
constant[
Merges the inherited configuration with the current ones
:param excluded_handlers: Excluded handlers
]
if <ast.UnaryOp object at 0x7da1b06c9c60> begin[:]
variable[excluded_handlers] assign[=] call[name[tuple], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b06c9570>, <ast.Name object at 0x7da1b06c82b0>]]] in starred[call[name[self].__inherited_configuration.items, parameter[]]] begin[:]
if compare[name[handler] in name[excluded_handlers]] begin[:]
continue
call[name[self].__inherited_configuration.clear, parameter[]] | keyword[def] identifier[inherit_handlers] ( identifier[self] , identifier[excluded_handlers] ):
literal[string]
keyword[if] keyword[not] identifier[excluded_handlers] :
identifier[excluded_handlers] = identifier[tuple] ()
keyword[for] identifier[handler] , identifier[configuration] keyword[in] identifier[self] . identifier[__inherited_configuration] . identifier[items] ():
keyword[if] identifier[handler] keyword[in] identifier[excluded_handlers] :
keyword[continue]
keyword[elif] identifier[handler] keyword[not] keyword[in] identifier[self] . identifier[__handlers] :
identifier[self] . identifier[__handlers] [ identifier[handler] ]= identifier[configuration]
keyword[elif] identifier[isinstance] ( identifier[configuration] , identifier[dict] ):
identifier[self] . identifier[__handlers] . identifier[setdefault] ( identifier[handler] ,{}). identifier[update] ( identifier[configuration] )
keyword[elif] identifier[isinstance] ( identifier[configuration] , identifier[list] ):
identifier[handler_conf] = identifier[self] . identifier[__handlers] . identifier[setdefault] ( identifier[handler] ,[])
keyword[for] identifier[item] keyword[in] identifier[configuration] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[handler_conf] :
identifier[handler_conf] . identifier[append] ( identifier[item] )
identifier[self] . identifier[__inherited_configuration] . identifier[clear] () | def inherit_handlers(self, excluded_handlers):
# type: (Iterable[str]) -> None
'\n Merges the inherited configuration with the current ones\n\n :param excluded_handlers: Excluded handlers\n '
if not excluded_handlers:
excluded_handlers = tuple() # depends on [control=['if'], data=[]]
for (handler, configuration) in self.__inherited_configuration.items():
if handler in excluded_handlers:
# Excluded handler
continue # depends on [control=['if'], data=[]]
elif handler not in self.__handlers:
# Fully inherited configuration
self.__handlers[handler] = configuration # depends on [control=['if'], data=['handler']]
# Merge configuration...
elif isinstance(configuration, dict):
# Dictionary
self.__handlers.setdefault(handler, {}).update(configuration) # depends on [control=['if'], data=[]]
elif isinstance(configuration, list):
# List
handler_conf = self.__handlers.setdefault(handler, [])
for item in configuration:
if item not in handler_conf:
handler_conf.append(item) # depends on [control=['if'], data=['item', 'handler_conf']] # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Clear the inherited configuration dictionary
self.__inherited_configuration.clear() |
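
The merge rules above (adopt missing keys wholesale, update dict values, append-unique to list values) can be exercised in isolation; the handler names below are invented for the demo.

handlers = {"a": {"x": 1}, "b": [1, 2]}
inherited = {"a": {"y": 2}, "b": [2, 3], "c": "whole"}

for handler, configuration in inherited.items():
    if handler not in handlers:
        handlers[handler] = configuration          # fully inherited
    elif isinstance(configuration, dict):
        handlers.setdefault(handler, {}).update(configuration)
    elif isinstance(configuration, list):
        conf = handlers.setdefault(handler, [])
        for item in configuration:
            if item not in conf:
                conf.append(item)

print(handlers)  # {'a': {'x': 1, 'y': 2}, 'b': [1, 2, 3], 'c': 'whole'}
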
def add_coalescent_model(self, Tc, **kwargs):
"""Add a coalescent model to the tree and optionally optimze
Parameters
----------
Tc : float,str
If this is a float, it will be interpreted as the inverse merger
rate in molecular clock units; if it is a
"""
from .merger_models import Coalescent
self.logger('TreeTime.run: adding coalescent prior with Tc='+str(Tc),1)
self.merger_model = Coalescent(self.tree,
date2dist=self.date2dist, logger=self.logger)
if Tc=='skyline': # restrict skyline model optimization to last iteration
self.merger_model.optimize_skyline(**kwargs)
self.logger("optimized a skyline ", 2)
else:
if Tc in ['opt', 'const']:
self.merger_model.optimize_Tc()
self.logger("optimized Tc to %f"%self.merger_model.Tc.y[0], 2)
else:
try:
self.merger_model.set_Tc(Tc)
except:
self.logger("setting of coalescent time scale failed", 1, warn=True)
self.merger_model.attach_to_tree() | def function[add_coalescent_model, parameter[self, Tc]]:
constant[Add a coalescent model to the tree and optionally optimize
Parameters
----------
Tc : float,str
If this is a float, it will be interpreted as the inverse merger
rate in molecular clock units; if it is a
]
from relative_module[merger_models] import module[Coalescent]
call[name[self].logger, parameter[binary_operation[constant[TreeTime.run: adding coalescent prior with Tc=] + call[name[str], parameter[name[Tc]]]], constant[1]]]
name[self].merger_model assign[=] call[name[Coalescent], parameter[name[self].tree]]
if compare[name[Tc] equal[==] constant[skyline]] begin[:]
call[name[self].merger_model.optimize_skyline, parameter[]]
call[name[self].logger, parameter[constant[optimized a skyline ], constant[2]]]
call[name[self].merger_model.attach_to_tree, parameter[]] | keyword[def] identifier[add_coalescent_model] ( identifier[self] , identifier[Tc] ,** identifier[kwargs] ):
literal[string]
keyword[from] . identifier[merger_models] keyword[import] identifier[Coalescent]
identifier[self] . identifier[logger] ( literal[string] + identifier[str] ( identifier[Tc] ), literal[int] )
identifier[self] . identifier[merger_model] = identifier[Coalescent] ( identifier[self] . identifier[tree] ,
identifier[date2dist] = identifier[self] . identifier[date2dist] , identifier[logger] = identifier[self] . identifier[logger] )
keyword[if] identifier[Tc] == literal[string] :
identifier[self] . identifier[merger_model] . identifier[optimize_skyline] (** identifier[kwargs] )
identifier[self] . identifier[logger] ( literal[string] , literal[int] )
keyword[else] :
keyword[if] identifier[Tc] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[merger_model] . identifier[optimize_Tc] ()
identifier[self] . identifier[logger] ( literal[string] % identifier[self] . identifier[merger_model] . identifier[Tc] . identifier[y] [ literal[int] ], literal[int] )
keyword[else] :
keyword[try] :
identifier[self] . identifier[merger_model] . identifier[set_Tc] ( identifier[Tc] )
keyword[except] :
identifier[self] . identifier[logger] ( literal[string] , literal[int] , identifier[warn] = keyword[True] )
identifier[self] . identifier[merger_model] . identifier[attach_to_tree] () | def add_coalescent_model(self, Tc, **kwargs):
"""Add a coalescent model to the tree and optionally optimze
Parameters
----------
Tc : float,str
If this is a float, it will be interpreted as the inverse merger
rate in molecular clock units; if it is a
"""
from .merger_models import Coalescent
self.logger('TreeTime.run: adding coalescent prior with Tc=' + str(Tc), 1)
self.merger_model = Coalescent(self.tree, date2dist=self.date2dist, logger=self.logger)
if Tc == 'skyline': # restrict skyline model optimization to last iteration
self.merger_model.optimize_skyline(**kwargs)
self.logger('optimized a skyline ', 2) # depends on [control=['if'], data=[]]
elif Tc in ['opt', 'const']:
self.merger_model.optimize_Tc()
self.logger('optimized Tc to %f' % self.merger_model.Tc.y[0], 2) # depends on [control=['if'], data=[]]
else:
try:
self.merger_model.set_Tc(Tc) # depends on [control=['try'], data=[]]
except:
self.logger('setting of coalescent time scale failed', 1, warn=True) # depends on [control=['except'], data=[]]
self.merger_model.attach_to_tree() |
def reinterptet_harray_to_bits(typeFrom, sigOrVal, bitsT):
"""
Cast HArray signal or value to signal or value of type Bits
"""
size = int(typeFrom.size)
widthOfElm = typeFrom.elmType.bit_length()
w = bitsT.bit_length()
if size * widthOfElm != w:
raise TypeConversionErr(
"Size of types is different", size * widthOfElm, w)
partT = Bits(widthOfElm)
parts = [p._reinterpret_cast(partT) for p in sigOrVal]
return Concat(*reversed(parts))._reinterpret_cast(bitsT) | def function[reinterptet_harray_to_bits, parameter[typeFrom, sigOrVal, bitsT]]:
constant[
Cast HArray signal or value to signal or value of type Bits
]
variable[size] assign[=] call[name[int], parameter[name[typeFrom].size]]
variable[widthOfElm] assign[=] call[name[typeFrom].elmType.bit_length, parameter[]]
variable[w] assign[=] call[name[bitsT].bit_length, parameter[]]
if compare[binary_operation[name[size] * name[widthOfElm]] not_equal[!=] name[w]] begin[:]
<ast.Raise object at 0x7da1b039a7d0>
variable[partT] assign[=] call[name[Bits], parameter[name[widthOfElm]]]
variable[parts] assign[=] <ast.ListComp object at 0x7da1b03998d0>
return[call[call[name[Concat], parameter[<ast.Starred object at 0x7da18c4ce920>]]._reinterpret_cast, parameter[name[bitsT]]]] | keyword[def] identifier[reinterptet_harray_to_bits] ( identifier[typeFrom] , identifier[sigOrVal] , identifier[bitsT] ):
literal[string]
identifier[size] = identifier[int] ( identifier[typeFrom] . identifier[size] )
identifier[widthOfElm] = identifier[typeFrom] . identifier[elmType] . identifier[bit_length] ()
identifier[w] = identifier[bitsT] . identifier[bit_length] ()
keyword[if] identifier[size] * identifier[widthOfElm] != identifier[w] :
keyword[raise] identifier[TypeConversionErr] (
literal[string] , identifier[size] * identifier[widthOfElm] , identifier[w] )
identifier[partT] = identifier[Bits] ( identifier[widthOfElm] )
identifier[parts] =[ identifier[p] . identifier[_reinterpret_cast] ( identifier[partT] ) keyword[for] identifier[p] keyword[in] identifier[sigOrVal] ]
keyword[return] identifier[Concat] (* identifier[reversed] ( identifier[parts] )). identifier[_reinterpret_cast] ( identifier[bitsT] ) | def reinterptet_harray_to_bits(typeFrom, sigOrVal, bitsT):
"""
Cast HArray signal or value to signal or value of type Bits
"""
size = int(typeFrom.size)
widthOfElm = typeFrom.elmType.bit_length()
w = bitsT.bit_length()
if size * widthOfElm != w:
raise TypeConversionErr('Size of types is different', size * widthOfElm, w) # depends on [control=['if'], data=['w']]
partT = Bits(widthOfElm)
parts = [p._reinterpret_cast(partT) for p in sigOrVal]
return Concat(*reversed(parts))._reinterpret_cast(bitsT) |
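
On plain integers, Concat(*reversed(parts)) amounts to packing element 0 into the least significant slice. A sketch with invented 4-bit elements:

elems = [0x1, 0x2, 0x3]  # array elements, low index first
width = 4                # assumed bit width of one element

packed = 0
for i, e in enumerate(elems):
    packed |= (e & ((1 << width) - 1)) << (i * width)

print(hex(packed))  # 0x321: element 0 lands in the lowest bits
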
def get_network_by_full_name(self, si, default_network_full_name):
"""
Find network by a Full Name
:param default_network_full_name: <str> Full Network Name - like 'Root/Folder/Network'
:return:
"""
path, name = get_path_and_name(default_network_full_name)
return self.find_network_by_name(si, path, name) if name else None | def function[get_network_by_full_name, parameter[self, si, default_network_full_name]]:
constant[
Find network by a Full Name
:param default_network_full_name: <str> Full Network Name - like 'Root/Folder/Network'
:return:
]
<ast.Tuple object at 0x7da2047ebdf0> assign[=] call[name[get_path_and_name], parameter[name[default_network_full_name]]]
return[<ast.IfExp object at 0x7da2047e8700>] | keyword[def] identifier[get_network_by_full_name] ( identifier[self] , identifier[si] , identifier[default_network_full_name] ):
literal[string]
identifier[path] , identifier[name] = identifier[get_path_and_name] ( identifier[default_network_full_name] )
keyword[return] identifier[self] . identifier[find_network_by_name] ( identifier[si] , identifier[path] , identifier[name] ) keyword[if] identifier[name] keyword[else] keyword[None] | def get_network_by_full_name(self, si, default_network_full_name):
"""
Find network by a Full Name
:param default_network_full_name: <str> Full Network Name - like 'Root/Folder/Network'
:return:
"""
(path, name) = get_path_and_name(default_network_full_name)
return self.find_network_by_name(si, path, name) if name else None |
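
get_path_and_name is not shown in this snippet; a plausible stand-in splits on the last '/', which is enough to see how the lookup is driven. Its exact behaviour is an assumption.

def get_path_and_name(full_name):
    # assumed: 'Root/Folder/Network' -> ('Root/Folder', 'Network')
    path, _, name = full_name.rpartition('/')
    return path, name

print(get_path_and_name('Root/Folder/Network'))  # ('Root/Folder', 'Network')
print(get_path_and_name('Network'))              # ('', 'Network')
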
def parse_fallback(self):
"""Fallback method when parser doesn't know the statement"""
if self.strict:
raise PywavefrontException("Unimplemented OBJ format statement '%s' on line '%s'"
% (self.values[0], self.line.rstrip()))
else:
logger.warning("Unimplemented OBJ format statement '%s' on line '%s'"
% (self.values[0], self.line.rstrip())) | def function[parse_fallback, parameter[self]]:
constant[Fallback method when parser doesn't know the statement]
if name[self].strict begin[:]
<ast.Raise object at 0x7da2044c3880> | keyword[def] identifier[parse_fallback] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[strict] :
keyword[raise] identifier[PywavefrontException] ( literal[string]
%( identifier[self] . identifier[values] [ literal[int] ], identifier[self] . identifier[line] . identifier[rstrip] ()))
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string]
%( identifier[self] . identifier[values] [ literal[int] ], identifier[self] . identifier[line] . identifier[rstrip] ())) | def parse_fallback(self):
"""Fallback method when parser doesn't know the statement"""
if self.strict:
raise PywavefrontException("Unimplemented OBJ format statement '%s' on line '%s'" % (self.values[0], self.line.rstrip())) # depends on [control=['if'], data=[]]
else:
logger.warning("Unimplemented OBJ format statement '%s' on line '%s'" % (self.values[0], self.line.rstrip())) |
def SequenceOf(klass):
"""Function to return a class that can encode and decode a list of
some other type."""
if _debug: SequenceOf._debug("SequenceOf %r", klass)
global _sequence_of_map
global _sequence_of_classes, _array_of_classes
# if this has already been built, return the cached one
if klass in _sequence_of_map:
if _debug: SequenceOf._debug(" - found in cache")
return _sequence_of_map[klass]
# no SequenceOf(SequenceOf(...)) allowed
if klass in _sequence_of_classes:
raise TypeError("nested sequences disallowed")
# no SequenceOf(ArrayOf(...)) allowed
if klass in _array_of_classes:
raise TypeError("sequences of arrays disallowed")
# define a generic class for lists
@bacpypes_debugging
class _SequenceOf:
subtype = None
def __init__(self, value=None):
if _debug: _SequenceOf._debug("(%r)__init__ %r (subtype=%r)", self.__class__.__name__, value, self.subtype)
if value is None:
self.value = []
elif isinstance(value, list):
self.value = value
else:
raise TypeError("invalid constructor datatype")
def append(self, value):
if issubclass(self.subtype, Atomic):
pass
elif issubclass(self.subtype, AnyAtomic) and not isinstance(value, Atomic):
raise TypeError("instance of an atomic type required")
elif not isinstance(value, self.subtype):
raise TypeError("%s value required" % (self.subtype.__name__,))
self.value.append(value)
def __len__(self):
return len(self.value)
def __getitem__(self, item):
return self.value[item]
def __iter__(self):
return iter(self.value)
def encode(self, taglist):
if _debug: _SequenceOf._debug("(%r)encode %r", self.__class__.__name__, taglist)
for value in self.value:
if issubclass(self.subtype, (Atomic, AnyAtomic)):
# a helper cooperates between the atomic value and the tag
helper = self.subtype(value)
# build a tag and encode the data into it
tag = Tag()
helper.encode(tag)
# now encode the tag
taglist.append(tag)
elif isinstance(value, self.subtype):
# it must have its own encoder
value.encode(taglist)
else:
raise TypeError("%s must be a %s" % (value, self.subtype.__name__))
def decode(self, taglist):
if _debug: _SequenceOf._debug("(%r)decode %r", self.__class__.__name__, taglist)
while len(taglist) != 0:
tag = taglist.Peek()
if tag.tagClass == Tag.closingTagClass:
return
if issubclass(self.subtype, (Atomic, AnyAtomic)):
if _debug: _SequenceOf._debug(" - building helper: %r %r", self.subtype, tag)
taglist.Pop()
# a helper cooperates between the atomic value and the tag
helper = self.subtype(tag)
# save the value
self.value.append(helper.value)
else:
if _debug: _SequenceOf._debug(" - building value: %r", self.subtype)
# build an element
value = self.subtype()
# let it decode itself
value.decode(taglist)
# save what was built
self.value.append(value)
def debug_contents(self, indent=1, file=sys.stdout, _ids=None):
i = 0
for value in self.value:
if issubclass(self.subtype, (Atomic, AnyAtomic)):
file.write("%s[%d] = %r\n" % (" " * indent, i, value))
elif isinstance(value, self.subtype):
file.write("%s[%d]" % (" " * indent, i))
value.debug_contents(indent+1, file, _ids)
else:
file.write("%s[%d] %s must be a %s" % (" " * indent, i, value, self.subtype.__name__))
i += 1
def dict_contents(self, use_dict=None, as_class=dict):
# return sequences as arrays
mapped_value = []
for value in self.value:
if issubclass(self.subtype, Atomic):
mapped_value.append(value) ### ambiguous
elif issubclass(self.subtype, AnyAtomic):
mapped_value.append(value.value) ### ambiguous
elif isinstance(value, self.subtype):
mapped_value.append(value.dict_contents(as_class=as_class))
# return what we built
return mapped_value
# constrain it to a list of a specific type of item
setattr(_SequenceOf, 'subtype', klass)
_SequenceOf.__name__ = 'SequenceOf' + klass.__name__
if _debug: SequenceOf._debug(" - build this class: %r", _SequenceOf)
# cache this type
_sequence_of_map[klass] = _SequenceOf
_sequence_of_classes[_SequenceOf] = 1
# return this new type
return _SequenceOf | def function[SequenceOf, parameter[klass]]:
constant[Function to return a class that can encode and decode a list of
some other type.]
if name[_debug] begin[:]
call[name[SequenceOf]._debug, parameter[constant[SequenceOf %r], name[klass]]]
<ast.Global object at 0x7da1b0811fc0>
<ast.Global object at 0x7da1b0811f00>
if compare[name[klass] in name[_sequence_of_map]] begin[:]
if name[_debug] begin[:]
call[name[SequenceOf]._debug, parameter[constant[ - found in cache]]]
return[call[name[_sequence_of_map]][name[klass]]]
if compare[name[klass] in name[_sequence_of_classes]] begin[:]
<ast.Raise object at 0x7da1b08104c0>
if compare[name[klass] in name[_array_of_classes]] begin[:]
<ast.Raise object at 0x7da1b0810610>
class class[_SequenceOf, parameter[]] begin[:]
variable[subtype] assign[=] constant[None]
def function[__init__, parameter[self, value]]:
if name[_debug] begin[:]
call[name[_SequenceOf]._debug, parameter[constant[(%r)__init__ %r (subtype=%r)], name[self].__class__.__name__, name[value], name[self].subtype]]
if compare[name[value] is constant[None]] begin[:]
name[self].value assign[=] list[[]]
def function[append, parameter[self, value]]:
if call[name[issubclass], parameter[name[self].subtype, name[Atomic]]] begin[:]
pass
call[name[self].value.append, parameter[name[value]]]
def function[__len__, parameter[self]]:
return[call[name[len], parameter[name[self].value]]]
def function[__getitem__, parameter[self, item]]:
return[call[name[self].value][name[item]]]
def function[__iter__, parameter[self]]:
return[call[name[iter], parameter[name[self].value]]]
def function[encode, parameter[self, taglist]]:
if name[_debug] begin[:]
call[name[_SequenceOf]._debug, parameter[constant[(%r)encode %r], name[self].__class__.__name__, name[taglist]]]
for taget[name[value]] in starred[name[self].value] begin[:]
if call[name[issubclass], parameter[name[self].subtype, tuple[[<ast.Name object at 0x7da1b0813cd0>, <ast.Name object at 0x7da1b0813ca0>]]]] begin[:]
variable[helper] assign[=] call[name[self].subtype, parameter[name[value]]]
variable[tag] assign[=] call[name[Tag], parameter[]]
call[name[helper].encode, parameter[name[tag]]]
call[name[taglist].append, parameter[name[tag]]]
def function[decode, parameter[self, taglist]]:
if name[_debug] begin[:]
call[name[_SequenceOf]._debug, parameter[constant[(%r)decode %r], name[self].__class__.__name__, name[taglist]]]
while compare[call[name[len], parameter[name[taglist]]] not_equal[!=] constant[0]] begin[:]
variable[tag] assign[=] call[name[taglist].Peek, parameter[]]
if compare[name[tag].tagClass equal[==] name[Tag].closingTagClass] begin[:]
return[None]
if call[name[issubclass], parameter[name[self].subtype, tuple[[<ast.Name object at 0x7da1b0893100>, <ast.Name object at 0x7da1b08930d0>]]]] begin[:]
if name[_debug] begin[:]
call[name[_SequenceOf]._debug, parameter[constant[ - building helper: %r %r], name[self].subtype, name[tag]]]
call[name[taglist].Pop, parameter[]]
variable[helper] assign[=] call[name[self].subtype, parameter[name[tag]]]
call[name[self].value.append, parameter[name[helper].value]]
def function[debug_contents, parameter[self, indent, file, _ids]]:
variable[i] assign[=] constant[0]
for taget[name[value]] in starred[name[self].value] begin[:]
if call[name[issubclass], parameter[name[self].subtype, tuple[[<ast.Name object at 0x7da1b0892170>, <ast.Name object at 0x7da1b0892140>]]]] begin[:]
call[name[file].write, parameter[binary_operation[constant[%s[%d] = %r
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b0891fc0>, <ast.Name object at 0x7da1b0891f30>, <ast.Name object at 0x7da1b0891f00>]]]]]
<ast.AugAssign object at 0x7da1b0891660>
def function[dict_contents, parameter[self, use_dict, as_class]]:
variable[mapped_value] assign[=] list[[]]
for taget[name[value]] in starred[name[self].value] begin[:]
if call[name[issubclass], parameter[name[self].subtype, name[Atomic]]] begin[:]
call[name[mapped_value].append, parameter[name[value]]]
return[name[mapped_value]]
call[name[setattr], parameter[name[_SequenceOf], constant[subtype], name[klass]]]
name[_SequenceOf].__name__ assign[=] binary_operation[constant[SequenceOf] + name[klass].__name__]
if name[_debug] begin[:]
call[name[SequenceOf]._debug, parameter[constant[ - build this class: %r], name[_SequenceOf]]]
call[name[_sequence_of_map]][name[klass]] assign[=] name[_SequenceOf]
call[name[_sequence_of_classes]][name[_SequenceOf]] assign[=] constant[1]
return[name[_SequenceOf]] | keyword[def] identifier[SequenceOf] ( identifier[klass] ):
literal[string]
keyword[if] identifier[_debug] : identifier[SequenceOf] . identifier[_debug] ( literal[string] , identifier[klass] )
keyword[global] identifier[_sequence_of_map]
keyword[global] identifier[_sequence_of_classes] , identifier[_array_of_classes]
keyword[if] identifier[klass] keyword[in] identifier[_sequence_of_map] :
keyword[if] identifier[_debug] : identifier[SequenceOf] . identifier[_debug] ( literal[string] )
keyword[return] identifier[_sequence_of_map] [ identifier[klass] ]
keyword[if] identifier[klass] keyword[in] identifier[_sequence_of_classes] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[klass] keyword[in] identifier[_array_of_classes] :
keyword[raise] identifier[TypeError] ( literal[string] )
@ identifier[bacpypes_debugging]
keyword[class] identifier[_SequenceOf] :
identifier[subtype] = keyword[None]
keyword[def] identifier[__init__] ( identifier[self] , identifier[value] = keyword[None] ):
keyword[if] identifier[_debug] : identifier[_SequenceOf] . identifier[_debug] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] , identifier[value] , identifier[self] . identifier[subtype] )
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[self] . identifier[value] =[]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[self] . identifier[value] = identifier[value]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[def] identifier[append] ( identifier[self] , identifier[value] ):
keyword[if] identifier[issubclass] ( identifier[self] . identifier[subtype] , identifier[Atomic] ):
keyword[pass]
keyword[elif] identifier[issubclass] ( identifier[self] . identifier[subtype] , identifier[AnyAtomic] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[value] , identifier[Atomic] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[value] , identifier[self] . identifier[subtype] ):
keyword[raise] identifier[TypeError] ( literal[string] %( identifier[self] . identifier[subtype] . identifier[__name__] ,))
identifier[self] . identifier[value] . identifier[append] ( identifier[value] )
keyword[def] identifier[__len__] ( identifier[self] ):
keyword[return] identifier[len] ( identifier[self] . identifier[value] )
keyword[def] identifier[__getitem__] ( identifier[self] , identifier[item] ):
keyword[return] identifier[self] . identifier[value] [ identifier[item] ]
keyword[def] identifier[__iter__] ( identifier[self] ):
keyword[return] identifier[iter] ( identifier[self] . identifier[value] )
keyword[def] identifier[encode] ( identifier[self] , identifier[taglist] ):
keyword[if] identifier[_debug] : identifier[_SequenceOf] . identifier[_debug] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] , identifier[taglist] )
keyword[for] identifier[value] keyword[in] identifier[self] . identifier[value] :
keyword[if] identifier[issubclass] ( identifier[self] . identifier[subtype] ,( identifier[Atomic] , identifier[AnyAtomic] )):
identifier[helper] = identifier[self] . identifier[subtype] ( identifier[value] )
identifier[tag] = identifier[Tag] ()
identifier[helper] . identifier[encode] ( identifier[tag] )
identifier[taglist] . identifier[append] ( identifier[tag] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[self] . identifier[subtype] ):
identifier[value] . identifier[encode] ( identifier[taglist] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] %( identifier[value] , identifier[self] . identifier[subtype] . identifier[__name__] ))
keyword[def] identifier[decode] ( identifier[self] , identifier[taglist] ):
keyword[if] identifier[_debug] : identifier[_SequenceOf] . identifier[_debug] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] , identifier[taglist] )
keyword[while] identifier[len] ( identifier[taglist] )!= literal[int] :
identifier[tag] = identifier[taglist] . identifier[Peek] ()
keyword[if] identifier[tag] . identifier[tagClass] == identifier[Tag] . identifier[closingTagClass] :
keyword[return]
keyword[if] identifier[issubclass] ( identifier[self] . identifier[subtype] ,( identifier[Atomic] , identifier[AnyAtomic] )):
keyword[if] identifier[_debug] : identifier[_SequenceOf] . identifier[_debug] ( literal[string] , identifier[self] . identifier[subtype] , identifier[tag] )
identifier[taglist] . identifier[Pop] ()
identifier[helper] = identifier[self] . identifier[subtype] ( identifier[tag] )
identifier[self] . identifier[value] . identifier[append] ( identifier[helper] . identifier[value] )
keyword[else] :
keyword[if] identifier[_debug] : identifier[_SequenceOf] . identifier[_debug] ( literal[string] , identifier[self] . identifier[subtype] )
identifier[value] = identifier[self] . identifier[subtype] ()
identifier[value] . identifier[decode] ( identifier[taglist] )
identifier[self] . identifier[value] . identifier[append] ( identifier[value] )
keyword[def] identifier[debug_contents] ( identifier[self] , identifier[indent] = literal[int] , identifier[file] = identifier[sys] . identifier[stdout] , identifier[_ids] = keyword[None] ):
identifier[i] = literal[int]
keyword[for] identifier[value] keyword[in] identifier[self] . identifier[value] :
keyword[if] identifier[issubclass] ( identifier[self] . identifier[subtype] ,( identifier[Atomic] , identifier[AnyAtomic] )):
identifier[file] . identifier[write] ( literal[string] %( literal[string] * identifier[indent] , identifier[i] , identifier[value] ))
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[self] . identifier[subtype] ):
identifier[file] . identifier[write] ( literal[string] %( literal[string] * identifier[indent] , identifier[i] ))
identifier[value] . identifier[debug_contents] ( identifier[indent] + literal[int] , identifier[file] , identifier[_ids] )
keyword[else] :
identifier[file] . identifier[write] ( literal[string] %( literal[string] * identifier[indent] , identifier[i] , identifier[value] , identifier[self] . identifier[subtype] . identifier[__name__] ))
identifier[i] += literal[int]
keyword[def] identifier[dict_contents] ( identifier[self] , identifier[use_dict] = keyword[None] , identifier[as_class] = identifier[dict] ):
identifier[mapped_value] =[]
keyword[for] identifier[value] keyword[in] identifier[self] . identifier[value] :
keyword[if] identifier[issubclass] ( identifier[self] . identifier[subtype] , identifier[Atomic] ):
identifier[mapped_value] . identifier[append] ( identifier[value] )
keyword[elif] identifier[issubclass] ( identifier[self] . identifier[subtype] , identifier[AnyAtomic] ):
identifier[mapped_value] . identifier[append] ( identifier[value] . identifier[value] )
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[self] . identifier[subtype] ):
identifier[mapped_value] . identifier[append] ( identifier[value] . identifier[dict_contents] ( identifier[as_class] = identifier[as_class] ))
keyword[return] identifier[mapped_value]
identifier[setattr] ( identifier[_SequenceOf] , literal[string] , identifier[klass] )
identifier[_SequenceOf] . identifier[__name__] = literal[string] + identifier[klass] . identifier[__name__]
keyword[if] identifier[_debug] : identifier[SequenceOf] . identifier[_debug] ( literal[string] , identifier[_SequenceOf] )
identifier[_sequence_of_map] [ identifier[klass] ]= identifier[_SequenceOf]
identifier[_sequence_of_classes] [ identifier[_SequenceOf] ]= literal[int]
keyword[return] identifier[_SequenceOf] | def SequenceOf(klass):
"""Function to return a class that can encode and decode a list of
some other type."""
if _debug:
SequenceOf._debug('SequenceOf %r', klass) # depends on [control=['if'], data=[]]
global _sequence_of_map
global _sequence_of_classes, _array_of_classes
# if this has already been built, return the cached one
if klass in _sequence_of_map:
if _debug:
SequenceOf._debug(' - found in cache') # depends on [control=['if'], data=[]]
return _sequence_of_map[klass] # depends on [control=['if'], data=['klass', '_sequence_of_map']]
# no SequenceOf(SequenceOf(...)) allowed
if klass in _sequence_of_classes:
raise TypeError('nested sequences disallowed') # depends on [control=['if'], data=[]]
# no SequenceOf(ArrayOf(...)) allowed
if klass in _array_of_classes:
raise TypeError('sequences of arrays disallowed') # depends on [control=['if'], data=[]]
# define a generic class for lists
@bacpypes_debugging
class _SequenceOf:
subtype = None
def __init__(self, value=None):
if _debug:
_SequenceOf._debug('(%r)__init__ %r (subtype=%r)', self.__class__.__name__, value, self.subtype) # depends on [control=['if'], data=[]]
if value is None:
self.value = [] # depends on [control=['if'], data=[]]
elif isinstance(value, list):
self.value = value # depends on [control=['if'], data=[]]
else:
raise TypeError('invalid constructor datatype')
def append(self, value):
if issubclass(self.subtype, Atomic):
pass # depends on [control=['if'], data=[]]
elif issubclass(self.subtype, AnyAtomic) and (not isinstance(value, Atomic)):
raise TypeError('instance of an atomic type required') # depends on [control=['if'], data=[]]
elif not isinstance(value, self.subtype):
raise TypeError('%s value required' % (self.subtype.__name__,)) # depends on [control=['if'], data=[]]
self.value.append(value)
def __len__(self):
return len(self.value)
def __getitem__(self, item):
return self.value[item]
def __iter__(self):
return iter(self.value)
def encode(self, taglist):
if _debug:
_SequenceOf._debug('(%r)encode %r', self.__class__.__name__, taglist) # depends on [control=['if'], data=[]]
for value in self.value:
if issubclass(self.subtype, (Atomic, AnyAtomic)):
# a helper cooperates between the atomic value and the tag
helper = self.subtype(value)
# build a tag and encode the data into it
tag = Tag()
helper.encode(tag)
# now encode the tag
taglist.append(tag) # depends on [control=['if'], data=[]]
elif isinstance(value, self.subtype):
# it must have its own encoder
value.encode(taglist) # depends on [control=['if'], data=[]]
else:
raise TypeError('%s must be a %s' % (value, self.subtype.__name__)) # depends on [control=['for'], data=['value']]
def decode(self, taglist):
if _debug:
_SequenceOf._debug('(%r)decode %r', self.__class__.__name__, taglist) # depends on [control=['if'], data=[]]
while len(taglist) != 0:
tag = taglist.Peek()
if tag.tagClass == Tag.closingTagClass:
return # depends on [control=['if'], data=[]]
if issubclass(self.subtype, (Atomic, AnyAtomic)):
if _debug:
_SequenceOf._debug(' - building helper: %r %r', self.subtype, tag) # depends on [control=['if'], data=[]]
taglist.Pop()
# a helper cooperates between the atomic value and the tag
helper = self.subtype(tag)
# save the value
self.value.append(helper.value) # depends on [control=['if'], data=[]]
else:
if _debug:
_SequenceOf._debug(' - building value: %r', self.subtype) # depends on [control=['if'], data=[]]
# build an element
value = self.subtype()
# let it decode itself
value.decode(taglist)
# save what was built
self.value.append(value) # depends on [control=['while'], data=[]]
def debug_contents(self, indent=1, file=sys.stdout, _ids=None):
i = 0
for value in self.value:
if issubclass(self.subtype, (Atomic, AnyAtomic)):
file.write('%s[%d] = %r\n' % (' ' * indent, i, value)) # depends on [control=['if'], data=[]]
elif isinstance(value, self.subtype):
file.write('%s[%d]' % (' ' * indent, i))
value.debug_contents(indent + 1, file, _ids) # depends on [control=['if'], data=[]]
else:
file.write('%s[%d] %s must be a %s' % (' ' * indent, i, value, self.subtype.__name__))
i += 1 # depends on [control=['for'], data=['value']]
def dict_contents(self, use_dict=None, as_class=dict):
# return sequences as arrays
mapped_value = []
for value in self.value:
if issubclass(self.subtype, Atomic):
mapped_value.append(value) ### ambiguous # depends on [control=['if'], data=[]]
elif issubclass(self.subtype, AnyAtomic):
mapped_value.append(value.value) ### ambiguous # depends on [control=['if'], data=[]]
elif isinstance(value, self.subtype):
mapped_value.append(value.dict_contents(as_class=as_class)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['value']]
# return what we built
return mapped_value
# constrain it to a list of a specific type of item
setattr(_SequenceOf, 'subtype', klass)
_SequenceOf.__name__ = 'SequenceOf' + klass.__name__
if _debug:
SequenceOf._debug(' - build this class: %r', _SequenceOf) # depends on [control=['if'], data=[]]
# cache this type
_sequence_of_map[klass] = _SequenceOf
_sequence_of_classes[_SequenceOf] = 1
# return this new type
return _SequenceOf |
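
Stripped of BACnet tagging, SequenceOf is a memoized class factory. A minimal sketch of that pattern, with the type checking reduced to isinstance:

_list_of_map = {}

def ListOf(klass):
    if klass in _list_of_map:        # return the cached generated class
        return _list_of_map[klass]

    class _ListOf:
        subtype = klass

        def __init__(self, value=None):
            self.value = list(value or [])

        def append(self, item):
            if not isinstance(item, self.subtype):
                raise TypeError('%s value required' % self.subtype.__name__)
            self.value.append(item)

    _ListOf.__name__ = 'ListOf' + klass.__name__
    _list_of_map[klass] = _ListOf
    return _ListOf

IntList = ListOf(int)
assert IntList is ListOf(int)  # memoized: the same class both times
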
def create_wordpress(self,
service_id,
version_number,
name,
path,
comment=None):
"""Create a wordpress for the specified service and version."""
body = self._formdata({
"name": name,
"path": path,
"comment": comment,
}, FastlyWordpress.FIELDS)
content = self._fetch("/service/%s/version/%d/wordpress" % (service_id, version_number), method="POST", body=body)
return FastlyWordpress(self, content) | def function[create_wordpress, parameter[self, service_id, version_number, name, path, comment]]:
constant[Create a wordpress for the specified service and version.]
variable[body] assign[=] call[name[self]._formdata, parameter[dictionary[[<ast.Constant object at 0x7da1b0f11600>, <ast.Constant object at 0x7da1b0f13250>, <ast.Constant object at 0x7da1b0f12e30>], [<ast.Name object at 0x7da1b0f10bb0>, <ast.Name object at 0x7da1b0f10cd0>, <ast.Name object at 0x7da1b0f117b0>]], name[FastlyWordpress].FIELDS]]
variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/service/%s/version/%d/wordpress] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f10d30>, <ast.Name object at 0x7da1b0f11e40>]]]]]
return[call[name[FastlyWordpress], parameter[name[self], name[content]]]] | keyword[def] identifier[create_wordpress] ( identifier[self] ,
identifier[service_id] ,
identifier[version_number] ,
identifier[name] ,
identifier[path] ,
identifier[comment] = keyword[None] ):
literal[string]
identifier[body] = identifier[self] . identifier[_formdata] ({
literal[string] : identifier[name] ,
literal[string] : identifier[path] ,
literal[string] : identifier[comment] ,
}, identifier[FastlyWordpress] . identifier[FIELDS] )
identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] %( identifier[service_id] , identifier[version_number] ), identifier[method] = literal[string] , identifier[body] = identifier[body] )
keyword[return] identifier[FastlyWordpress] ( identifier[self] , identifier[content] ) | def create_wordpress(self, service_id, version_number, name, path, comment=None):
"""Create a wordpress for the specified service and version."""
body = self._formdata({'name': name, 'path': path, 'comment': comment}, FastlyWordpress.FIELDS)
content = self._fetch('/service/%s/version/%d/wordpress' % (service_id, version_number), method='POST', body=body)
return FastlyWordpress(self, content) |
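
The request body above comes from a _formdata helper that is not shown; below is a standalone approximation of what it appears to do (keep known fields, drop empty ones); the exact rules are assumed.

def formdata(fields, allowed):
    # assumed behaviour: drop unknown keys and None values
    return {k: v for k, v in fields.items() if k in allowed and v is not None}

body = formdata({'name': 'blog', 'path': '/blog', 'comment': None},
                allowed={'name', 'path', 'comment'})
print(body)  # {'name': 'blog', 'path': '/blog'}
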
def dedupe(input_file, output_file, returncmd=False, **kwargs):
"""
Runs dedupe from the bbtools package.
:param input_file: Input file.
:param returncmd: If set to true, function will return the cmd string passed to subprocess as a third value.
:param output_file: Output file.
:param kwargs: Arguments to give to dedupe in parameter=argument format. See dedupe documentation for full list.
:return: out and err: stdout string and stderr string from running dedupe.
"""
options = kwargs_to_string(kwargs)
cmd = 'dedupe.sh in={} out={}{}'.format(input_file, output_file, options)
out, err = accessoryfunctions.run_subprocess(cmd)
if returncmd:
return out, err, cmd
else:
return out, err | def function[dedupe, parameter[input_file, output_file, returncmd]]:
constant[
Runs dedupe from the bbtools package.
:param input_file: Input file.
:param returncmd: If set to true, function will return the cmd string passed to subprocess as a third value.
:param output_file: Output file.
:param kwargs: Arguments to give to dedupe in parameter=argument format. See dedupe documentation for full list.
:return: out and err: stdout string and stderr string from running dedupe.
]
variable[options] assign[=] call[name[kwargs_to_string], parameter[name[kwargs]]]
variable[cmd] assign[=] call[constant[dedupe.sh in={} out={}{}].format, parameter[name[input_file], name[output_file], name[options]]]
<ast.Tuple object at 0x7da18fe90220> assign[=] call[name[accessoryfunctions].run_subprocess, parameter[name[cmd]]]
if name[returncmd] begin[:]
return[tuple[[<ast.Name object at 0x7da1b1d67a00>, <ast.Name object at 0x7da1b1d65ff0>, <ast.Name object at 0x7da1b1d65c90>]]] | keyword[def] identifier[dedupe] ( identifier[input_file] , identifier[output_file] , identifier[returncmd] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[options] = identifier[kwargs_to_string] ( identifier[kwargs] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[input_file] , identifier[output_file] , identifier[options] )
identifier[out] , identifier[err] = identifier[accessoryfunctions] . identifier[run_subprocess] ( identifier[cmd] )
keyword[if] identifier[returncmd] :
keyword[return] identifier[out] , identifier[err] , identifier[cmd]
keyword[else] :
keyword[return] identifier[out] , identifier[err] | def dedupe(input_file, output_file, returncmd=False, **kwargs):
"""
Runs dedupe from the bbtools package.
:param input_file: Input file.
:param returncmd: If set to true, function will return the cmd string passed to subprocess as a third value.
:param output_file: Output file.
:param kwargs: Arguments to give to dedupe in parameter=argument format. See dedupe documentation for full list.
:return: out and err: stdout string and stderr string from running dedupe.
"""
options = kwargs_to_string(kwargs)
cmd = 'dedupe.sh in={} out={}{}'.format(input_file, output_file, options)
(out, err) = accessoryfunctions.run_subprocess(cmd)
if returncmd:
return (out, err, cmd) # depends on [control=['if'], data=[]]
else:
return (out, err) |
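
A usage sketch for the wrapper above, assuming dedupe.sh from bbtools is on PATH and the filenames exist; extra keyword arguments pass straight through as dedupe parameters.

out, err, cmd = dedupe('assembly.fasta',         # hypothetical input
                       'assembly_dedup.fasta',   # hypothetical output
                       returncmd=True,
                       minidentity=97)           # example pass-through option
print(cmd)  # dedupe.sh in=assembly.fasta out=assembly_dedup.fasta ...
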
def address(self, compressed=True, testnet=False):
""" Address property that returns the Base58Check
encoded version of the HASH160.
Args:
compressed (bool): Whether or not the compressed key should
be used.
testnet (bool): Whether or not the key is intended for testnet
usage. False indicates mainnet usage.
Returns:
str: '0x'-prefixed hex string
"""
version = '0x'
return version + binascii.hexlify(self.keccak[12:]).decode('ascii') | def function[address, parameter[self, compressed, testnet]]:
constant[ Address property that returns the '0x'-prefixed hex
encoding of the last 20 bytes of the Keccak hash.
Args:
compressed (bool): Whether or not the compressed key should
be used.
testnet (bool): Whether or not the key is intended for testnet
usage. False indicates mainnet usage.
Returns:
str: '0x'-prefixed hex string
]
variable[version] assign[=] constant[0x]
return[binary_operation[name[version] + call[call[name[binascii].hexlify, parameter[call[name[self].keccak][<ast.Slice object at 0x7da1b22ea0e0>]]].decode, parameter[constant[ascii]]]]] | keyword[def] identifier[address] ( identifier[self] , identifier[compressed] = keyword[True] , identifier[testnet] = keyword[False] ):
literal[string]
identifier[version] = literal[string]
keyword[return] identifier[version] + identifier[binascii] . identifier[hexlify] ( identifier[self] . identifier[keccak] [ literal[int] :]). identifier[decode] ( literal[string] ) | def address(self, compressed=True, testnet=False):
""" Address property that returns the Base58Check
encoded version of the HASH160.
Args:
compressed (bool): Whether or not the compressed key should
be used.
testnet (bool): Whether or not the key is intended for testnet
usage. False indicates mainnet usage.
Returns:
str: '0x'-prefixed hex string
"""
version = '0x'
return version + binascii.hexlify(self.keccak[12:]).decode('ascii') |
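
The property above hex-encodes the last 20 bytes of a Keccak-256 digest. A standalone sketch using pycryptodome's keccak; the public-key bytes are made up.

import binascii
from Crypto.Hash import keccak  # pycryptodome

pubkey = bytes(range(64))  # hypothetical 64-byte uncompressed point
digest = keccak.new(digest_bits=256, data=pubkey).digest()
address = '0x' + binascii.hexlify(digest[12:]).decode('ascii')
print(address)  # '0x' followed by 40 hex characters
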
def update_notification_list(self, apps=None, schema_editor=None, verbose=False):
"""Updates the notification model to ensure all registered
notification classes are listed.
Typically called from a post_migrate signal.
Also, in tests you can register a notification and the Notification
class (not model) will automatically call this method if the
named notification does not exist. See notification.notify()
"""
Notification = (apps or django_apps).get_model("edc_notification.notification")
# flag all notifications as disabled and re-enable as required
Notification.objects.all().update(enabled=False)
if site_notifications.loaded:
if verbose:
sys.stdout.write(
style.MIGRATE_HEADING("Populating Notification model:\n")
)
self.delete_unregistered_notifications(apps=apps)
for name, notification_cls in site_notifications.registry.items():
if verbose:
sys.stdout.write(
f" * Adding '{name}': '{notification_cls().display_name}'\n"
)
try:
obj = Notification.objects.get(name=name)
except ObjectDoesNotExist:
Notification.objects.create(
name=name,
display_name=notification_cls().display_name,
enabled=True,
)
else:
obj.display_name = notification_cls().display_name
obj.enabled = True
obj.save() | def function[update_notification_list, parameter[self, apps, schema_editor, verbose]]:
constant[Updates the notification model to ensure all registered
notification classes are listed.
Typically called from a post_migrate signal.
Also, in tests you can register a notification and the Notification
class (not model) will automatically call this method if the
named notification does not exist. See notification.notify()
]
variable[Notification] assign[=] call[<ast.BoolOp object at 0x7da18fe90130>.get_model, parameter[constant[edc_notification.notification]]]
call[call[name[Notification].objects.all, parameter[]].update, parameter[]]
if name[site_notifications].loaded begin[:]
if name[verbose] begin[:]
call[name[sys].stdout.write, parameter[call[name[style].MIGRATE_HEADING, parameter[constant[Populating Notification model:
]]]]]
call[name[self].delete_unregistered_notifications, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18fe90ee0>, <ast.Name object at 0x7da18fe934f0>]]] in starred[call[name[site_notifications].registry.items, parameter[]]] begin[:]
if name[verbose] begin[:]
call[name[sys].stdout.write, parameter[<ast.JoinedStr object at 0x7da20c993a60>]]
<ast.Try object at 0x7da20c992c20> | keyword[def] identifier[update_notification_list] ( identifier[self] , identifier[apps] = keyword[None] , identifier[schema_editor] = keyword[None] , identifier[verbose] = keyword[False] ):
literal[string]
identifier[Notification] =( identifier[apps] keyword[or] identifier[django_apps] ). identifier[get_model] ( literal[string] )
identifier[Notification] . identifier[objects] . identifier[all] (). identifier[update] ( identifier[enabled] = keyword[False] )
keyword[if] identifier[site_notifications] . identifier[loaded] :
keyword[if] identifier[verbose] :
identifier[sys] . identifier[stdout] . identifier[write] (
identifier[style] . identifier[MIGRATE_HEADING] ( literal[string] )
)
identifier[self] . identifier[delete_unregistered_notifications] ( identifier[apps] = identifier[apps] )
keyword[for] identifier[name] , identifier[notification_cls] keyword[in] identifier[site_notifications] . identifier[registry] . identifier[items] ():
keyword[if] identifier[verbose] :
identifier[sys] . identifier[stdout] . identifier[write] (
literal[string]
)
keyword[try] :
identifier[obj] = identifier[Notification] . identifier[objects] . identifier[get] ( identifier[name] = identifier[name] )
keyword[except] identifier[ObjectDoesNotExist] :
identifier[Notification] . identifier[objects] . identifier[create] (
identifier[name] = identifier[name] ,
identifier[display_name] = identifier[notification_cls] (). identifier[display_name] ,
identifier[enabled] = keyword[True] ,
)
keyword[else] :
identifier[obj] . identifier[display_name] = identifier[notification_cls] (). identifier[display_name]
identifier[obj] . identifier[enabled] = keyword[True]
identifier[obj] . identifier[save] () | def update_notification_list(self, apps=None, schema_editor=None, verbose=False):
"""Updates the notification model to ensure all registered
notification classes are listed.
Typically called from a post_migrate signal.
Also, in tests you can register a notification and the Notification
class (not model) will automatically call this method if the
named notification does not exist. See notification.notify()
"""
Notification = (apps or django_apps).get_model('edc_notification.notification')
# flag all notifications as disabled and re-enable as required
Notification.objects.all().update(enabled=False)
if site_notifications.loaded:
if verbose:
sys.stdout.write(style.MIGRATE_HEADING('Populating Notification model:\n')) # depends on [control=['if'], data=[]]
self.delete_unregistered_notifications(apps=apps)
for (name, notification_cls) in site_notifications.registry.items():
if verbose:
sys.stdout.write(f" * Adding '{name}': '{notification_cls().display_name}'\n") # depends on [control=['if'], data=[]]
try:
obj = Notification.objects.get(name=name) # depends on [control=['try'], data=[]]
except ObjectDoesNotExist:
Notification.objects.create(name=name, display_name=notification_cls().display_name, enabled=True) # depends on [control=['except'], data=[]]
else:
obj.display_name = notification_cls().display_name
obj.enabled = True
obj.save() # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
def __chopStringDict(self, data):
'''Returns a dictionary of the provided raw service/host check string.'''
r = {}
d = data.split('\t')
for item in d:
item_parts = item.split('::')
if len(item_parts) == 2:
(name, value) = item_parts
else:
name = item_parts[0]
value = item_parts[1]
name = self.__filter(name)
r[name] = value
if "hostperfdata" in r:
r["type"] = "hostcheck"
r["perfdata"] = r["hostperfdata"]
r["checkcommand"] = re.search("(.*?)!\(?.*", r["hostcheckcommand"]).group(1)
r["name"] = "hostcheck"
else:
r["type"] = "servicecheck"
r["perfdata"] = r["serviceperfdata"]
r["checkcommand"] = re.search("((.*)(?=\!)|(.*))", r["servicecheckcommand"]).group(1)
r["name"] = self.__filter(r["servicedesc"])
r["hostname"] = self.replacePeriod(self.__filter(r["hostname"]))
return r | def function[__chopStringDict, parameter[self, data]]:
constant[Returns a dictionary of the provided raw service/host check string.]
variable[r] assign[=] dictionary[[], []]
variable[d] assign[=] call[name[data].split, parameter[constant[ ]]]
for taget[name[item]] in starred[name[d]] begin[:]
variable[item_parts] assign[=] call[name[item].split, parameter[constant[::]]]
if compare[call[name[len], parameter[name[item_parts]]] equal[==] constant[2]] begin[:]
<ast.Tuple object at 0x7da204623310> assign[=] name[item_parts]
variable[name] assign[=] call[name[self].__filter, parameter[name[name]]]
call[name[r]][name[name]] assign[=] name[value]
if compare[constant[hostperfdata] in name[r]] begin[:]
call[name[r]][constant[type]] assign[=] constant[hostcheck]
call[name[r]][constant[perfdata]] assign[=] call[name[r]][constant[hostperfdata]]
call[name[r]][constant[checkcommand]] assign[=] call[call[name[re].search, parameter[constant[(.*?)!\(?.*], call[name[r]][constant[hostcheckcommand]]]].group, parameter[constant[1]]]
call[name[r]][constant[name]] assign[=] constant[hostcheck]
call[name[r]][constant[hostname]] assign[=] call[name[self].replacePeriod, parameter[call[name[self].__filter, parameter[call[name[r]][constant[hostname]]]]]]
return[name[r]] | keyword[def] identifier[__chopStringDict] ( identifier[self] , identifier[data] ):
literal[string]
identifier[r] ={}
identifier[d] = identifier[data] . identifier[split] ( literal[string] )
keyword[for] identifier[item] keyword[in] identifier[d] :
identifier[item_parts] = identifier[item] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[item_parts] )== literal[int] :
( identifier[name] , identifier[value] )= identifier[item_parts]
keyword[else] :
identifier[name] = identifier[item_parts] [ literal[int] ]
identifier[value] = identifier[item_parts] [ literal[int] ]
identifier[name] = identifier[self] . identifier[__filter] ( identifier[name] )
identifier[r] [ identifier[name] ]= identifier[value]
keyword[if] literal[string] keyword[in] identifier[r] :
identifier[r] [ literal[string] ]= literal[string]
identifier[r] [ literal[string] ]= identifier[r] [ literal[string] ]
identifier[r] [ literal[string] ]= identifier[re] . identifier[search] ( literal[string] , identifier[r] [ literal[string] ]). identifier[group] ( literal[int] )
identifier[r] [ literal[string] ]= literal[string]
keyword[else] :
identifier[r] [ literal[string] ]= literal[string]
identifier[r] [ literal[string] ]= identifier[r] [ literal[string] ]
identifier[r] [ literal[string] ]= identifier[re] . identifier[search] ( literal[string] , identifier[r] [ literal[string] ]). identifier[group] ( literal[int] )
identifier[r] [ literal[string] ]= identifier[self] . identifier[__filter] ( identifier[r] [ literal[string] ])
identifier[r] [ literal[string] ]= identifier[self] . identifier[replacePeriod] ( identifier[self] . identifier[__filter] ( identifier[r] [ literal[string] ]))
keyword[return] identifier[r] | def __chopStringDict(self, data):
"""Returns a dictionary of the provided raw service/host check string."""
r = {}
d = data.split('\t')
for item in d:
item_parts = item.split('::')
if len(item_parts) == 2:
(name, value) = item_parts # depends on [control=['if'], data=[]]
else:
name = item_parts[0]
value = item_parts[1]
name = self.__filter(name)
r[name] = value # depends on [control=['for'], data=['item']]
if 'hostperfdata' in r:
r['type'] = 'hostcheck'
r['perfdata'] = r['hostperfdata']
r['checkcommand'] = re.search('(.*?)!\\(?.*', r['hostcheckcommand']).group(1)
r['name'] = 'hostcheck' # depends on [control=['if'], data=['r']]
else:
r['type'] = 'servicecheck'
r['perfdata'] = r['serviceperfdata']
r['checkcommand'] = re.search('((.*)(?=\\!)|(.*))', r['servicecheckcommand']).group(1)
r['name'] = self.__filter(r['servicedesc'])
r['hostname'] = self.replacePeriod(self.__filter(r['hostname']))
return r |
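
The parsing loop above boils down to splitting tab-separated 'name::value' pairs. A simplified standalone version of just that step (it assumes one '::' per item, which the original handles more defensively):

raw = "hostname::web01\tservicedesc::CPU load\tserviceperfdata::load1=0.42"
fields = dict(item.split('::', 1) for item in raw.split('\t'))
print(fields['hostname'], fields['servicedesc'], fields['serviceperfdata'])
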
def send(reg_id, message, **kwargs):
"""
Site: https://developers.google.com
API: https://developers.google.com/web/updates/2016/03/web-push-encryption
    Desc: Web Push notifications for Chrome and Firefox
Installation:
pip install 'pywebpush>=0.4.0'
"""
subscription_info = kwargs.pop('subscription_info')
payload = {
"title": kwargs.pop("event"),
"body": message,
"url": kwargs.pop("push_url", None)
}
payload.update(kwargs)
wp = WebPusher(subscription_info)
response = wp.send(
dumps(payload), gcm_key=settings.GCM_KEY,
ttl=kwargs.pop("ttl", 60))
if not response.ok or (
response.text and loads(response.text).get("failure") > 0):
raise GCMError(response.text)
return True | def function[send, parameter[reg_id, message]]:
constant[
Site: https://developers.google.com
API: https://developers.google.com/web/updates/2016/03/web-push-encryption
Desc: Web Push notifications for Chrome and Firefox
Installation:
pip install 'pywebpush>=0.4.0'
]
variable[subscription_info] assign[=] call[name[kwargs].pop, parameter[constant[subscription_info]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da2054a6980>, <ast.Constant object at 0x7da2054a5e40>, <ast.Constant object at 0x7da2054a5750>], [<ast.Call object at 0x7da2054a5540>, <ast.Name object at 0x7da2054a7130>, <ast.Call object at 0x7da2054a4c40>]]
call[name[payload].update, parameter[name[kwargs]]]
variable[wp] assign[=] call[name[WebPusher], parameter[name[subscription_info]]]
variable[response] assign[=] call[name[wp].send, parameter[call[name[dumps], parameter[name[payload]]]]]
if <ast.BoolOp object at 0x7da2054a75e0> begin[:]
<ast.Raise object at 0x7da18f58c310>
return[constant[True]] | keyword[def] identifier[send] ( identifier[reg_id] , identifier[message] ,** identifier[kwargs] ):
literal[string]
identifier[subscription_info] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[payload] ={
literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] ),
literal[string] : identifier[message] ,
literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
}
identifier[payload] . identifier[update] ( identifier[kwargs] )
identifier[wp] = identifier[WebPusher] ( identifier[subscription_info] )
identifier[response] = identifier[wp] . identifier[send] (
identifier[dumps] ( identifier[payload] ), identifier[gcm_key] = identifier[settings] . identifier[GCM_KEY] ,
identifier[ttl] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] ))
keyword[if] keyword[not] identifier[response] . identifier[ok] keyword[or] (
identifier[response] . identifier[text] keyword[and] identifier[loads] ( identifier[response] . identifier[text] ). identifier[get] ( literal[string] )> literal[int] ):
keyword[raise] identifier[GCMError] ( identifier[response] . identifier[text] )
keyword[return] keyword[True] | def send(reg_id, message, **kwargs):
"""
Site: https://developers.google.com
API: https://developers.google.com/web/updates/2016/03/web-push-encryption
    Desc: Web Push notifications for Chrome and Firefox
Installation:
pip install 'pywebpush>=0.4.0'
"""
subscription_info = kwargs.pop('subscription_info')
payload = {'title': kwargs.pop('event'), 'body': message, 'url': kwargs.pop('push_url', None)}
payload.update(kwargs)
wp = WebPusher(subscription_info)
response = wp.send(dumps(payload), gcm_key=settings.GCM_KEY, ttl=kwargs.pop('ttl', 60))
if not response.ok or (response.text and loads(response.text).get('failure') > 0):
raise GCMError(response.text) # depends on [control=['if'], data=[]]
return True |
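# Hedged usage sketch for send(): the subscription dict, event name, and URL
# below are placeholders; a real subscription_info comes from the browser's
# PushManager.subscribe() call, and GCM_KEY from Django settings. Note that
# reg_id is accepted but unused by the body above; delivery is routed
# entirely through subscription_info.
sub = {
    'endpoint': 'https://android.googleapis.com/gcm/send/abc123',
    'keys': {'p256dh': '<client-public-key>', 'auth': '<auth-secret>'},
}
send('unused-reg-id', 'Build #42 finished',
     subscription_info=sub, event='ci.build', push_url='https://example.com/b/42')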
def _process_exclude_dictionary(exclude_dictionary):
"""
Based on values in the exclude_dictionary generate a list of term queries that
will filter out unwanted results.
"""
# not_properties will hold the generated term queries.
not_properties = []
for exclude_property in exclude_dictionary:
exclude_values = exclude_dictionary[exclude_property]
if not isinstance(exclude_values, list):
exclude_values = [exclude_values]
not_properties.extend([{"term": {exclude_property: exclude_value}} for exclude_value in exclude_values])
# Returning a query segment with an empty list freaks out ElasticSearch,
# so just return an empty segment.
if not not_properties:
return {}
return {
"not": {
"filter": {
"or": not_properties
}
}
} | def function[_process_exclude_dictionary, parameter[exclude_dictionary]]:
constant[
Based on values in the exclude_dictionary generate a list of term queries that
will filter out unwanted results.
]
variable[not_properties] assign[=] list[[]]
for taget[name[exclude_property]] in starred[name[exclude_dictionary]] begin[:]
variable[exclude_values] assign[=] call[name[exclude_dictionary]][name[exclude_property]]
if <ast.UnaryOp object at 0x7da1b00dd9f0> begin[:]
variable[exclude_values] assign[=] list[[<ast.Name object at 0x7da1b00de620>]]
call[name[not_properties].extend, parameter[<ast.ListComp object at 0x7da1b00de500>]]
if <ast.UnaryOp object at 0x7da1b00df430> begin[:]
return[dictionary[[], []]]
return[dictionary[[<ast.Constant object at 0x7da1b00dd1b0>], [<ast.Dict object at 0x7da1b00dd1e0>]]] | keyword[def] identifier[_process_exclude_dictionary] ( identifier[exclude_dictionary] ):
literal[string]
identifier[not_properties] =[]
keyword[for] identifier[exclude_property] keyword[in] identifier[exclude_dictionary] :
identifier[exclude_values] = identifier[exclude_dictionary] [ identifier[exclude_property] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[exclude_values] , identifier[list] ):
identifier[exclude_values] =[ identifier[exclude_values] ]
identifier[not_properties] . identifier[extend] ([{ literal[string] :{ identifier[exclude_property] : identifier[exclude_value] }} keyword[for] identifier[exclude_value] keyword[in] identifier[exclude_values] ])
keyword[if] keyword[not] identifier[not_properties] :
keyword[return] {}
keyword[return] {
literal[string] :{
literal[string] :{
literal[string] : identifier[not_properties]
}
}
} | def _process_exclude_dictionary(exclude_dictionary):
"""
Based on values in the exclude_dictionary generate a list of term queries that
will filter out unwanted results.
"""
# not_properties will hold the generated term queries.
not_properties = []
for exclude_property in exclude_dictionary:
exclude_values = exclude_dictionary[exclude_property]
if not isinstance(exclude_values, list):
exclude_values = [exclude_values] # depends on [control=['if'], data=[]]
not_properties.extend([{'term': {exclude_property: exclude_value}} for exclude_value in exclude_values]) # depends on [control=['for'], data=['exclude_property']]
# Returning a query segment with an empty list freaks out ElasticSearch,
# so just return an empty segment.
if not not_properties:
return {} # depends on [control=['if'], data=[]]
return {'not': {'filter': {'or': not_properties}}} |
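# Usage sketch: scalar values are wrapped into single-element lists, so both
# forms below end up as "term" clauses under one or-filter (clause order may
# vary on Pythons without ordered dicts).
exclude = {'org': 'DemoX', 'course': ['c1', 'c2']}   # illustrative values
print(_process_exclude_dictionary(exclude))
# {'not': {'filter': {'or': [{'term': {'org': 'DemoX'}},
#                            {'term': {'course': 'c1'}},
#                            {'term': {'course': 'c2'}}]}}}
print(_process_exclude_dictionary({}))               # -> {}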
def get_conn():
'''
Return a conn object for the passed VM data
'''
driver = get_driver(Provider.CLOUDSTACK)
verify_ssl_cert = config.get_cloud_config_value('verify_ssl_cert',
get_configured_provider(),
__opts__,
default=True,
search_global=False)
if verify_ssl_cert is False:
try:
import libcloud.security
libcloud.security.VERIFY_SSL_CERT = False
except (ImportError, AttributeError):
raise SaltCloudSystemExit(
'Could not disable SSL certificate verification. '
'Not loading module.'
)
return driver(
key=config.get_cloud_config_value(
'apikey', get_configured_provider(), __opts__, search_global=False
),
secret=config.get_cloud_config_value(
'secretkey', get_configured_provider(), __opts__,
search_global=False
),
secure=config.get_cloud_config_value(
'secure', get_configured_provider(), __opts__,
default=True, search_global=False
),
host=config.get_cloud_config_value(
'host', get_configured_provider(), __opts__, search_global=False
),
path=config.get_cloud_config_value(
'path', get_configured_provider(), __opts__, search_global=False
),
port=config.get_cloud_config_value(
'port', get_configured_provider(), __opts__,
default=None, search_global=False
)
) | def function[get_conn, parameter[]]:
constant[
Return a conn object for the passed VM data
]
variable[driver] assign[=] call[name[get_driver], parameter[name[Provider].CLOUDSTACK]]
variable[verify_ssl_cert] assign[=] call[name[config].get_cloud_config_value, parameter[constant[verify_ssl_cert], call[name[get_configured_provider], parameter[]], name[__opts__]]]
if compare[name[verify_ssl_cert] is constant[False]] begin[:]
<ast.Try object at 0x7da20cabc6d0>
return[call[name[driver], parameter[]]] | keyword[def] identifier[get_conn] ():
literal[string]
identifier[driver] = identifier[get_driver] ( identifier[Provider] . identifier[CLOUDSTACK] )
identifier[verify_ssl_cert] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] ,
identifier[get_configured_provider] (),
identifier[__opts__] ,
identifier[default] = keyword[True] ,
identifier[search_global] = keyword[False] )
keyword[if] identifier[verify_ssl_cert] keyword[is] keyword[False] :
keyword[try] :
keyword[import] identifier[libcloud] . identifier[security]
identifier[libcloud] . identifier[security] . identifier[VERIFY_SSL_CERT] = keyword[False]
keyword[except] ( identifier[ImportError] , identifier[AttributeError] ):
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
keyword[return] identifier[driver] (
identifier[key] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] , identifier[search_global] = keyword[False]
),
identifier[secret] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] ,
identifier[search_global] = keyword[False]
),
identifier[secure] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] ,
identifier[default] = keyword[True] , identifier[search_global] = keyword[False]
),
identifier[host] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] , identifier[search_global] = keyword[False]
),
identifier[path] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] , identifier[search_global] = keyword[False]
),
identifier[port] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[get_configured_provider] (), identifier[__opts__] ,
identifier[default] = keyword[None] , identifier[search_global] = keyword[False]
)
) | def get_conn():
"""
Return a conn object for the passed VM data
"""
driver = get_driver(Provider.CLOUDSTACK)
verify_ssl_cert = config.get_cloud_config_value('verify_ssl_cert', get_configured_provider(), __opts__, default=True, search_global=False)
if verify_ssl_cert is False:
try:
import libcloud.security
libcloud.security.VERIFY_SSL_CERT = False # depends on [control=['try'], data=[]]
except (ImportError, AttributeError):
raise SaltCloudSystemExit('Could not disable SSL certificate verification. Not loading module.') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return driver(key=config.get_cloud_config_value('apikey', get_configured_provider(), __opts__, search_global=False), secret=config.get_cloud_config_value('secretkey', get_configured_provider(), __opts__, search_global=False), secure=config.get_cloud_config_value('secure', get_configured_provider(), __opts__, default=True, search_global=False), host=config.get_cloud_config_value('host', get_configured_provider(), __opts__, search_global=False), path=config.get_cloud_config_value('path', get_configured_provider(), __opts__, search_global=False), port=config.get_cloud_config_value('port', get_configured_provider(), __opts__, default=None, search_global=False)) |
def close(self):
"""Stop serving the :attr:`.Server.sockets` and close all
concurrent connections.
"""
transports, self.transports = self.transports, []
for transport in transports:
transport.close() | def function[close, parameter[self]]:
constant[Stop serving the :attr:`.Server.sockets` and close all
concurrent connections.
]
<ast.Tuple object at 0x7da20c992b90> assign[=] tuple[[<ast.Attribute object at 0x7da20c992ce0>, <ast.List object at 0x7da20c9923b0>]]
for taget[name[transport]] in starred[name[transports]] begin[:]
call[name[transport].close, parameter[]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[transports] , identifier[self] . identifier[transports] = identifier[self] . identifier[transports] ,[]
keyword[for] identifier[transport] keyword[in] identifier[transports] :
identifier[transport] . identifier[close] () | def close(self):
"""Stop serving the :attr:`.Server.sockets` and close all
concurrent connections.
"""
(transports, self.transports) = (self.transports, [])
for transport in transports:
transport.close() # depends on [control=['for'], data=['transport']] |
def download(url, dir, filename=None, expect_size=None):
"""
Download URL to a directory.
Will figure out the filename automatically from URL, if not given.
"""
mkdir_p(dir)
if filename is None:
filename = url.split('/')[-1]
fpath = os.path.join(dir, filename)
if os.path.isfile(fpath):
if expect_size is not None and os.stat(fpath).st_size == expect_size:
logger.info("File {} exists! Skip download.".format(filename))
return fpath
else:
logger.warn("File {} exists. Will overwrite with a new download!".format(filename))
def hook(t):
last_b = [0]
def inner(b, bsize, tsize=None):
if tsize is not None:
t.total = tsize
t.update((b - last_b[0]) * bsize)
last_b[0] = b
return inner
try:
with tqdm.tqdm(unit='B', unit_scale=True, miniters=1, desc=filename) as t:
fpath, _ = urllib.request.urlretrieve(url, fpath, reporthook=hook(t))
statinfo = os.stat(fpath)
size = statinfo.st_size
except IOError:
logger.error("Failed to download {}".format(url))
raise
assert size > 0, "Downloaded an empty file from {}!".format(url)
if expect_size is not None and size != expect_size:
logger.error("File downloaded from {} does not match the expected size!".format(url))
logger.error("You may have downloaded a broken file, or the upstream may have modified the file.")
# TODO human-readable size
    logger.info('Successfully downloaded ' + filename + ". " + str(size) + ' bytes.')
return fpath | def function[download, parameter[url, dir, filename, expect_size]]:
constant[
Download URL to a directory.
Will figure out the filename automatically from URL, if not given.
]
call[name[mkdir_p], parameter[name[dir]]]
if compare[name[filename] is constant[None]] begin[:]
variable[filename] assign[=] call[call[name[url].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da18bccad40>]
variable[fpath] assign[=] call[name[os].path.join, parameter[name[dir], name[filename]]]
if call[name[os].path.isfile, parameter[name[fpath]]] begin[:]
if <ast.BoolOp object at 0x7da18bcc8e80> begin[:]
call[name[logger].info, parameter[call[constant[File {} exists! Skip download.].format, parameter[name[filename]]]]]
return[name[fpath]]
def function[hook, parameter[t]]:
variable[last_b] assign[=] list[[<ast.Constant object at 0x7da18bccbbb0>]]
def function[inner, parameter[b, bsize, tsize]]:
if compare[name[tsize] is_not constant[None]] begin[:]
name[t].total assign[=] name[tsize]
call[name[t].update, parameter[binary_operation[binary_operation[name[b] - call[name[last_b]][constant[0]]] * name[bsize]]]]
call[name[last_b]][constant[0]] assign[=] name[b]
return[name[inner]]
<ast.Try object at 0x7da18bcc9930>
assert[compare[name[size] greater[>] constant[0]]]
if <ast.BoolOp object at 0x7da18f723df0> begin[:]
call[name[logger].error, parameter[call[constant[File downloaded from {} does not match the expected size!].format, parameter[name[url]]]]]
call[name[logger].error, parameter[constant[You may have downloaded a broken file, or the upstream may have modified the file.]]]
call[name[logger].info, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[Successfully downloaded ] + name[filename]] + constant[. ]] + call[name[str], parameter[name[size]]]] + constant[ bytes.]]]]
return[name[fpath]] | keyword[def] identifier[download] ( identifier[url] , identifier[dir] , identifier[filename] = keyword[None] , identifier[expect_size] = keyword[None] ):
literal[string]
identifier[mkdir_p] ( identifier[dir] )
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = identifier[url] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[fpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[filename] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fpath] ):
keyword[if] identifier[expect_size] keyword[is] keyword[not] keyword[None] keyword[and] identifier[os] . identifier[stat] ( identifier[fpath] ). identifier[st_size] == identifier[expect_size] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[filename] ))
keyword[return] identifier[fpath]
keyword[else] :
identifier[logger] . identifier[warn] ( literal[string] . identifier[format] ( identifier[filename] ))
keyword[def] identifier[hook] ( identifier[t] ):
identifier[last_b] =[ literal[int] ]
keyword[def] identifier[inner] ( identifier[b] , identifier[bsize] , identifier[tsize] = keyword[None] ):
keyword[if] identifier[tsize] keyword[is] keyword[not] keyword[None] :
identifier[t] . identifier[total] = identifier[tsize]
identifier[t] . identifier[update] (( identifier[b] - identifier[last_b] [ literal[int] ])* identifier[bsize] )
identifier[last_b] [ literal[int] ]= identifier[b]
keyword[return] identifier[inner]
keyword[try] :
keyword[with] identifier[tqdm] . identifier[tqdm] ( identifier[unit] = literal[string] , identifier[unit_scale] = keyword[True] , identifier[miniters] = literal[int] , identifier[desc] = identifier[filename] ) keyword[as] identifier[t] :
identifier[fpath] , identifier[_] = identifier[urllib] . identifier[request] . identifier[urlretrieve] ( identifier[url] , identifier[fpath] , identifier[reporthook] = identifier[hook] ( identifier[t] ))
identifier[statinfo] = identifier[os] . identifier[stat] ( identifier[fpath] )
identifier[size] = identifier[statinfo] . identifier[st_size]
keyword[except] identifier[IOError] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[url] ))
keyword[raise]
keyword[assert] identifier[size] > literal[int] , literal[string] . identifier[format] ( identifier[url] )
keyword[if] identifier[expect_size] keyword[is] keyword[not] keyword[None] keyword[and] identifier[size] != identifier[expect_size] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[url] ))
identifier[logger] . identifier[error] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] + identifier[filename] + literal[string] + identifier[str] ( identifier[size] )+ literal[string] )
keyword[return] identifier[fpath] | def download(url, dir, filename=None, expect_size=None):
"""
Download URL to a directory.
Will figure out the filename automatically from URL, if not given.
"""
mkdir_p(dir)
if filename is None:
filename = url.split('/')[-1] # depends on [control=['if'], data=['filename']]
fpath = os.path.join(dir, filename)
if os.path.isfile(fpath):
if expect_size is not None and os.stat(fpath).st_size == expect_size:
logger.info('File {} exists! Skip download.'.format(filename))
return fpath # depends on [control=['if'], data=[]]
else:
logger.warn('File {} exists. Will overwrite with a new download!'.format(filename)) # depends on [control=['if'], data=[]]
def hook(t):
last_b = [0]
def inner(b, bsize, tsize=None):
if tsize is not None:
t.total = tsize # depends on [control=['if'], data=['tsize']]
t.update((b - last_b[0]) * bsize)
last_b[0] = b
return inner
try:
with tqdm.tqdm(unit='B', unit_scale=True, miniters=1, desc=filename) as t:
(fpath, _) = urllib.request.urlretrieve(url, fpath, reporthook=hook(t)) # depends on [control=['with'], data=['t']]
statinfo = os.stat(fpath)
size = statinfo.st_size # depends on [control=['try'], data=[]]
except IOError:
logger.error('Failed to download {}'.format(url))
raise # depends on [control=['except'], data=[]]
assert size > 0, 'Downloaded an empty file from {}!'.format(url)
if expect_size is not None and size != expect_size:
logger.error('File downloaded from {} does not match the expected size!'.format(url))
logger.error('You may have downloaded a broken file, or the upstream may have modified the file.') # depends on [control=['if'], data=[]]
# TODO human-readable size
    logger.info('Successfully downloaded ' + filename + '. ' + str(size) + ' bytes.')
return fpath |
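# Hedged usage sketch with a placeholder URL; requires tqdm and network
# access. Passing expect_size as the known byte count lets download() skip
# an already complete file and flag truncated transfers.
fpath = download('https://example.com/data/model.npz',   # placeholder URL
                 dir='/tmp/models', expect_size=None)
print(fpath)  # /tmp/models/model.npz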
def _lock(self):
"""Lock the config DB."""
if not self.locked:
self.device.cu.lock()
self.locked = True | def function[_lock, parameter[self]]:
constant[Lock the config DB.]
if <ast.UnaryOp object at 0x7da1b26ac1c0> begin[:]
call[name[self].device.cu.lock, parameter[]]
name[self].locked assign[=] constant[True] | keyword[def] identifier[_lock] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[locked] :
identifier[self] . identifier[device] . identifier[cu] . identifier[lock] ()
identifier[self] . identifier[locked] = keyword[True] | def _lock(self):
"""Lock the config DB."""
if not self.locked:
self.device.cu.lock()
self.locked = True # depends on [control=['if'], data=[]] |
def plugitInclude(parser, token):
"""
Load and render a template, using the same context of a specific action.
Example: {% plugitInclude "/menuBar" %}
"""
bits = token.split_contents()
if len(bits) != 2:
        raise TemplateSyntaxError("'plugitInclude' tag takes one argument: the template's action to use")
action = parser.compile_filter(bits[1])
return PlugItIncludeNode(action) | def function[plugitInclude, parameter[parser, token]]:
constant[
Load and render a template, using the same context of a specific action.
Example: {% plugitInclude "/menuBar" %}
]
variable[bits] assign[=] call[name[token].split_contents, parameter[]]
if compare[call[name[len], parameter[name[bits]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da18f721390>
variable[action] assign[=] call[name[parser].compile_filter, parameter[call[name[bits]][constant[1]]]]
return[call[name[PlugItIncludeNode], parameter[name[action]]]] | keyword[def] identifier[plugitInclude] ( identifier[parser] , identifier[token] ):
literal[string]
identifier[bits] = identifier[token] . identifier[split_contents] ()
keyword[if] identifier[len] ( identifier[bits] )!= literal[int] :
keyword[raise] identifier[TemplateSyntaxError] ( literal[string] )
identifier[action] = identifier[parser] . identifier[compile_filter] ( identifier[bits] [ literal[int] ])
keyword[return] identifier[PlugItIncludeNode] ( identifier[action] ) | def plugitInclude(parser, token):
"""
Load and render a template, using the same context of a specific action.
Example: {% plugitInclude "/menuBar" %}
"""
bits = token.split_contents()
if len(bits) != 2:
        raise TemplateSyntaxError("'plugitInclude' tag takes one argument: the template's action to use") # depends on [control=['if'], data=[]]
action = parser.compile_filter(bits[1])
return PlugItIncludeNode(action) |
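# Hedged sketch: registering the compile function above as a Django template
# tag. The Library instance is an assumption; in the real module it would
# already exist alongside PlugItIncludeNode.
from django import template
register = template.Library()
register.tag('plugitInclude', plugitInclude)
# Template usage, as in the docstring: {% plugitInclude "/menuBar" %}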
def walkParams(intf, discovered):
"""
walk parameter instances on this interface
"""
for si in intf._interfaces:
yield from walkParams(si, discovered)
for p in intf._params:
if p not in discovered:
discovered.add(p)
yield p | def function[walkParams, parameter[intf, discovered]]:
constant[
walk parameter instances on this interface
]
for taget[name[si]] in starred[name[intf]._interfaces] begin[:]
<ast.YieldFrom object at 0x7da1b0553f40>
for taget[name[p]] in starred[name[intf]._params] begin[:]
if compare[name[p] <ast.NotIn object at 0x7da2590d7190> name[discovered]] begin[:]
call[name[discovered].add, parameter[name[p]]]
<ast.Yield object at 0x7da1b05bfee0> | keyword[def] identifier[walkParams] ( identifier[intf] , identifier[discovered] ):
literal[string]
keyword[for] identifier[si] keyword[in] identifier[intf] . identifier[_interfaces] :
keyword[yield] keyword[from] identifier[walkParams] ( identifier[si] , identifier[discovered] )
keyword[for] identifier[p] keyword[in] identifier[intf] . identifier[_params] :
keyword[if] identifier[p] keyword[not] keyword[in] identifier[discovered] :
identifier[discovered] . identifier[add] ( identifier[p] )
keyword[yield] identifier[p] | def walkParams(intf, discovered):
"""
walk parameter instances on this interface
"""
for si in intf._interfaces:
yield from walkParams(si, discovered) # depends on [control=['for'], data=['si']]
for p in intf._params:
if p not in discovered:
discovered.add(p)
yield p # depends on [control=['if'], data=['p', 'discovered']] # depends on [control=['for'], data=['p']] |
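# Usage sketch: the shared `discovered` set deduplicates parameters that
# appear on several nested sub-interfaces, so each one is yielded exactly
# once per walk. `top_intf` stands in for an assumed interface object.
discovered = set()
for p in walkParams(top_intf, discovered):
    print(p)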
def act(self, event, *args, **kwargs):
"""
Act on the specific life cycle event. The action here is to invoke the hook function on all registered plugins.
*args and **kwargs will be passed directly to the plugin's hook functions
:param samtranslator.plugins.LifeCycleEvents event: Event to act upon
:return: Nothing
:raises ValueError: If event is not a valid life cycle event
:raises NameError: If a plugin does not have the hook method defined
:raises Exception: Any exception that a plugin raises
"""
if not isinstance(event, LifeCycleEvents):
raise ValueError("'event' must be an instance of LifeCycleEvents class")
method_name = "on_" + event.name
for plugin in self._plugins:
if not hasattr(plugin, method_name):
raise NameError("'{}' method is not found in the plugin with name '{}'"
.format(method_name, plugin.name))
try:
getattr(plugin, method_name)(*args, **kwargs)
except InvalidResourceException as ex:
# Don't need to log these because they don't result in crashes
raise ex
except Exception as ex:
logging.exception("Plugin '%s' raised an exception: %s", plugin.name, ex)
raise ex | def function[act, parameter[self, event]]:
constant[
Act on the specific life cycle event. The action here is to invoke the hook function on all registered plugins.
*args and **kwargs will be passed directly to the plugin's hook functions
:param samtranslator.plugins.LifeCycleEvents event: Event to act upon
:return: Nothing
:raises ValueError: If event is not a valid life cycle event
:raises NameError: If a plugin does not have the hook method defined
:raises Exception: Any exception that a plugin raises
]
if <ast.UnaryOp object at 0x7da1b1f94b50> begin[:]
<ast.Raise object at 0x7da1b1f96ce0>
variable[method_name] assign[=] binary_operation[constant[on_] + name[event].name]
for taget[name[plugin]] in starred[name[self]._plugins] begin[:]
if <ast.UnaryOp object at 0x7da18f58c8b0> begin[:]
<ast.Raise object at 0x7da18f58e680>
<ast.Try object at 0x7da18f58f070> | keyword[def] identifier[act] ( identifier[self] , identifier[event] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[event] , identifier[LifeCycleEvents] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[method_name] = literal[string] + identifier[event] . identifier[name]
keyword[for] identifier[plugin] keyword[in] identifier[self] . identifier[_plugins] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[plugin] , identifier[method_name] ):
keyword[raise] identifier[NameError] ( literal[string]
. identifier[format] ( identifier[method_name] , identifier[plugin] . identifier[name] ))
keyword[try] :
identifier[getattr] ( identifier[plugin] , identifier[method_name] )(* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[InvalidResourceException] keyword[as] identifier[ex] :
keyword[raise] identifier[ex]
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[logging] . identifier[exception] ( literal[string] , identifier[plugin] . identifier[name] , identifier[ex] )
keyword[raise] identifier[ex] | def act(self, event, *args, **kwargs):
"""
Act on the specific life cycle event. The action here is to invoke the hook function on all registered plugins.
*args and **kwargs will be passed directly to the plugin's hook functions
:param samtranslator.plugins.LifeCycleEvents event: Event to act upon
:return: Nothing
:raises ValueError: If event is not a valid life cycle event
:raises NameError: If a plugin does not have the hook method defined
:raises Exception: Any exception that a plugin raises
"""
if not isinstance(event, LifeCycleEvents):
raise ValueError("'event' must be an instance of LifeCycleEvents class") # depends on [control=['if'], data=[]]
method_name = 'on_' + event.name
for plugin in self._plugins:
if not hasattr(plugin, method_name):
raise NameError("'{}' method is not found in the plugin with name '{}'".format(method_name, plugin.name)) # depends on [control=['if'], data=[]]
try:
getattr(plugin, method_name)(*args, **kwargs) # depends on [control=['try'], data=[]]
except InvalidResourceException as ex:
# Don't need to log these because they don't result in crashes
raise ex # depends on [control=['except'], data=['ex']]
except Exception as ex:
logging.exception("Plugin '%s' raised an exception: %s", plugin.name, ex)
raise ex # depends on [control=['except'], data=['ex']] # depends on [control=['for'], data=['plugin']] |
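# Hedged sketch of the dispatch contract enforced by act(): every registered
# plugin must expose an "on_<event name>" method. The event name below is
# assumed to be a LifeCycleEvents member; the plugin itself is illustrative.
class LoggingPlugin(object):
    name = 'logging-plugin'

    def on_before_transform_template(self, template):
        # invoked via getattr(plugin, 'on_before_transform_template')(template)
        print('transforming', len(template.get('Resources', {})), 'resources')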
def image(array, domain=None, width=None, format='png', **kwargs):
"""Display an image.
Args:
array: NumPy array representing the image
      format: Image format e.g. png, jpeg
domain: Domain of pixel values, inferred from min & max values if None
      width: width of output image, scaled using nearest neighbor interpolation.
size unchanged if None
"""
image_data = serialize_array(array, fmt=format, domain=domain)
image = IPython.display.Image(data=image_data, format=format, width=width)
IPython.display.display(image) | def function[image, parameter[array, domain, width, format]]:
constant[Display an image.
Args:
array: NumPy array representing the image
format: Image format e.g. png, jpeg
domain: Domain of pixel values, inferred from min & max values if None
width: width of output image, scaled using nearest neighbor interpolation.
size unchanged if None
]
variable[image_data] assign[=] call[name[serialize_array], parameter[name[array]]]
variable[image] assign[=] call[name[IPython].display.Image, parameter[]]
call[name[IPython].display.display, parameter[name[image]]] | keyword[def] identifier[image] ( identifier[array] , identifier[domain] = keyword[None] , identifier[width] = keyword[None] , identifier[format] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[image_data] = identifier[serialize_array] ( identifier[array] , identifier[fmt] = identifier[format] , identifier[domain] = identifier[domain] )
identifier[image] = identifier[IPython] . identifier[display] . identifier[Image] ( identifier[data] = identifier[image_data] , identifier[format] = identifier[format] , identifier[width] = identifier[width] )
identifier[IPython] . identifier[display] . identifier[display] ( identifier[image] ) | def image(array, domain=None, width=None, format='png', **kwargs):
"""Display an image.
Args:
array: NumPy array representing the image
fmt: Image format e.g. png, jpeg
domain: Domain of pixel values, inferred from min & max values if None
w: width of output image, scaled using nearest neighbor interpolation.
size unchanged if None
"""
image_data = serialize_array(array, fmt=format, domain=domain)
image = IPython.display.Image(data=image_data, format=format, width=width)
IPython.display.display(image) |
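# Usage sketch inside a notebook; the (low, high) tuple form of `domain` is
# an assumption about what serialize_array accepts for pixel ranges.
import numpy as np
rgb = np.random.rand(64, 64, 3)            # float image in [0, 1]
image(rgb, domain=(0, 1), width=256, format='png')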
def InputSplines(seq_length, n_bases=10, name=None, **kwargs):
"""Input placeholder for array returned by `encodeSplines`
Wrapper for: `keras.layers.Input((seq_length, n_bases), name=name, **kwargs)`
"""
return Input((seq_length, n_bases), name=name, **kwargs) | def function[InputSplines, parameter[seq_length, n_bases, name]]:
constant[Input placeholder for array returned by `encodeSplines`
Wrapper for: `keras.layers.Input((seq_length, n_bases), name=name, **kwargs)`
]
return[call[name[Input], parameter[tuple[[<ast.Name object at 0x7da207f033a0>, <ast.Name object at 0x7da207f02200>]]]]] | keyword[def] identifier[InputSplines] ( identifier[seq_length] , identifier[n_bases] = literal[int] , identifier[name] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[Input] (( identifier[seq_length] , identifier[n_bases] ), identifier[name] = identifier[name] ,** identifier[kwargs] ) | def InputSplines(seq_length, n_bases=10, name=None, **kwargs):
"""Input placeholder for array returned by `encodeSplines`
Wrapper for: `keras.layers.Input((seq_length, n_bases), name=name, **kwargs)`
"""
return Input((seq_length, n_bases), name=name, **kwargs) |
def results_equal(a, b):
"""Compares two result instances
Checks full name and all data. Does not consider the comment.
:return: True or False
    :raises: ValueError if both inputs are parameters instead of results
"""
if a.v_is_parameter and b.v_is_parameter:
raise ValueError('Both inputs are not results.')
if a.v_is_parameter or b.v_is_parameter:
return False
if a.v_full_name != b.v_full_name:
return False
if hasattr(a, '_data') and not hasattr(b, '_data'):
return False
if hasattr(a, '_data'):
akeyset = set(a._data.keys())
bkeyset = set(b._data.keys())
if akeyset != bkeyset:
return False
for key in a._data:
val = a._data[key]
bval = b._data[key]
if not nested_equal(val, bval):
return False
return True | def function[results_equal, parameter[a, b]]:
constant[Compares two result instances
Checks full name and all data. Does not consider the comment.
:return: True or False
:raises: ValueError if both inputs are parameters instead of results
]
if <ast.BoolOp object at 0x7da20c6a89d0> begin[:]
<ast.Raise object at 0x7da20c6a9b70>
if <ast.BoolOp object at 0x7da20c6aa5c0> begin[:]
return[constant[False]]
if compare[name[a].v_full_name not_equal[!=] name[b].v_full_name] begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da20c6a8190> begin[:]
return[constant[False]]
if call[name[hasattr], parameter[name[a], constant[_data]]] begin[:]
variable[akeyset] assign[=] call[name[set], parameter[call[name[a]._data.keys, parameter[]]]]
variable[bkeyset] assign[=] call[name[set], parameter[call[name[b]._data.keys, parameter[]]]]
if compare[name[akeyset] not_equal[!=] name[bkeyset]] begin[:]
return[constant[False]]
for taget[name[key]] in starred[name[a]._data] begin[:]
variable[val] assign[=] call[name[a]._data][name[key]]
variable[bval] assign[=] call[name[b]._data][name[key]]
if <ast.UnaryOp object at 0x7da20c6ab3a0> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[results_equal] ( identifier[a] , identifier[b] ):
literal[string]
keyword[if] identifier[a] . identifier[v_is_parameter] keyword[and] identifier[b] . identifier[v_is_parameter] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[a] . identifier[v_is_parameter] keyword[or] identifier[b] . identifier[v_is_parameter] :
keyword[return] keyword[False]
keyword[if] identifier[a] . identifier[v_full_name] != identifier[b] . identifier[v_full_name] :
keyword[return] keyword[False]
keyword[if] identifier[hasattr] ( identifier[a] , literal[string] ) keyword[and] keyword[not] identifier[hasattr] ( identifier[b] , literal[string] ):
keyword[return] keyword[False]
keyword[if] identifier[hasattr] ( identifier[a] , literal[string] ):
identifier[akeyset] = identifier[set] ( identifier[a] . identifier[_data] . identifier[keys] ())
identifier[bkeyset] = identifier[set] ( identifier[b] . identifier[_data] . identifier[keys] ())
keyword[if] identifier[akeyset] != identifier[bkeyset] :
keyword[return] keyword[False]
keyword[for] identifier[key] keyword[in] identifier[a] . identifier[_data] :
identifier[val] = identifier[a] . identifier[_data] [ identifier[key] ]
identifier[bval] = identifier[b] . identifier[_data] [ identifier[key] ]
keyword[if] keyword[not] identifier[nested_equal] ( identifier[val] , identifier[bval] ):
keyword[return] keyword[False]
keyword[return] keyword[True] | def results_equal(a, b):
"""Compares two result instances
Checks full name and all data. Does not consider the comment.
:return: True or False
    :raises: ValueError if both inputs are parameters instead of results
"""
if a.v_is_parameter and b.v_is_parameter:
raise ValueError('Both inputs are not results.') # depends on [control=['if'], data=[]]
if a.v_is_parameter or b.v_is_parameter:
return False # depends on [control=['if'], data=[]]
if a.v_full_name != b.v_full_name:
return False # depends on [control=['if'], data=[]]
if hasattr(a, '_data') and (not hasattr(b, '_data')):
return False # depends on [control=['if'], data=[]]
if hasattr(a, '_data'):
akeyset = set(a._data.keys())
bkeyset = set(b._data.keys())
if akeyset != bkeyset:
return False # depends on [control=['if'], data=[]]
for key in a._data:
val = a._data[key]
bval = b._data[key]
if not nested_equal(val, bval):
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
return True |
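# Hedged sketch with a minimal stand-in for a pypet result (real Result
# objects carry more machinery; the attribute names mirror the checks above):
class FakeResult(object):
    v_is_parameter = False

    def __init__(self, full_name, **data):
        self.v_full_name = full_name
        self._data = data

r1 = FakeResult('runs.run_0.stats', mean=0.5, n=10)
r2 = FakeResult('runs.run_0.stats', mean=0.5, n=10)
print(results_equal(r1, r2))  # True, assuming nested_equal handles scalars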
def coverage():
"generate coverage report and show in browser"
coverage_index = path("build/coverage/index.html")
coverage_index.remove()
sh("paver test")
coverage_index.exists() and webbrowser.open(coverage_index) | def function[coverage, parameter[]]:
constant[generate coverage report and show in browser]
variable[coverage_index] assign[=] call[name[path], parameter[constant[build/coverage/index.html]]]
call[name[coverage_index].remove, parameter[]]
call[name[sh], parameter[constant[paver test]]]
<ast.BoolOp object at 0x7da1b26af580> | keyword[def] identifier[coverage] ():
literal[string]
identifier[coverage_index] = identifier[path] ( literal[string] )
identifier[coverage_index] . identifier[remove] ()
identifier[sh] ( literal[string] )
identifier[coverage_index] . identifier[exists] () keyword[and] identifier[webbrowser] . identifier[open] ( identifier[coverage_index] ) | def coverage():
"""generate coverage report and show in browser"""
coverage_index = path('build/coverage/index.html')
coverage_index.remove()
sh('paver test')
coverage_index.exists() and webbrowser.open(coverage_index) |
def _set_has_no_columns(self, has_no_column, col_avg, col_last, fields):
"""
        Regenerate has_no_column by appending the column count (col_avg, or col_last for the final field) to each field name
"""
for index, field in has_no_column.items():
if index == len(has_no_column):
field_name = "{field}|{col_last}".format(
field=field, col_last=col_last
)
has_no_column[index] = self._return_field(field_name, fields)
else:
field_name = "{field}|{col_avg}".format(
field=field, col_avg=col_avg
)
has_no_column[index] = self._return_field(field_name, fields)
return has_no_column | def function[_set_has_no_columns, parameter[self, has_no_column, col_avg, col_last, fields]]:
constant[
Regenerate has_no_column by appending the column count (col_avg, or col_last for the final field) to each field name
]
for taget[tuple[[<ast.Name object at 0x7da1b0535cc0>, <ast.Name object at 0x7da1b0535780>]]] in starred[call[name[has_no_column].items, parameter[]]] begin[:]
if compare[name[index] equal[==] call[name[len], parameter[name[has_no_column]]]] begin[:]
variable[field_name] assign[=] call[constant[{field}|{col_last}].format, parameter[]]
call[name[has_no_column]][name[index]] assign[=] call[name[self]._return_field, parameter[name[field_name], name[fields]]]
return[name[has_no_column]] | keyword[def] identifier[_set_has_no_columns] ( identifier[self] , identifier[has_no_column] , identifier[col_avg] , identifier[col_last] , identifier[fields] ):
literal[string]
keyword[for] identifier[index] , identifier[field] keyword[in] identifier[has_no_column] . identifier[items] ():
keyword[if] identifier[index] == identifier[len] ( identifier[has_no_column] ):
identifier[field_name] = literal[string] . identifier[format] (
identifier[field] = identifier[field] , identifier[col_last] = identifier[col_last]
)
identifier[has_no_column] [ identifier[index] ]= identifier[self] . identifier[_return_field] ( identifier[field_name] , identifier[fields] )
keyword[else] :
identifier[field_name] = literal[string] . identifier[format] (
identifier[field] = identifier[field] , identifier[col_avg] = identifier[col_avg]
)
identifier[has_no_column] [ identifier[index] ]= identifier[self] . identifier[_return_field] ( identifier[field_name] , identifier[fields] )
keyword[return] identifier[has_no_column] | def _set_has_no_columns(self, has_no_column, col_avg, col_last, fields):
"""
        Regenerate has_no_column by appending the column count (col_avg, or col_last for the final field) to each field name
"""
for (index, field) in has_no_column.items():
if index == len(has_no_column):
field_name = '{field}|{col_last}'.format(field=field, col_last=col_last)
has_no_column[index] = self._return_field(field_name, fields) # depends on [control=['if'], data=['index']]
else:
field_name = '{field}|{col_avg}'.format(field=field, col_avg=col_avg)
has_no_column[index] = self._return_field(field_name, fields) # depends on [control=['for'], data=[]]
return has_no_column |
def _sample(self, nmr_samples, thinning=1, return_output=True):
"""Sample the given number of samples with the given thinning.
        If ``return_output`` is set, we return the samples, log likelihoods and log priors. If not, we advance the
        state of the sampler without storing the samples.
Args:
nmr_samples (int): the number of iterations to advance the sampler
thinning (int): the thinning to apply
return_output (boolean): if we should return the output
Returns:
None or tuple: if ``return_output`` is True three ndarrays as (samples, log_likelihoods, log_priors)
"""
kernel_data = self._get_kernel_data(nmr_samples, thinning, return_output)
sample_func = self._get_compute_func(nmr_samples, thinning, return_output)
sample_func.evaluate(kernel_data, self._nmr_problems,
use_local_reduction=all(env.is_gpu for env in self._cl_runtime_info.cl_environments),
cl_runtime_info=self._cl_runtime_info)
self._sampling_index += nmr_samples * thinning
if return_output:
return (kernel_data['samples'].get_data(),
kernel_data['log_likelihoods'].get_data(),
kernel_data['log_priors'].get_data()) | def function[_sample, parameter[self, nmr_samples, thinning, return_output]]:
constant[Sample the given number of samples with the given thinning.
If ``return_output`` is set, we return the samples, log likelihoods and log priors. If not, we advance the
state of the sampler without storing the samples.
Args:
nmr_samples (int): the number of iterations to advance the sampler
thinning (int): the thinning to apply
return_output (boolean): if we should return the output
Returns:
None or tuple: if ``return_output`` is True three ndarrays as (samples, log_likelihoods, log_priors)
]
variable[kernel_data] assign[=] call[name[self]._get_kernel_data, parameter[name[nmr_samples], name[thinning], name[return_output]]]
variable[sample_func] assign[=] call[name[self]._get_compute_func, parameter[name[nmr_samples], name[thinning], name[return_output]]]
call[name[sample_func].evaluate, parameter[name[kernel_data], name[self]._nmr_problems]]
<ast.AugAssign object at 0x7da207f981c0>
if name[return_output] begin[:]
return[tuple[[<ast.Call object at 0x7da20c6a8610>, <ast.Call object at 0x7da20c6ab580>, <ast.Call object at 0x7da20c6aaa40>]]] | keyword[def] identifier[_sample] ( identifier[self] , identifier[nmr_samples] , identifier[thinning] = literal[int] , identifier[return_output] = keyword[True] ):
literal[string]
identifier[kernel_data] = identifier[self] . identifier[_get_kernel_data] ( identifier[nmr_samples] , identifier[thinning] , identifier[return_output] )
identifier[sample_func] = identifier[self] . identifier[_get_compute_func] ( identifier[nmr_samples] , identifier[thinning] , identifier[return_output] )
identifier[sample_func] . identifier[evaluate] ( identifier[kernel_data] , identifier[self] . identifier[_nmr_problems] ,
identifier[use_local_reduction] = identifier[all] ( identifier[env] . identifier[is_gpu] keyword[for] identifier[env] keyword[in] identifier[self] . identifier[_cl_runtime_info] . identifier[cl_environments] ),
identifier[cl_runtime_info] = identifier[self] . identifier[_cl_runtime_info] )
identifier[self] . identifier[_sampling_index] += identifier[nmr_samples] * identifier[thinning]
keyword[if] identifier[return_output] :
keyword[return] ( identifier[kernel_data] [ literal[string] ]. identifier[get_data] (),
identifier[kernel_data] [ literal[string] ]. identifier[get_data] (),
identifier[kernel_data] [ literal[string] ]. identifier[get_data] ()) | def _sample(self, nmr_samples, thinning=1, return_output=True):
"""Sample the given number of samples with the given thinning.
If ``return_output`` we will return the samples, log likelihoods and log priors. If not, we will advance the
state of the sampler without returning storing the samples.
Args:
nmr_samples (int): the number of iterations to advance the sampler
thinning (int): the thinning to apply
return_output (boolean): if we should return the output
Returns:
None or tuple: if ``return_output`` is True three ndarrays as (samples, log_likelihoods, log_priors)
"""
kernel_data = self._get_kernel_data(nmr_samples, thinning, return_output)
sample_func = self._get_compute_func(nmr_samples, thinning, return_output)
sample_func.evaluate(kernel_data, self._nmr_problems, use_local_reduction=all((env.is_gpu for env in self._cl_runtime_info.cl_environments)), cl_runtime_info=self._cl_runtime_info)
self._sampling_index += nmr_samples * thinning
if return_output:
return (kernel_data['samples'].get_data(), kernel_data['log_likelihoods'].get_data(), kernel_data['log_priors'].get_data()) # depends on [control=['if'], data=[]] |
def __find_file(name, path, deep=False, partial=False):
"""
Searches for a file and returns its path upon finding it.
    Searches for a file with the given `name` in the directory given by
    `path`. It also supports `deep` search (recursive) and `partial`
    search (accepting files whose filenames merely contain the query)
    through the respective boolean arguments.
    This function is called internally by `find_file` for each of its
    input directories.
Parameters
----------
name : str
The name of the file to search for.
path : str
The path of the directory to be searched.
deep : bool, optional
Enables deep-searching, i.e. recursively looking for the file
in the sub-directories of the mentioned directory.
partial : bool, optional
        Whether to look for files whose filenames partially match the
        query `name`.
Returns
-------
str
The path of the file. In case of multiple hits, it only returns
the first one.
"""
if deep:
for root, dirs, files in os.walk(path):
if partial:
for file in files:
if name in file:
return os.path.join(root, file)
else:
if name in files:
return os.path.join(root, name)
else:
f = os.path.join(path, name)
if os.path.isfile(f):
return f | def function[__find_file, parameter[name, path, deep, partial]]:
constant[
Searches for a file and returns its path upon finding it.
Searches for a file with the given `name` in the directory given by
`path`. It also supports `deep` search (recursive) and `partial`
search (accepting files whose filenames merely contain the query)
through the respective boolean arguments.
This function is called internally by `find_file` for each of its
input directories.
Parameters
----------
name : str
The name of the file to search for.
path : str
The path of the directory to be searched.
deep : bool, optional
Enables deep-searching, i.e. recursively looking for the file
in the sub-directories of the mentioned directory.
partial : bool, optional
Whether to look for files whose filenames partially match the
query `name`.
Returns
-------
str
The path of the file. In case of multiple hits, it only returns
the first one.
]
if name[deep] begin[:]
for taget[tuple[[<ast.Name object at 0x7da204346530>, <ast.Name object at 0x7da204346b30>, <ast.Name object at 0x7da2043454e0>]]] in starred[call[name[os].walk, parameter[name[path]]]] begin[:]
if name[partial] begin[:]
for taget[name[file]] in starred[name[files]] begin[:]
if compare[name[name] in name[file]] begin[:]
return[call[name[os].path.join, parameter[name[root], name[file]]]] | keyword[def] identifier[__find_file] ( identifier[name] , identifier[path] , identifier[deep] = keyword[False] , identifier[partial] = keyword[False] ):
literal[string]
keyword[if] identifier[deep] :
keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[path] ):
keyword[if] identifier[partial] :
keyword[for] identifier[file] keyword[in] identifier[files] :
keyword[if] identifier[name] keyword[in] identifier[file] :
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[file] )
keyword[else] :
keyword[if] identifier[name] keyword[in] identifier[files] :
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[name] )
keyword[else] :
identifier[f] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[name] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[f] ):
keyword[return] identifier[f] | def __find_file(name, path, deep=False, partial=False):
"""
Searches for a file and returns its path upon finding it.
    Searches for a file with the given `name` in the directory given by
    `path`. It also supports `deep` search (recursive) and `partial`
    search (accepting files whose filenames merely contain the query)
    through the respective boolean arguments.
    This function is called internally by `find_file` for each of its
    input directories.
Parameters
----------
name : str
The name of the file to search for.
path : str
The path of the directory to be searched.
deep : bool, optional
Enables deep-searching, i.e. recursively looking for the file
in the sub-directories of the mentioned directory.
partial : bool, optional
        Whether to look for files whose filenames partially match the
        query `name`.
Returns
-------
str
The path of the file. In case of multiple hits, it only returns
the first one.
"""
if deep:
for (root, dirs, files) in os.walk(path):
if partial:
for file in files:
if name in file:
return os.path.join(root, file) # depends on [control=['if'], data=['file']] # depends on [control=['for'], data=['file']] # depends on [control=['if'], data=[]]
elif name in files:
return os.path.join(root, name) # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
f = os.path.join(path, name)
if os.path.isfile(f):
return f # depends on [control=['if'], data=[]] |
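# Usage sketch with placeholder paths: deep + partial search returns the
# first file under the base directory whose name contains the query string,
# or None when nothing matches.
hit = __find_file('settings', '/home/user/project', deep=True, partial=True)
print(hit)  # e.g. /home/user/project/app/settings_local.py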
def estimateTdisrupt(self,deltaAngle):
"""
NAME:
estimateTdisrupt
PURPOSE:
estimate the time of disruption
INPUT:
deltaAngle- spread in angle since disruption
OUTPUT:
time in natural units
HISTORY:
2013-11-27 - Written - Bovy (IAS)
"""
return deltaAngle\
/numpy.sqrt(numpy.sum(self._dsigomeanProg**2.)) | def function[estimateTdisrupt, parameter[self, deltaAngle]]:
constant[
NAME:
estimateTdisrupt
PURPOSE:
estimate the time of disruption
INPUT:
deltaAngle- spread in angle since disruption
OUTPUT:
time in natural units
HISTORY:
2013-11-27 - Written - Bovy (IAS)
]
return[binary_operation[name[deltaAngle] / call[name[numpy].sqrt, parameter[call[name[numpy].sum, parameter[binary_operation[name[self]._dsigomeanProg ** constant[2.0]]]]]]]] | keyword[def] identifier[estimateTdisrupt] ( identifier[self] , identifier[deltaAngle] ):
literal[string]
keyword[return] identifier[deltaAngle] / identifier[numpy] . identifier[sqrt] ( identifier[numpy] . identifier[sum] ( identifier[self] . identifier[_dsigomeanProg] ** literal[int] )) | def estimateTdisrupt(self, deltaAngle):
"""
NAME:
estimateTdisrupt
PURPOSE:
estimate the time of disruption
INPUT:
deltaAngle- spread in angle since disruption
OUTPUT:
time in natural units
HISTORY:
2013-11-27 - Written - Bovy (IAS)
"""
return deltaAngle / numpy.sqrt(numpy.sum(self._dsigomeanProg ** 2.0)) |
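# Hedged numeric sketch of the formula above, t_d = deltaAngle / |dOmega|,
# with a made-up mean-frequency offset vector (0.3, 0.4) in natural units:
import numpy
dsigomeanProg = numpy.array([0.3, 0.4])                 # illustrative values
print(1.2 / numpy.sqrt(numpy.sum(dsigomeanProg**2.)))   # -> 2.4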
def limits(table, field):
"""
Find minimum and maximum values under the given field. E.g.::
>>> import petl as etl
>>> table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]]
>>> minv, maxv = etl.limits(table, 'bar')
>>> minv
1
>>> maxv
3
The `field` argument can be a field name or index (starting from zero).
"""
vals = iter(values(table, field))
try:
minv = maxv = next(vals)
except StopIteration:
return None, None
else:
for v in vals:
if v < minv:
minv = v
if v > maxv:
maxv = v
return minv, maxv | def function[limits, parameter[table, field]]:
constant[
Find minimum and maximum values under the given field. E.g.::
>>> import petl as etl
>>> table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]]
>>> minv, maxv = etl.limits(table, 'bar')
>>> minv
1
>>> maxv
3
The `field` argument can be a field name or index (starting from zero).
]
variable[vals] assign[=] call[name[iter], parameter[call[name[values], parameter[name[table], name[field]]]]]
<ast.Try object at 0x7da1b08e4fd0> | keyword[def] identifier[limits] ( identifier[table] , identifier[field] ):
literal[string]
identifier[vals] = identifier[iter] ( identifier[values] ( identifier[table] , identifier[field] ))
keyword[try] :
identifier[minv] = identifier[maxv] = identifier[next] ( identifier[vals] )
keyword[except] identifier[StopIteration] :
keyword[return] keyword[None] , keyword[None]
keyword[else] :
keyword[for] identifier[v] keyword[in] identifier[vals] :
keyword[if] identifier[v] < identifier[minv] :
identifier[minv] = identifier[v]
keyword[if] identifier[v] > identifier[maxv] :
identifier[maxv] = identifier[v]
keyword[return] identifier[minv] , identifier[maxv] | def limits(table, field):
"""
Find minimum and maximum values under the given field. E.g.::
>>> import petl as etl
>>> table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]]
>>> minv, maxv = etl.limits(table, 'bar')
>>> minv
1
>>> maxv
3
The `field` argument can be a field name or index (starting from zero).
"""
vals = iter(values(table, field))
try:
minv = maxv = next(vals) # depends on [control=['try'], data=[]]
except StopIteration:
return (None, None) # depends on [control=['except'], data=[]]
else:
for v in vals:
if v < minv:
minv = v # depends on [control=['if'], data=['v', 'minv']]
if v > maxv:
maxv = v # depends on [control=['if'], data=['v', 'maxv']] # depends on [control=['for'], data=['v']]
return (minv, maxv) |
def handleRequestForUser(self, username, url):
"""
User C{username} wants to reset their password. Create an attempt
item, and send them an email if the username is valid
"""
attempt = self.newAttemptForUser(username)
account = self.accountByAddress(username)
if account is None:
# do we want to disclose this to the user?
return
email = self.getExternalEmail(account)
if email is not None:
self.sendEmail(url, attempt, email) | def function[handleRequestForUser, parameter[self, username, url]]:
constant[
User C{username} wants to reset their password. Create an attempt
item, and send them an email if the username is valid
]
variable[attempt] assign[=] call[name[self].newAttemptForUser, parameter[name[username]]]
variable[account] assign[=] call[name[self].accountByAddress, parameter[name[username]]]
if compare[name[account] is constant[None]] begin[:]
return[None]
variable[email] assign[=] call[name[self].getExternalEmail, parameter[name[account]]]
if compare[name[email] is_not constant[None]] begin[:]
call[name[self].sendEmail, parameter[name[url], name[attempt], name[email]]] | keyword[def] identifier[handleRequestForUser] ( identifier[self] , identifier[username] , identifier[url] ):
literal[string]
identifier[attempt] = identifier[self] . identifier[newAttemptForUser] ( identifier[username] )
identifier[account] = identifier[self] . identifier[accountByAddress] ( identifier[username] )
keyword[if] identifier[account] keyword[is] keyword[None] :
keyword[return]
identifier[email] = identifier[self] . identifier[getExternalEmail] ( identifier[account] )
keyword[if] identifier[email] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[sendEmail] ( identifier[url] , identifier[attempt] , identifier[email] ) | def handleRequestForUser(self, username, url):
"""
User C{username} wants to reset their password. Create an attempt
item, and send them an email if the username is valid
"""
attempt = self.newAttemptForUser(username)
account = self.accountByAddress(username)
if account is None:
# do we want to disclose this to the user?
return # depends on [control=['if'], data=[]]
email = self.getExternalEmail(account)
if email is not None:
self.sendEmail(url, attempt, email) # depends on [control=['if'], data=['email']] |
def date(self):
"""DATE command.
Coordinated Universal time from the perspective of the usenet server.
It can be used to provide information that might be useful when using
the NEWNEWS command.
See <http://tools.ietf.org/html/rfc3977#section-7.1>
Returns:
The UTC time according to the server as a datetime object.
Raises:
NNTPDataError: If the timestamp can't be parsed.
"""
code, message = self.command("DATE")
if code != 111:
raise NNTPReplyError(code, message)
ts = date.datetimeobj(message, fmt="%Y%m%d%H%M%S")
return ts | def function[date, parameter[self]]:
constant[DATE command.
Coordinated Universal Time from the perspective of the usenet server.
It can be used to provide information that might be useful when using
the NEWNEWS command.
See <http://tools.ietf.org/html/rfc3977#section-7.1>
Returns:
The UTC time according to the server as a datetime object.
Raises:
NNTPDataError: If the timestamp can't be parsed.
]
<ast.Tuple object at 0x7da1b013e9e0> assign[=] call[name[self].command, parameter[constant[DATE]]]
if compare[name[code] not_equal[!=] constant[111]] begin[:]
<ast.Raise object at 0x7da1b013ffa0>
variable[ts] assign[=] call[name[date].datetimeobj, parameter[name[message]]]
return[name[ts]] | keyword[def] identifier[date] ( identifier[self] ):
literal[string]
identifier[code] , identifier[message] = identifier[self] . identifier[command] ( literal[string] )
keyword[if] identifier[code] != literal[int] :
keyword[raise] identifier[NNTPReplyError] ( identifier[code] , identifier[message] )
identifier[ts] = identifier[date] . identifier[datetimeobj] ( identifier[message] , identifier[fmt] = literal[string] )
keyword[return] identifier[ts] | def date(self):
"""DATE command.
Coordinated Universal Time from the perspective of the usenet server.
It can be used to provide information that might be useful when using
the NEWNEWS command.
See <http://tools.ietf.org/html/rfc3977#section-7.1>
Returns:
The UTC time according to the server as a datetime object.
Raises:
NNTPDataError: If the timestamp can't be parsed.
"""
(code, message) = self.command('DATE')
if code != 111:
raise NNTPReplyError(code, message) # depends on [control=['if'], data=['code']]
ts = date.datetimeobj(message, fmt='%Y%m%d%H%M%S')
return ts |
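# Usage sketch for date() above — the client instance and host are
# assumptions for illustration, not part of the source:
# client = NNTPClient('news.example.com')
# server_now = client.date()  # datetime parsed from the server's DATE reply (UTC)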
def _setup(self):
"""
Prepare the scenario for Molecule and return None.
:return: None
"""
if not os.path.isdir(self.inventory_directory):
os.makedirs(self.inventory_directory) | def function[_setup, parameter[self]]:
constant[
Prepare the scenario for Molecule and return None.
:return: None
]
if <ast.UnaryOp object at 0x7da1b2120220> begin[:]
call[name[os].makedirs, parameter[name[self].inventory_directory]] | keyword[def] identifier[_setup] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[inventory_directory] ):
identifier[os] . identifier[makedirs] ( identifier[self] . identifier[inventory_directory] ) | def _setup(self):
"""
Prepare the scenario for Molecule and return None.
:return: None
"""
if not os.path.isdir(self.inventory_directory):
os.makedirs(self.inventory_directory) # depends on [control=['if'], data=[]] |
def handle_err(*args):
""" Handle fatal errors, caught in __main__ scope.
If DEBUG is set, print a real traceback.
Otherwise, `print_err` any arguments passed.
"""
if DEBUG:
print_err(traceback.format_exc(), color=False)
else:
print_err(*args, newline=True) | def function[handle_err, parameter[]]:
constant[ Handle fatal errors, caught in __main__ scope.
If DEBUG is set, print a real traceback.
Otherwise, `print_err` any arguments passed.
]
if name[DEBUG] begin[:]
call[name[print_err], parameter[call[name[traceback].format_exc, parameter[]]]] | keyword[def] identifier[handle_err] (* identifier[args] ):
literal[string]
keyword[if] identifier[DEBUG] :
identifier[print_err] ( identifier[traceback] . identifier[format_exc] (), identifier[color] = keyword[False] )
keyword[else] :
identifier[print_err] (* identifier[args] , identifier[newline] = keyword[True] ) | def handle_err(*args):
""" Handle fatal errors, caught in __main__ scope.
If DEBUG is set, print a real traceback.
Otherwise, `print_err` any arguments passed.
"""
if DEBUG:
print_err(traceback.format_exc(), color=False) # depends on [control=['if'], data=[]]
else:
print_err(*args, newline=True) |
def get_all_tags(image_name, branch=None):
"""
GET /v1/repositories/<namespace>/<repository_name>/tags
:param image_name: The docker image name
:param branch: The branch to filter by
:return: A list of Version instances, latest first
"""
try:
return get_all_tags_no_auth(image_name, branch)
except AuthException:
return get_all_tags_with_auth(image_name, branch) | def function[get_all_tags, parameter[image_name, branch]]:
constant[
GET /v1/repositories/<namespace>/<repository_name>/tags
:param image_name: The docker image name
:param branch: The branch to filter by
:return: A list of Version instances, latest first
]
<ast.Try object at 0x7da1b2585bd0> | keyword[def] identifier[get_all_tags] ( identifier[image_name] , identifier[branch] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[get_all_tags_no_auth] ( identifier[image_name] , identifier[branch] )
keyword[except] identifier[AuthException] :
keyword[return] identifier[get_all_tags_with_auth] ( identifier[image_name] , identifier[branch] ) | def get_all_tags(image_name, branch=None):
"""
GET /v1/repositories/<namespace>/<repository_name>/tags
:param image_name: The docker image name
:param branch: The branch to filter by
:return: A list of Version instances, latest first
"""
try:
return get_all_tags_no_auth(image_name, branch) # depends on [control=['try'], data=[]]
except AuthException:
return get_all_tags_with_auth(image_name, branch) # depends on [control=['except'], data=[]] |
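# Usage sketch for get_all_tags — the image name is hypothetical; assumes the
# no-auth/with-auth helpers above are importable from the same module:
# tags = get_all_tags('library/ubuntu', branch='master')
# latest = tags[0] if tags else None  # Version instances come latest first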
def modification_time(self):
"""dfdatetime.DateTimeValues: modification time or None if not available."""
timestamp = getattr(self._tar_info, 'mtime', None)
if timestamp is None:
return None
return dfdatetime_posix_time.PosixTime(timestamp=timestamp) | def function[modification_time, parameter[self]]:
constant[dfdatetime.DateTimeValues: modification time or None if not available.]
variable[timestamp] assign[=] call[name[getattr], parameter[name[self]._tar_info, constant[mtime], constant[None]]]
if compare[name[timestamp] is constant[None]] begin[:]
return[constant[None]]
return[call[name[dfdatetime_posix_time].PosixTime, parameter[]]] | keyword[def] identifier[modification_time] ( identifier[self] ):
literal[string]
identifier[timestamp] = identifier[getattr] ( identifier[self] . identifier[_tar_info] , literal[string] , keyword[None] )
keyword[if] identifier[timestamp] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[dfdatetime_posix_time] . identifier[PosixTime] ( identifier[timestamp] = identifier[timestamp] ) | def modification_time(self):
"""dfdatetime.DateTimeValues: modification time or None if not available."""
timestamp = getattr(self._tar_info, 'mtime', None)
if timestamp is None:
return None # depends on [control=['if'], data=[]]
return dfdatetime_posix_time.PosixTime(timestamp=timestamp) |
def radiation_values(self, location, timestep=1):
"""Lists of driect normal, diffuse horiz, and global horiz rad at each timestep.
"""
# create sunpath and get altitude at every timestep of the design day
sp = Sunpath.from_location(location)
altitudes = []
dates = self._get_datetimes(timestep)
for t_date in dates:
sun = sp.calculate_sun_from_date_time(t_date)
altitudes.append(sun.altitude)
dir_norm, diff_horiz = ashrae_clear_sky(
altitudes, self._month, self._clearness)
glob_horiz = [dhr + dnr * math.sin(math.radians(alt)) for
alt, dnr, dhr in zip(altitudes, dir_norm, diff_horiz)]
return dir_norm, diff_horiz, glob_horiz | def function[radiation_values, parameter[self, location, timestep]]:
constant[Lists of direct normal, diffuse horiz, and global horiz rad at each timestep.
]
variable[sp] assign[=] call[name[Sunpath].from_location, parameter[name[location]]]
variable[altitudes] assign[=] list[[]]
variable[dates] assign[=] call[name[self]._get_datetimes, parameter[name[timestep]]]
for taget[name[t_date]] in starred[name[dates]] begin[:]
variable[sun] assign[=] call[name[sp].calculate_sun_from_date_time, parameter[name[t_date]]]
call[name[altitudes].append, parameter[name[sun].altitude]]
<ast.Tuple object at 0x7da1b12a1b10> assign[=] call[name[ashrae_clear_sky], parameter[name[altitudes], name[self]._month, name[self]._clearness]]
variable[glob_horiz] assign[=] <ast.ListComp object at 0x7da1b1265db0>
return[tuple[[<ast.Name object at 0x7da1b1265c90>, <ast.Name object at 0x7da1b1266020>, <ast.Name object at 0x7da1b1265ed0>]]] | keyword[def] identifier[radiation_values] ( identifier[self] , identifier[location] , identifier[timestep] = literal[int] ):
literal[string]
identifier[sp] = identifier[Sunpath] . identifier[from_location] ( identifier[location] )
identifier[altitudes] =[]
identifier[dates] = identifier[self] . identifier[_get_datetimes] ( identifier[timestep] )
keyword[for] identifier[t_date] keyword[in] identifier[dates] :
identifier[sun] = identifier[sp] . identifier[calculate_sun_from_date_time] ( identifier[t_date] )
identifier[altitudes] . identifier[append] ( identifier[sun] . identifier[altitude] )
identifier[dir_norm] , identifier[diff_horiz] = identifier[ashrae_clear_sky] (
identifier[altitudes] , identifier[self] . identifier[_month] , identifier[self] . identifier[_clearness] )
identifier[glob_horiz] =[ identifier[dhr] + identifier[dnr] * identifier[math] . identifier[sin] ( identifier[math] . identifier[radians] ( identifier[alt] )) keyword[for]
identifier[alt] , identifier[dnr] , identifier[dhr] keyword[in] identifier[zip] ( identifier[altitudes] , identifier[dir_norm] , identifier[diff_horiz] )]
keyword[return] identifier[dir_norm] , identifier[diff_horiz] , identifier[glob_horiz] | def radiation_values(self, location, timestep=1):
"""Lists of driect normal, diffuse horiz, and global horiz rad at each timestep.
"""
# create sunpath and get altitude at every timestep of the design day
sp = Sunpath.from_location(location)
altitudes = []
dates = self._get_datetimes(timestep)
for t_date in dates:
sun = sp.calculate_sun_from_date_time(t_date)
altitudes.append(sun.altitude) # depends on [control=['for'], data=['t_date']]
(dir_norm, diff_horiz) = ashrae_clear_sky(altitudes, self._month, self._clearness)
glob_horiz = [dhr + dnr * math.sin(math.radians(alt)) for (alt, dnr, dhr) in zip(altitudes, dir_norm, diff_horiz)]
return (dir_norm, diff_horiz, glob_horiz) |
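# Usage sketch for radiation_values — a Ladybug-style design day; the
# Location constructor and the design_day instance are assumptions:
# loc = Location(city='Boston', latitude=42.37, longitude=-71.03)
# dir_norm, diff_horiz, glob_horiz = design_day.radiation_values(loc, timestep=1)
# each returned list holds one value per timestep of the design day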
def proxyval(self, visited):
'''
Scrape a value from the inferior process, and try to represent it
within the gdb process, whilst (hopefully) avoiding crashes when
the remote data is corrupt.
Derived classes will override this.
For example, a PyIntObject* with ob_ival 42 in the inferior process
should result in an int(42) in this process.
visited: a set of all gdb.Value pyobject pointers already visited
whilst generating this value (to guard against infinite recursion when
visiting object graphs with loops). Analogous to Py_ReprEnter and
Py_ReprLeave
'''
class FakeRepr(object):
"""
Class representing a non-descript PyObject* value in the inferior
process for when we don't have a custom scraper, intended to have
a sane repr().
"""
def __init__(self, tp_name, address):
self.tp_name = tp_name
self.address = address
def __repr__(self):
# For the NULL pointer, we have no way of knowing a type, so
# special-case it as per
# http://bugs.python.org/issue8032#msg100882
if self.address == 0:
return '0x0'
return '<%s at remote 0x%x>' % (self.tp_name, self.address)
return FakeRepr(self.safe_tp_name(),
long(self._gdbval)) | def function[proxyval, parameter[self, visited]]:
constant[
Scrape a value from the inferior process, and try to represent it
within the gdb process, whilst (hopefully) avoiding crashes when
the remote data is corrupt.
Derived classes will override this.
For example, a PyIntObject* with ob_ival 42 in the inferior process
should result in an int(42) in this process.
visited: a set of all gdb.Value pyobject pointers already visited
whilst generating this value (to guard against infinite recursion when
visiting object graphs with loops). Analogous to Py_ReprEnter and
Py_ReprLeave
]
class class[FakeRepr, parameter[]] begin[:]
constant[
Class representing a non-descript PyObject* value in the inferior
process for when we don't have a custom scraper, intended to have
a sane repr().
]
def function[__init__, parameter[self, tp_name, address]]:
name[self].tp_name assign[=] name[tp_name]
name[self].address assign[=] name[address]
def function[__repr__, parameter[self]]:
if compare[name[self].address equal[==] constant[0]] begin[:]
return[constant[0x0]]
return[binary_operation[constant[<%s at remote 0x%x>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18ede6920>, <ast.Attribute object at 0x7da1b114be80>]]]]
return[call[name[FakeRepr], parameter[call[name[self].safe_tp_name, parameter[]], call[name[long], parameter[name[self]._gdbval]]]]] | keyword[def] identifier[proxyval] ( identifier[self] , identifier[visited] ):
literal[string]
keyword[class] identifier[FakeRepr] ( identifier[object] ):
literal[string]
keyword[def] identifier[__init__] ( identifier[self] , identifier[tp_name] , identifier[address] ):
identifier[self] . identifier[tp_name] = identifier[tp_name]
identifier[self] . identifier[address] = identifier[address]
keyword[def] identifier[__repr__] ( identifier[self] ):
keyword[if] identifier[self] . identifier[address] == literal[int] :
keyword[return] literal[string]
keyword[return] literal[string] %( identifier[self] . identifier[tp_name] , identifier[self] . identifier[address] )
keyword[return] identifier[FakeRepr] ( identifier[self] . identifier[safe_tp_name] (),
identifier[long] ( identifier[self] . identifier[_gdbval] )) | def proxyval(self, visited):
"""
Scrape a value from the inferior process, and try to represent it
within the gdb process, whilst (hopefully) avoiding crashes when
the remote data is corrupt.
Derived classes will override this.
For example, a PyIntObject* with ob_ival 42 in the inferior process
should result in an int(42) in this process.
visited: a set of all gdb.Value pyobject pointers already visited
whilst generating this value (to guard against infinite recursion when
visiting object graphs with loops). Analogous to Py_ReprEnter and
Py_ReprLeave
"""
class FakeRepr(object):
"""
Class representing a non-descript PyObject* value in the inferior
process for when we don't have a custom scraper, intended to have
a sane repr().
"""
def __init__(self, tp_name, address):
self.tp_name = tp_name
self.address = address
def __repr__(self):
# For the NULL pointer, we have no way of knowing a type, so
# special-case it as per
# http://bugs.python.org/issue8032#msg100882
if self.address == 0:
return '0x0' # depends on [control=['if'], data=[]]
return '<%s at remote 0x%x>' % (self.tp_name, self.address)
return FakeRepr(self.safe_tp_name(), long(self._gdbval)) |
def execute(self, cmd, fname, codes=[0, None]):
'''
Execute a command against the specified file.
@cmd - Command to execute.
@fname - File to run command against.
@codes - List of return codes indicating cmd success.
Returns True on success, False on failure, or None if the external extraction utility could not be found.
'''
tmp = None
rval = 0
retval = True
command_list = []
binwalk.core.common.debug("Running extractor '%s'" % str(cmd))
try:
if callable(cmd):
command_list.append(get_class_name_from_method(cmd))
try:
retval = cmd(fname)
except KeyboardInterrupt as e:
raise e
except Exception as e:
binwalk.core.common.warning("Internal extractor '%s' failed with exception: '%s'" % (str(cmd), str(e)))
elif cmd:
# If not in debug mode, create a temporary file to redirect
# stdout and stderr to
if not binwalk.core.common.DEBUG:
tmp = tempfile.TemporaryFile()
# Generate unique file paths for all paths in the current
# command that are surrounded by UNIQUE_PATH_DELIMITER
while self.UNIQUE_PATH_DELIMITER in cmd:
need_unique_path = cmd.split(self.UNIQUE_PATH_DELIMITER)[
1].split(self.UNIQUE_PATH_DELIMITER)[0]
unique_path = binwalk.core.common.unique_file_name(need_unique_path)
cmd = cmd.replace(self.UNIQUE_PATH_DELIMITER + need_unique_path + self.UNIQUE_PATH_DELIMITER, unique_path)
# Execute.
for command in cmd.split("&&"):
# Replace all instances of FILE_NAME_PLACEHOLDER in the
# command with fname
command = command.strip().replace(self.FILE_NAME_PLACEHOLDER, fname)
binwalk.core.common.debug("subprocess.call(%s, stdout=%s, stderr=%s)" % (command, str(tmp), str(tmp)))
rval = subprocess.call(shlex.split(command), stdout=tmp, stderr=tmp)
if rval in codes:
retval = True
else:
retval = False
binwalk.core.common.debug('External extractor command "%s" completed with return code %d (success: %s)' % (cmd, rval, str(retval)))
command_list.append(command)
# TODO: Should errors from all commands in a command string be checked? Currently we only support
# specifying one set of error codes, so at the moment, this is not done; it is up to the
# final command to return success or failure (which presumably it will if previous necessary
# commands were not successful, but this is an assumption).
# if retval == False:
# break
except KeyboardInterrupt as e:
raise e
except Exception as e:
binwalk.core.common.warning("Extractor.execute failed to run external extractor '%s': %s, '%s' might not be installed correctly" % (str(cmd), str(e), str(cmd)))
retval = None
if tmp is not None:
tmp.close()
return (retval, '&&'.join(command_list)) | def function[execute, parameter[self, cmd, fname, codes]]:
constant[
Execute a command against the specified file.
@cmd - Command to execute.
@fname - File to run command against.
@codes - List of return codes indicating cmd success.
Returns True on success, False on failure, or None if the external extraction utility could not be found.
]
variable[tmp] assign[=] constant[None]
variable[rval] assign[=] constant[0]
variable[retval] assign[=] constant[True]
variable[command_list] assign[=] list[[]]
call[name[binwalk].core.common.debug, parameter[binary_operation[constant[Running extractor '%s'] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[cmd]]]]]]
<ast.Try object at 0x7da1b1c89ed0>
if compare[name[tmp] is_not constant[None]] begin[:]
call[name[tmp].close, parameter[]]
return[tuple[[<ast.Name object at 0x7da1b1c89300>, <ast.Call object at 0x7da1b1c89c30>]]] | keyword[def] identifier[execute] ( identifier[self] , identifier[cmd] , identifier[fname] , identifier[codes] =[ literal[int] , keyword[None] ]):
literal[string]
identifier[tmp] = keyword[None]
identifier[rval] = literal[int]
identifier[retval] = keyword[True]
identifier[command_list] =[]
identifier[binwalk] . identifier[core] . identifier[common] . identifier[debug] ( literal[string] % identifier[str] ( identifier[cmd] ))
keyword[try] :
keyword[if] identifier[callable] ( identifier[cmd] ):
identifier[command_list] . identifier[append] ( identifier[get_class_name_from_method] ( identifier[cmd] ))
keyword[try] :
identifier[retval] = identifier[cmd] ( identifier[fname] )
keyword[except] identifier[KeyboardInterrupt] keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[binwalk] . identifier[core] . identifier[common] . identifier[warning] ( literal[string] %( identifier[str] ( identifier[cmd] ), identifier[str] ( identifier[e] )))
keyword[elif] identifier[cmd] :
keyword[if] keyword[not] identifier[binwalk] . identifier[core] . identifier[common] . identifier[DEBUG] :
identifier[tmp] = identifier[tempfile] . identifier[TemporaryFile] ()
keyword[while] identifier[self] . identifier[UNIQUE_PATH_DELIMITER] keyword[in] identifier[cmd] :
identifier[need_unique_path] = identifier[cmd] . identifier[split] ( identifier[self] . identifier[UNIQUE_PATH_DELIMITER] )[
literal[int] ]. identifier[split] ( identifier[self] . identifier[UNIQUE_PATH_DELIMITER] )[ literal[int] ]
identifier[unique_path] = identifier[binwalk] . identifier[core] . identifier[common] . identifier[unique_file_name] ( identifier[need_unique_path] )
identifier[cmd] = identifier[cmd] . identifier[replace] ( identifier[self] . identifier[UNIQUE_PATH_DELIMITER] + identifier[need_unique_path] + identifier[self] . identifier[UNIQUE_PATH_DELIMITER] , identifier[unique_path] )
keyword[for] identifier[command] keyword[in] identifier[cmd] . identifier[split] ( literal[string] ):
identifier[command] = identifier[command] . identifier[strip] (). identifier[replace] ( identifier[self] . identifier[FILE_NAME_PLACEHOLDER] , identifier[fname] )
identifier[binwalk] . identifier[core] . identifier[common] . identifier[debug] ( literal[string] %( identifier[command] , identifier[str] ( identifier[tmp] ), identifier[str] ( identifier[tmp] )))
identifier[rval] = identifier[subprocess] . identifier[call] ( identifier[shlex] . identifier[split] ( identifier[command] ), identifier[stdout] = identifier[tmp] , identifier[stderr] = identifier[tmp] )
keyword[if] identifier[rval] keyword[in] identifier[codes] :
identifier[retval] = keyword[True]
keyword[else] :
identifier[retval] = keyword[False]
identifier[binwalk] . identifier[core] . identifier[common] . identifier[debug] ( literal[string] %( identifier[cmd] , identifier[rval] , identifier[str] ( identifier[retval] )))
identifier[command_list] . identifier[append] ( identifier[command] )
keyword[except] identifier[KeyboardInterrupt] keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[binwalk] . identifier[core] . identifier[common] . identifier[warning] ( literal[string] %( identifier[str] ( identifier[cmd] ), identifier[str] ( identifier[e] ), identifier[str] ( identifier[cmd] )))
identifier[retval] = keyword[None]
keyword[if] identifier[tmp] keyword[is] keyword[not] keyword[None] :
identifier[tmp] . identifier[close] ()
keyword[return] ( identifier[retval] , literal[string] . identifier[join] ( identifier[command_list] )) | def execute(self, cmd, fname, codes=[0, None]):
"""
Execute a command against the specified file.
@cmd - Command to execute.
@fname - File to run command against.
@codes - List of return codes indicating cmd success.
Returns True on success, False on failure, or None if the external extraction utility could not be found.
"""
tmp = None
rval = 0
retval = True
command_list = []
binwalk.core.common.debug("Running extractor '%s'" % str(cmd))
try:
if callable(cmd):
command_list.append(get_class_name_from_method(cmd))
try:
retval = cmd(fname) # depends on [control=['try'], data=[]]
except KeyboardInterrupt as e:
raise e # depends on [control=['except'], data=['e']]
except Exception as e:
binwalk.core.common.warning("Internal extractor '%s' failed with exception: '%s'" % (str(cmd), str(e))) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
elif cmd:
# If not in debug mode, create a temporary file to redirect
# stdout and stderr to
if not binwalk.core.common.DEBUG:
tmp = tempfile.TemporaryFile() # depends on [control=['if'], data=[]]
# Generate unique file paths for all paths in the current
# command that are surrounded by UNIQUE_PATH_DELIMITER
while self.UNIQUE_PATH_DELIMITER in cmd:
need_unique_path = cmd.split(self.UNIQUE_PATH_DELIMITER)[1].split(self.UNIQUE_PATH_DELIMITER)[0]
unique_path = binwalk.core.common.unique_file_name(need_unique_path)
cmd = cmd.replace(self.UNIQUE_PATH_DELIMITER + need_unique_path + self.UNIQUE_PATH_DELIMITER, unique_path) # depends on [control=['while'], data=['cmd']]
# Execute.
for command in cmd.split('&&'):
# Replace all instances of FILE_NAME_PLACEHOLDER in the
# command with fname
command = command.strip().replace(self.FILE_NAME_PLACEHOLDER, fname)
binwalk.core.common.debug('subprocess.call(%s, stdout=%s, stderr=%s)' % (command, str(tmp), str(tmp)))
rval = subprocess.call(shlex.split(command), stdout=tmp, stderr=tmp)
if rval in codes:
retval = True # depends on [control=['if'], data=[]]
else:
retval = False
binwalk.core.common.debug('External extractor command "%s" completed with return code %d (success: %s)' % (cmd, rval, str(retval)))
command_list.append(command) # depends on [control=['for'], data=['command']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
# TODO: Should errors from all commands in a command string be checked? Currently we only support
# specifying one set of error codes, so at the moment, this is not done; it is up to the
# final command to return success or failure (which presumably it will if previous necessary
# commands were not successful, but this is an assumption).
# if retval == False:
# break
except KeyboardInterrupt as e:
raise e # depends on [control=['except'], data=['e']]
except Exception as e:
binwalk.core.common.warning("Extractor.execute failed to run external extractor '%s': %s, '%s' might not be installed correctly" % (str(cmd), str(e), str(cmd)))
retval = None # depends on [control=['except'], data=['e']]
if tmp is not None:
tmp.close() # depends on [control=['if'], data=['tmp']]
return (retval, '&&'.join(command_list)) |
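# Usage sketch for Extractor.execute — binwalk external extraction; the
# command string and the '%e' placeholder spelling are assumptions here:
# ok, cmd_run = extractor.execute('tar -xf %e', 'firmware.tar', codes=[0])
# ok is True/False for a completed run, None if the external tool was missing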
def trunc_normal_(x:Tensor, mean:float=0., std:float=1.) -> Tensor:
"Truncated normal initialization."
# From https://discuss.pytorch.org/t/implementing-truncated-normal-initializer/4778/12
return x.normal_().fmod_(2).mul_(std).add_(mean) | def function[trunc_normal_, parameter[x, mean, std]]:
constant[Truncated normal initialization.]
return[call[call[call[call[name[x].normal_, parameter[]].fmod_, parameter[constant[2]]].mul_, parameter[name[std]]].add_, parameter[name[mean]]]] | keyword[def] identifier[trunc_normal_] ( identifier[x] : identifier[Tensor] , identifier[mean] : identifier[float] = literal[int] , identifier[std] : identifier[float] = literal[int] )-> identifier[Tensor] :
literal[string]
keyword[return] identifier[x] . identifier[normal_] (). identifier[fmod_] ( literal[int] ). identifier[mul_] ( identifier[std] ). identifier[add_] ( identifier[mean] ) | def trunc_normal_(x: Tensor, mean: float=0.0, std: float=1.0) -> Tensor:
"""Truncated normal initialization."""
# From https://discuss.pytorch.org/t/implementing-truncated-normal-initializer/4778/12
return x.normal_().fmod_(2).mul_(std).add_(mean) |
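# Minimal runnable sketch for trunc_normal_ (assumes PyTorch is installed);
# fmod_(2) folds the standard normal, so values land in mean +/- 2*std:
import torch
w = torch.empty(3, 5)
trunc_normal_(w, mean=0., std=0.02)  # in-place init, range (-0.04, 0.04)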
def merge_config(
config: Mapping[str, Any],
override_config: Mapping[str, Any] = None,
override_config_fn: str = None,
) -> Mapping[str, Any]:
"""Override config with additional configuration in override_config or override_config_fn
Used in script to merge CLI options with Config
Args:
config: original configuration
override_config: new configuration to override/extend current config
override_config_fn: new configuration filename as YAML file
"""
if override_config_fn:
with open(override_config_fn, "r") as f:
override_config = yaml.load(f, Loader=yaml.SafeLoader)
if not override_config:
log.info("Missing override_config")
return functools.reduce(rec_merge, (config, override_config)) | def function[merge_config, parameter[config, override_config, override_config_fn]]:
constant[Override config with additional configuration in override_config or override_config_fn
Used in script to merge CLI options with Config
Args:
config: original configuration
override_config: new configuration to override/extend current config
override_config_fn: new configuration filename as YAML file
]
if name[override_config_fn] begin[:]
with call[name[open], parameter[name[override_config_fn], constant[r]]] begin[:]
variable[override_config] assign[=] call[name[yaml].load, parameter[name[f]]]
if <ast.UnaryOp object at 0x7da1b18752d0> begin[:]
call[name[log].info, parameter[constant[Missing override_config]]]
return[call[name[functools].reduce, parameter[name[rec_merge], tuple[[<ast.Name object at 0x7da1b18777f0>, <ast.Name object at 0x7da1b18746d0>]]]]] | keyword[def] identifier[merge_config] (
identifier[config] : identifier[Mapping] [ identifier[str] , identifier[Any] ],
identifier[override_config] : identifier[Mapping] [ identifier[str] , identifier[Any] ]= keyword[None] ,
identifier[override_config_fn] : identifier[str] = keyword[None] ,
)-> identifier[Mapping] [ identifier[str] , identifier[Any] ]:
literal[string]
keyword[if] identifier[override_config_fn] :
keyword[with] identifier[open] ( identifier[override_config_fn] , literal[string] ) keyword[as] identifier[f] :
identifier[override_config] = identifier[yaml] . identifier[load] ( identifier[f] , identifier[Loader] = identifier[yaml] . identifier[SafeLoader] )
keyword[if] keyword[not] identifier[override_config] :
identifier[log] . identifier[info] ( literal[string] )
keyword[return] identifier[functools] . identifier[reduce] ( identifier[rec_merge] ,( identifier[config] , identifier[override_config] )) | def merge_config(config: Mapping[str, Any], override_config: Mapping[str, Any]=None, override_config_fn: str=None) -> Mapping[str, Any]:
"""Override config with additional configuration in override_config or override_config_fn
Used in script to merge CLI options with Config
Args:
config: original configuration
override_config: new configuration to override/extend current config
override_config_fn: new configuration filename as YAML file
"""
if override_config_fn:
with open(override_config_fn, 'r') as f:
override_config = yaml.load(f, Loader=yaml.SafeLoader) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
if not override_config:
log.info('Missing override_config') # depends on [control=['if'], data=[]]
return functools.reduce(rec_merge, (config, override_config)) |
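# Minimal sketch for merge_config — assumes rec_merge (defined elsewhere in
# this module) performs a recursive dict merge, as the reduce above implies:
base = {'logging': {'level': 'INFO'}, 'cache': True}
cli = {'logging': {'level': 'DEBUG'}}
merged = merge_config(base, override_config=cli)
# merged['logging']['level'] == 'DEBUG'; merged['cache'] stays True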
def format_datetime(self, time_input, tz=None, date_format=None):
""" Return timestamp from multiple input formats.
Formats:
#. Human Input (e.g. 30 days ago, last friday)
#. ISO 8601 (e.g. 2017-11-08T16:52:42Z)
#. Loose Date format (e.g. 2017 12 25)
#. Unix Time/Posix Time/Epoch Time (e.g. 1510686617 or 1510686617.298753)
.. note:: To get a unix timestamp format use the strftime format **%s**. Python
does not natively support **%s**, however this method has support.
Args:
time_input (string): The time input string (see formats above).
tz (string): The time zone for the returned data.
date_format (string): The strftime format to use, ISO by default.
Returns:
(string): Formatted datetime string.
"""
# handle timestamp (e.g. 1510686617 or 1510686617.298753)
dt_value = self.any_to_datetime(time_input, tz)
# format date
if date_format == '%s':
dt_value = calendar.timegm(dt_value.timetuple())
elif date_format:
dt_value = dt_value.strftime(date_format)
else:
dt_value = dt_value.isoformat()
return dt_value | def function[format_datetime, parameter[self, time_input, tz, date_format]]:
constant[ Return timestamp from multiple input formats.
Formats:
#. Human Input (e.g. 30 days ago, last friday)
#. ISO 8601 (e.g. 2017-11-08T16:52:42Z)
#. Loose Date format (e.g. 2017 12 25)
#. Unix Time/Posix Time/Epoch Time (e.g. 1510686617 or 1510686617.298753)
.. note:: To get a unix timestamp format use the strftime format **%s**. Python
does not natively support **%s**, however this method has support.
Args:
time_input (string): The time input string (see formats above).
tz (string): The time zone for the returned data.
date_format (string): The strftime format to use, ISO by default.
Returns:
(string): Formatted datetime string.
]
variable[dt_value] assign[=] call[name[self].any_to_datetime, parameter[name[time_input], name[tz]]]
if compare[name[date_format] equal[==] constant[%s]] begin[:]
variable[dt_value] assign[=] call[name[calendar].timegm, parameter[call[name[dt_value].timetuple, parameter[]]]]
return[name[dt_value]] | keyword[def] identifier[format_datetime] ( identifier[self] , identifier[time_input] , identifier[tz] = keyword[None] , identifier[date_format] = keyword[None] ):
literal[string]
identifier[dt_value] = identifier[self] . identifier[any_to_datetime] ( identifier[time_input] , identifier[tz] )
keyword[if] identifier[date_format] == literal[string] :
identifier[dt_value] = identifier[calendar] . identifier[timegm] ( identifier[dt_value] . identifier[timetuple] ())
keyword[elif] identifier[date_format] :
identifier[dt_value] = identifier[dt_value] . identifier[strftime] ( identifier[date_format] )
keyword[else] :
identifier[dt_value] = identifier[dt_value] . identifier[isoformat] ()
keyword[return] identifier[dt_value] | def format_datetime(self, time_input, tz=None, date_format=None):
""" Return timestamp from multiple input formats.
Formats:
#. Human Input (e.g. 30 days ago, last friday)
#. ISO 8601 (e.g. 2017-11-08T16:52:42Z)
#. Loose Date format (e.g. 2017 12 25)
#. Unix Time/Posix Time/Epoch Time (e.g. 1510686617 or 1510686617.298753)
.. note:: To get a unix timestamp format use the strftime format **%s**. Python
does not natively support **%s**, however this method has support.
Args:
time_input (string): The time input string (see formats above).
tz (string): The time zone for the returned data.
date_format (string): The strftime format to use, ISO by default.
Returns:
(string): Formatted datetime string.
"""
# handle timestamp (e.g. 1510686617 or 1510686617.298753)
dt_value = self.any_to_datetime(time_input, tz)
# format date
if date_format == '%s':
dt_value = calendar.timegm(dt_value.timetuple()) # depends on [control=['if'], data=[]]
elif date_format:
dt_value = dt_value.strftime(date_format) # depends on [control=['if'], data=[]]
else:
dt_value = dt_value.isoformat()
return dt_value |
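# Usage sketch for format_datetime — `utils` is a hypothetical instance of
# the owning class; outputs are illustrative:
# utils.format_datetime('2017-11-08T16:52:42Z', tz='UTC')                  # ISO 8601 (default)
# utils.format_datetime('30 days ago', tz='UTC', date_format='%s')        # unix epoch
# utils.format_datetime(1510686617, tz='US/Eastern', date_format='%Y-%m-%d')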
def delete(self, table):
"""Deletes record in table
>>> yql.delete('yql.storage').where(['name','=','store://YEl70PraLLMSMuYAauqNc7'])
"""
self._table = table
self._limit = None
self._query = "DELETE FROM {0}".format(self._table)
return self | def function[delete, parameter[self, table]]:
constant[Deletes record in table
>>> yql.delete('yql.storage').where(['name','=','store://YEl70PraLLMSMuYAauqNc7'])
]
name[self]._table assign[=] name[table]
name[self]._limit assign[=] constant[None]
name[self]._query assign[=] call[constant[DELETE FROM {0}].format, parameter[name[self]._table]]
return[name[self]] | keyword[def] identifier[delete] ( identifier[self] , identifier[table] ):
literal[string]
identifier[self] . identifier[_table] = identifier[table]
identifier[self] . identifier[_limit] = keyword[None]
identifier[self] . identifier[_query] = literal[string] . identifier[format] ( identifier[self] . identifier[_table] )
keyword[return] identifier[self] | def delete(self, table):
"""Deletes record in table
>>> yql.delete('yql.storage').where(['name','=','store://YEl70PraLLMSMuYAauqNc7'])
"""
self._table = table
self._limit = None
self._query = 'DELETE FROM {0}'.format(self._table)
return self |
def for_json(self):
"""Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals"""
value = super(DatetimeField, self).for_json()
# Order of instance checks matters for proper inheritance checks
if isinstance(value, pendulum.Interval):
return value.in_seconds() * 1000
if isinstance(value, datetime):
return self.format_datetime(value)
if isinstance(value, pendulum.Time):
return str(value)
if isinstance(value, pendulum.Date):
return value.to_date_string() | def function[for_json, parameter[self]]:
constant[Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals]
variable[value] assign[=] call[call[name[super], parameter[name[DatetimeField], name[self]]].for_json, parameter[]]
if call[name[isinstance], parameter[name[value], name[pendulum].Interval]] begin[:]
return[binary_operation[call[name[value].in_seconds, parameter[]] * constant[1000]]]
if call[name[isinstance], parameter[name[value], name[datetime]]] begin[:]
return[call[name[self].format_datetime, parameter[name[value]]]]
if call[name[isinstance], parameter[name[value], name[pendulum].Time]] begin[:]
return[call[name[str], parameter[name[value]]]]
if call[name[isinstance], parameter[name[value], name[pendulum].Date]] begin[:]
return[call[name[value].to_date_string, parameter[]]] | keyword[def] identifier[for_json] ( identifier[self] ):
literal[string]
identifier[value] = identifier[super] ( identifier[DatetimeField] , identifier[self] ). identifier[for_json] ()
keyword[if] identifier[isinstance] ( identifier[value] , identifier[pendulum] . identifier[Interval] ):
keyword[return] identifier[value] . identifier[in_seconds] ()* literal[int]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[datetime] ):
keyword[return] identifier[self] . identifier[format_datetime] ( identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[pendulum] . identifier[Time] ):
keyword[return] identifier[str] ( identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[pendulum] . identifier[Date] ):
keyword[return] identifier[value] . identifier[to_date_string] () | def for_json(self):
"""Return date ISO8601 string formats for datetime, date, and time values, milliseconds for intervals"""
value = super(DatetimeField, self).for_json()
# Order of instance checks matters for proper inheritance checks
if isinstance(value, pendulum.Interval):
return value.in_seconds() * 1000 # depends on [control=['if'], data=[]]
if isinstance(value, datetime):
return self.format_datetime(value) # depends on [control=['if'], data=[]]
if isinstance(value, pendulum.Time):
return str(value) # depends on [control=['if'], data=[]]
if isinstance(value, pendulum.Date):
return value.to_date_string() # depends on [control=['if'], data=[]] |
def error_uns(self):
"""Check if package supported by arch
before proceed to install
"""
self.FAULT = ""
UNST = ["UNSUPPORTED", "UNTESTED"]
if "".join(self.source_dwn) in UNST:
self.FAULT = "".join(self.source_dwn) | def function[error_uns, parameter[self]]:
constant[Check if the package is supported by the arch
before proceeding to install
]
name[self].FAULT assign[=] constant[]
variable[UNST] assign[=] list[[<ast.Constant object at 0x7da204961900>, <ast.Constant object at 0x7da2049628f0>]]
if compare[call[constant[].join, parameter[name[self].source_dwn]] in name[UNST]] begin[:]
name[self].FAULT assign[=] call[constant[].join, parameter[name[self].source_dwn]] | keyword[def] identifier[error_uns] ( identifier[self] ):
literal[string]
identifier[self] . identifier[FAULT] = literal[string]
identifier[UNST] =[ literal[string] , literal[string] ]
keyword[if] literal[string] . identifier[join] ( identifier[self] . identifier[source_dwn] ) keyword[in] identifier[UNST] :
identifier[self] . identifier[FAULT] = literal[string] . identifier[join] ( identifier[self] . identifier[source_dwn] ) | def error_uns(self):
"""Check if package supported by arch
before proceed to install
"""
self.FAULT = ''
UNST = ['UNSUPPORTED', 'UNTESTED']
if ''.join(self.source_dwn) in UNST:
self.FAULT = ''.join(self.source_dwn) # depends on [control=['if'], data=[]] |
def get_statistics(prefix=''):
"""
Get statistics for message codes that start with the prefix.
prefix='' matches all errors and warnings
prefix='E' matches all errors
prefix='W' matches all warnings
prefix='E4' matches all errors that have to do with imports
"""
stats = []
keys = options.messages.keys()
keys.sort()
for key in keys:
if key.startswith(prefix):
stats.append('%-7s %s %s' %
(options.counters[key], key, options.messages[key]))
return stats | def function[get_statistics, parameter[prefix]]:
constant[
Get statistics for message codes that start with the prefix.
prefix='' matches all errors and warnings
prefix='E' matches all errors
prefix='W' matches all warnings
prefix='E4' matches all errors that have to do with imports
]
variable[stats] assign[=] list[[]]
variable[keys] assign[=] call[name[options].messages.keys, parameter[]]
call[name[keys].sort, parameter[]]
for taget[name[key]] in starred[name[keys]] begin[:]
if call[name[key].startswith, parameter[name[prefix]]] begin[:]
call[name[stats].append, parameter[binary_operation[constant[%-7s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b09ec280>, <ast.Name object at 0x7da1b09edf00>, <ast.Subscript object at 0x7da1b09efbe0>]]]]]
return[name[stats]] | keyword[def] identifier[get_statistics] ( identifier[prefix] = literal[string] ):
literal[string]
identifier[stats] =[]
identifier[keys] = identifier[options] . identifier[messages] . identifier[keys] ()
identifier[keys] . identifier[sort] ()
keyword[for] identifier[key] keyword[in] identifier[keys] :
keyword[if] identifier[key] . identifier[startswith] ( identifier[prefix] ):
identifier[stats] . identifier[append] ( literal[string] %
( identifier[options] . identifier[counters] [ identifier[key] ], identifier[key] , identifier[options] . identifier[messages] [ identifier[key] ]))
keyword[return] identifier[stats] | def get_statistics(prefix=''):
"""
Get statistics for message codes that start with the prefix.
prefix='' matches all errors and warnings
prefix='E' matches all errors
prefix='W' matches all warnings
prefix='E4' matches all errors that have to do with imports
"""
stats = []
keys = options.messages.keys()
keys.sort()
for key in keys:
if key.startswith(prefix):
stats.append('%-7s %s %s' % (options.counters[key], key, options.messages[key])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return stats |
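# Usage sketch for get_statistics — the module-global options object is
# assumed to be populated by a prior check run, e.g.:
# options.counters = {'E101': 2}
# options.messages = {'E101': 'indentation contains mixed spaces and tabs'}
# get_statistics('E')  # -> ['2       E101 indentation contains mixed spaces and tabs']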
def search_texts(args, parser):
"""Searches texts for presence of n-grams."""
store = utils.get_data_store(args)
corpus = utils.get_corpus(args)
catalogue = utils.get_catalogue(args)
store.validate(corpus, catalogue)
ngrams = []
for ngram_file in args.ngrams:
ngrams.extend(utils.get_ngrams(ngram_file))
store.search(catalogue, ngrams, sys.stdout) | def function[search_texts, parameter[args, parser]]:
constant[Searches texts for presence of n-grams.]
variable[store] assign[=] call[name[utils].get_data_store, parameter[name[args]]]
variable[corpus] assign[=] call[name[utils].get_corpus, parameter[name[args]]]
variable[catalogue] assign[=] call[name[utils].get_catalogue, parameter[name[args]]]
call[name[store].validate, parameter[name[corpus], name[catalogue]]]
variable[ngrams] assign[=] list[[]]
for taget[name[ngram_file]] in starred[name[args].ngrams] begin[:]
call[name[ngrams].extend, parameter[call[name[utils].get_ngrams, parameter[name[ngram_file]]]]]
call[name[store].search, parameter[name[catalogue], name[ngrams], name[sys].stdout]] | keyword[def] identifier[search_texts] ( identifier[args] , identifier[parser] ):
literal[string]
identifier[store] = identifier[utils] . identifier[get_data_store] ( identifier[args] )
identifier[corpus] = identifier[utils] . identifier[get_corpus] ( identifier[args] )
identifier[catalogue] = identifier[utils] . identifier[get_catalogue] ( identifier[args] )
identifier[store] . identifier[validate] ( identifier[corpus] , identifier[catalogue] )
identifier[ngrams] =[]
keyword[for] identifier[ngram_file] keyword[in] identifier[args] . identifier[ngrams] :
identifier[ngrams] . identifier[extend] ( identifier[utils] . identifier[get_ngrams] ( identifier[ngram_file] ))
identifier[store] . identifier[search] ( identifier[catalogue] , identifier[ngrams] , identifier[sys] . identifier[stdout] ) | def search_texts(args, parser):
"""Searches texts for presence of n-grams."""
store = utils.get_data_store(args)
corpus = utils.get_corpus(args)
catalogue = utils.get_catalogue(args)
store.validate(corpus, catalogue)
ngrams = []
for ngram_file in args.ngrams:
ngrams.extend(utils.get_ngrams(ngram_file)) # depends on [control=['for'], data=['ngram_file']]
store.search(catalogue, ngrams, sys.stdout) |
def author_name(author):
"""
Get the name of the author, or else the username.
It will try to find an email in the author string and cut it off
to get the name.
"""
if not '@' in author:
return author
else:
return author.replace(author_email(author), '').replace('<', '')\
.replace('>', '').strip() | def function[author_name, parameter[author]]:
constant[
Get the name of the author, or else the username.
It will try to find an email in the author string and cut it off
to get the name.
]
if <ast.UnaryOp object at 0x7da1b264a9e0> begin[:]
return[name[author]] | keyword[def] identifier[author_name] ( identifier[author] ):
literal[string]
keyword[if] keyword[not] literal[string] keyword[in] identifier[author] :
keyword[return] identifier[author]
keyword[else] :
keyword[return] identifier[author] . identifier[replace] ( identifier[author_email] ( identifier[author] ), literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[strip] () | def author_name(author):
"""
Get the name of the author, or else the username.
It will try to find an email in the author string and cut it off
to get the name.
"""
if not '@' in author:
return author # depends on [control=['if'], data=[]]
else:
return author.replace(author_email(author), '').replace('<', '').replace('>', '').strip() |
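# Usage sketch for author_name — assumes the companion author_email helper
# returns the bracketed address:
# author_name('John Doe <john@example.com>')  # -> 'John Doe'
# author_name('johndoe')                      # -> 'johndoe' (no email present)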
def _annotate_query(query, generate_dict):
"""Add annotations to the query to retrieve values required by field value generate
functions."""
annotate_key_list = []
for field_name, annotate_dict in generate_dict.items():
for annotate_name, annotate_func in annotate_dict["annotate_dict"].items():
query = annotate_func(query)
annotate_key_list.append(annotate_name)
return query, annotate_key_list | def function[_annotate_query, parameter[query, generate_dict]]:
constant[Add annotations to the query to retrieve values required by the field value
generation functions.]
variable[annotate_key_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b190b3d0>, <ast.Name object at 0x7da1b19088b0>]]] in starred[call[name[generate_dict].items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1908310>, <ast.Name object at 0x7da1b1908df0>]]] in starred[call[call[name[annotate_dict]][constant[annotate_dict]].items, parameter[]]] begin[:]
variable[query] assign[=] call[name[annotate_func], parameter[name[query]]]
call[name[annotate_key_list].append, parameter[name[annotate_name]]]
return[tuple[[<ast.Name object at 0x7da1b190bc70>, <ast.Name object at 0x7da1b1908550>]]] | keyword[def] identifier[_annotate_query] ( identifier[query] , identifier[generate_dict] ):
literal[string]
identifier[annotate_key_list] =[]
keyword[for] identifier[field_name] , identifier[annotate_dict] keyword[in] identifier[generate_dict] . identifier[items] ():
keyword[for] identifier[annotate_name] , identifier[annotate_func] keyword[in] identifier[annotate_dict] [ literal[string] ]. identifier[items] ():
identifier[query] = identifier[annotate_func] ( identifier[query] )
identifier[annotate_key_list] . identifier[append] ( identifier[annotate_name] )
keyword[return] identifier[query] , identifier[annotate_key_list] | def _annotate_query(query, generate_dict):
"""Add annotations to the query to retrieve values required by field value generate
functions."""
annotate_key_list = []
for (field_name, annotate_dict) in generate_dict.items():
for (annotate_name, annotate_func) in annotate_dict['annotate_dict'].items():
query = annotate_func(query)
annotate_key_list.append(annotate_name) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return (query, annotate_key_list) |
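# Shape sketch for _annotate_query's generate_dict — the keys and the Django
# annotate call are illustrative assumptions:
# generate_dict = {
#     'total': {'annotate_dict': {
#         'total_count': lambda q: q.annotate(total_count=Count('items')),
#     }},
# }
# query, keys = _annotate_query(queryset, generate_dict)  # keys == ['total_count']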
def findItems( self, cls ):
"""
Looks up the items in the scene that inherit from the inputted class.
:param cls | <type>
"""
return filter(lambda x: isinstance(x, cls), self.items()) | def function[findItems, parameter[self, cls]]:
constant[
Looks up the items in the scene that inherit from the inputted class.
:param cls | <type>
]
return[call[name[filter], parameter[<ast.Lambda object at 0x7da18f58ea10>, call[name[self].items, parameter[]]]]] | keyword[def] identifier[findItems] ( identifier[self] , identifier[cls] ):
literal[string]
keyword[return] identifier[filter] ( keyword[lambda] identifier[x] : identifier[isinstance] ( identifier[x] , identifier[cls] ), identifier[self] . identifier[items] ()) | def findItems(self, cls):
"""
Looks up the items in the scene that inherit from the inputted class.
:param cls | <type>
"""
return filter(lambda x: isinstance(x, cls), self.items()) |
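# Usage sketch for findItems — a Qt graphics scene; XNode is a hypothetical
# item class:
# nodes = scene.findItems(XNode)  # every scene item that is an XNode instance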
def load_app(config, **kwargs):
'''
Used to load a ``Pecan`` application and its environment based on passed
configuration.
:param config: Can be a dictionary containing configuration, a string which
represents a (relative) configuration filename
returns a pecan.Pecan object
'''
from .configuration import _runtime_conf, set_config
set_config(config, overwrite=True)
for package_name in getattr(_runtime_conf.app, 'modules', []):
module = __import__(package_name, fromlist=['app'])
if hasattr(module, 'app') and hasattr(module.app, 'setup_app'):
app = module.app.setup_app(_runtime_conf, **kwargs)
app.config = _runtime_conf
return app
raise RuntimeError(
'No app.setup_app found in any of the configured app.modules'
) | def function[load_app, parameter[config]]:
constant[
Used to load a ``Pecan`` application and its environment based on passed
configuration.
:param config: Can be a dictionary containing configuration, a string which
represents a (relative) configuration filename
returns a pecan.Pecan object
]
from relative_module[configuration] import module[_runtime_conf], module[set_config]
call[name[set_config], parameter[name[config]]]
for taget[name[package_name]] in starred[call[name[getattr], parameter[name[_runtime_conf].app, constant[modules], list[[]]]]] begin[:]
variable[module] assign[=] call[name[__import__], parameter[name[package_name]]]
if <ast.BoolOp object at 0x7da1b0d77430> begin[:]
variable[app] assign[=] call[name[module].app.setup_app, parameter[name[_runtime_conf]]]
name[app].config assign[=] name[_runtime_conf]
return[name[app]]
<ast.Raise object at 0x7da1b0d771f0> | keyword[def] identifier[load_app] ( identifier[config] ,** identifier[kwargs] ):
literal[string]
keyword[from] . identifier[configuration] keyword[import] identifier[_runtime_conf] , identifier[set_config]
identifier[set_config] ( identifier[config] , identifier[overwrite] = keyword[True] )
keyword[for] identifier[package_name] keyword[in] identifier[getattr] ( identifier[_runtime_conf] . identifier[app] , literal[string] ,[]):
identifier[module] = identifier[__import__] ( identifier[package_name] , identifier[fromlist] =[ literal[string] ])
keyword[if] identifier[hasattr] ( identifier[module] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[module] . identifier[app] , literal[string] ):
identifier[app] = identifier[module] . identifier[app] . identifier[setup_app] ( identifier[_runtime_conf] ,** identifier[kwargs] )
identifier[app] . identifier[config] = identifier[_runtime_conf]
keyword[return] identifier[app]
keyword[raise] identifier[RuntimeError] (
literal[string]
) | def load_app(config, **kwargs):
"""
Used to load a ``Pecan`` application and its environment based on passed
configuration.
:param config: Can be a dictionary containing configuration, a string which
represents a (relative) configuration filename
returns a pecan.Pecan object
"""
from .configuration import _runtime_conf, set_config
set_config(config, overwrite=True)
for package_name in getattr(_runtime_conf.app, 'modules', []):
module = __import__(package_name, fromlist=['app'])
if hasattr(module, 'app') and hasattr(module.app, 'setup_app'):
app = module.app.setup_app(_runtime_conf, **kwargs)
app.config = _runtime_conf
return app # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['package_name']]
raise RuntimeError('No app.setup_app found in any of the configured app.modules') |
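# Usage sketch for load_app — a Pecan config file path, illustrative only:
# from pecan import load_app
# app = load_app('config.py')  # WSGI app from the first module exposing app.setup_app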
def stop(self) -> None:
"""Stops the timer."""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None | def function[stop, parameter[self]]:
constant[Stops the timer.]
name[self]._running assign[=] constant[False]
if compare[name[self]._timeout is_not constant[None]] begin[:]
call[name[self].io_loop.remove_timeout, parameter[name[self]._timeout]]
name[self]._timeout assign[=] constant[None] | keyword[def] identifier[stop] ( identifier[self] )-> keyword[None] :
literal[string]
identifier[self] . identifier[_running] = keyword[False]
keyword[if] identifier[self] . identifier[_timeout] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[io_loop] . identifier[remove_timeout] ( identifier[self] . identifier[_timeout] )
identifier[self] . identifier[_timeout] = keyword[None] | def stop(self) -> None:
"""Stops the timer."""
self._running = False
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None # depends on [control=['if'], data=[]] |
def small_abc_image_recognition():
"""!
@brief Trains the network using letters 'A', 'B', 'C', and recognizes each of them with and without noise.
"""
images = [];
images += IMAGE_SYMBOL_SAMPLES.LIST_IMAGES_SYMBOL_A;
images += IMAGE_SYMBOL_SAMPLES.LIST_IMAGES_SYMBOL_B;
images += IMAGE_SYMBOL_SAMPLES.LIST_IMAGES_SYMBOL_C;
template_recognition_image(images, 250, 25); | def function[small_abc_image_recognition, parameter[]]:
constant[!
@brief Trains the network using letters 'A', 'B', 'C', and recognizes each of them with and without noise.
]
variable[images] assign[=] list[[]]
<ast.AugAssign object at 0x7da18dc98370>
<ast.AugAssign object at 0x7da18dc9b250>
<ast.AugAssign object at 0x7da18dc9a4a0>
call[name[template_recognition_image], parameter[name[images], constant[250], constant[25]]] | keyword[def] identifier[small_abc_image_recognition] ():
literal[string]
identifier[images] =[];
identifier[images] += identifier[IMAGE_SYMBOL_SAMPLES] . identifier[LIST_IMAGES_SYMBOL_A] ;
identifier[images] += identifier[IMAGE_SYMBOL_SAMPLES] . identifier[LIST_IMAGES_SYMBOL_B] ;
identifier[images] += identifier[IMAGE_SYMBOL_SAMPLES] . identifier[LIST_IMAGES_SYMBOL_C] ;
identifier[template_recognition_image] ( identifier[images] , literal[int] , literal[int] ); | def small_abc_image_recognition():
"""!
@brief Trains the network using letters 'A', 'B', 'C', and recognizes each of them with and without noise.
"""
images = []
images += IMAGE_SYMBOL_SAMPLES.LIST_IMAGES_SYMBOL_A
images += IMAGE_SYMBOL_SAMPLES.LIST_IMAGES_SYMBOL_B
images += IMAGE_SYMBOL_SAMPLES.LIST_IMAGES_SYMBOL_C
template_recognition_image(images, 250, 25) |
def get_public_ip_validator():
""" Retrieves a validator for public IP address. Accepting all defaults will perform a check
for an existing name or ID with no ARM-required -type parameter. """
from msrestazure.tools import is_valid_resource_id, resource_id
def simple_validator(cmd, namespace):
if namespace.public_ip_address:
is_list = isinstance(namespace.public_ip_address, list)
def _validate_name_or_id(public_ip):
# determine if public_ip_address is name or ID
is_id = is_valid_resource_id(public_ip)
return public_ip if is_id else resource_id(
subscription=get_subscription_id(cmd.cli_ctx),
resource_group=namespace.resource_group_name,
namespace='Microsoft.Network',
type='publicIPAddresses',
name=public_ip)
if is_list:
for i, public_ip in enumerate(namespace.public_ip_address):
namespace.public_ip_address[i] = _validate_name_or_id(public_ip)
else:
namespace.public_ip_address = _validate_name_or_id(namespace.public_ip_address)
return simple_validator | def function[get_public_ip_validator, parameter[]]:
constant[ Retrieves a validator for public IP address. Accepting all defaults will perform a check
for an existing name or ID with no ARM-required -type parameter. ]
from relative_module[msrestazure.tools] import module[is_valid_resource_id], module[resource_id]
def function[simple_validator, parameter[cmd, namespace]]:
if name[namespace].public_ip_address begin[:]
variable[is_list] assign[=] call[name[isinstance], parameter[name[namespace].public_ip_address, name[list]]]
def function[_validate_name_or_id, parameter[public_ip]]:
variable[is_id] assign[=] call[name[is_valid_resource_id], parameter[name[public_ip]]]
return[<ast.IfExp object at 0x7da1b22adff0>]
if name[is_list] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b22aff70>, <ast.Name object at 0x7da1b22af400>]]] in starred[call[name[enumerate], parameter[name[namespace].public_ip_address]]] begin[:]
call[name[namespace].public_ip_address][name[i]] assign[=] call[name[_validate_name_or_id], parameter[name[public_ip]]]
return[name[simple_validator]] | keyword[def] identifier[get_public_ip_validator] ():
literal[string]
keyword[from] identifier[msrestazure] . identifier[tools] keyword[import] identifier[is_valid_resource_id] , identifier[resource_id]
keyword[def] identifier[simple_validator] ( identifier[cmd] , identifier[namespace] ):
keyword[if] identifier[namespace] . identifier[public_ip_address] :
identifier[is_list] = identifier[isinstance] ( identifier[namespace] . identifier[public_ip_address] , identifier[list] )
keyword[def] identifier[_validate_name_or_id] ( identifier[public_ip] ):
identifier[is_id] = identifier[is_valid_resource_id] ( identifier[public_ip] )
keyword[return] identifier[public_ip] keyword[if] identifier[is_id] keyword[else] identifier[resource_id] (
identifier[subscription] = identifier[get_subscription_id] ( identifier[cmd] . identifier[cli_ctx] ),
identifier[resource_group] = identifier[namespace] . identifier[resource_group_name] ,
identifier[namespace] = literal[string] ,
identifier[type] = literal[string] ,
identifier[name] = identifier[public_ip] )
keyword[if] identifier[is_list] :
keyword[for] identifier[i] , identifier[public_ip] keyword[in] identifier[enumerate] ( identifier[namespace] . identifier[public_ip_address] ):
identifier[namespace] . identifier[public_ip_address] [ identifier[i] ]= identifier[_validate_name_or_id] ( identifier[public_ip] )
keyword[else] :
identifier[namespace] . identifier[public_ip_address] = identifier[_validate_name_or_id] ( identifier[namespace] . identifier[public_ip_address] )
keyword[return] identifier[simple_validator] | def get_public_ip_validator():
""" Retrieves a validator for public IP address. Accepting all defaults will perform a check
for an existing name or ID with no ARM-required -type parameter. """
from msrestazure.tools import is_valid_resource_id, resource_id
def simple_validator(cmd, namespace):
if namespace.public_ip_address:
is_list = isinstance(namespace.public_ip_address, list)
def _validate_name_or_id(public_ip):
# determine if public_ip_address is name or ID
is_id = is_valid_resource_id(public_ip)
return public_ip if is_id else resource_id(subscription=get_subscription_id(cmd.cli_ctx), resource_group=namespace.resource_group_name, namespace='Microsoft.Network', type='publicIPAddresses', name=public_ip)
if is_list:
for (i, public_ip) in enumerate(namespace.public_ip_address):
namespace.public_ip_address[i] = _validate_name_or_id(public_ip) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
namespace.public_ip_address = _validate_name_or_id(namespace.public_ip_address) # depends on [control=['if'], data=[]]
return simple_validator |
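For illustration, a self-contained sketch of the same name-or-ID resolution idea; the ARM resource-ID layout is standard, but the subscription and resource-group values below are invented, and real code should keep using msrestazure.tools:
# Minimal stand-ins for is_valid_resource_id/resource_id, just to show the flow.
def _looks_like_resource_id(value):
    return value.startswith('/subscriptions/')

def _resolve_public_ip(public_ip, subscription, resource_group):
    if _looks_like_resource_id(public_ip):
        return public_ip
    return ('/subscriptions/{sub}/resourceGroups/{rg}'
            '/providers/Microsoft.Network/publicIPAddresses/{name}').format(
                sub=subscription, rg=resource_group, name=public_ip)

print(_resolve_public_ip('my-ip', '00000000-0000-0000-0000-000000000000', 'my-rg'))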
def list_ip(self, instance_id):
"""Add all IPs"""
output = self.client.describe_instances(InstanceIds=[instance_id])
output = output.get("Reservations")[0].get("Instances")[0]
ips = {}
ips['PrivateIp'] = output.get("PrivateIpAddress")
ips['PublicIp'] = output.get("PublicIpAddress")
return ips | def function[list_ip, parameter[self, instance_id]]:
    constant[Return the private and public IP addresses of an instance.]
variable[output] assign[=] call[name[self].client.describe_instances, parameter[]]
variable[output] assign[=] call[call[call[call[name[output].get, parameter[constant[Reservations]]]][constant[0]].get, parameter[constant[Instances]]]][constant[0]]
variable[ips] assign[=] dictionary[[], []]
call[name[ips]][constant[PrivateIp]] assign[=] call[name[output].get, parameter[constant[PrivateIpAddress]]]
call[name[ips]][constant[PublicIp]] assign[=] call[name[output].get, parameter[constant[PublicIpAddress]]]
return[name[ips]] | keyword[def] identifier[list_ip] ( identifier[self] , identifier[instance_id] ):
literal[string]
identifier[output] = identifier[self] . identifier[client] . identifier[describe_instances] ( identifier[InstanceIds] =[ identifier[instance_id] ])
identifier[output] = identifier[output] . identifier[get] ( literal[string] )[ literal[int] ]. identifier[get] ( literal[string] )[ literal[int] ]
identifier[ips] ={}
identifier[ips] [ literal[string] ]= identifier[output] . identifier[get] ( literal[string] )
identifier[ips] [ literal[string] ]= identifier[output] . identifier[get] ( literal[string] )
keyword[return] identifier[ips] | def list_ip(self, instance_id):
"""Add all IPs"""
output = self.client.describe_instances(InstanceIds=[instance_id])
output = output.get('Reservations')[0].get('Instances')[0]
ips = {}
ips['PrivateIp'] = output.get('PrivateIpAddress')
ips['PublicIp'] = output.get('PublicIpAddress')
return ips |
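Hedged usage of list_ip, assuming a thin wrapper that owns a boto3 EC2 client as self.client; the region and instance ID are placeholders:
import boto3

class InstanceInfo:
    def __init__(self, region_name='us-east-1'):
        self.client = boto3.client('ec2', region_name=region_name)
    list_ip = list_ip  # bind the function above as a method

# Needs real AWS credentials and a live instance:
# print(InstanceInfo().list_ip('i-0123456789abcdef0'))
# -> {'PrivateIp': '10.0.0.12', 'PublicIp': '54.0.0.7'} (illustrative values)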
def get_account(self, username):
"""return user by username.
"""
try:
account = self.model.objects.get(
**self._filter_user_by(username)
)
except self.model.DoesNotExist:
return None
return account | def function[get_account, parameter[self, username]]:
    constant[Return the user with the given username, or None if no such user exists.
]
<ast.Try object at 0x7da2041d9780>
return[name[account]] | keyword[def] identifier[get_account] ( identifier[self] , identifier[username] ):
literal[string]
keyword[try] :
identifier[account] = identifier[self] . identifier[model] . identifier[objects] . identifier[get] (
** identifier[self] . identifier[_filter_user_by] ( identifier[username] )
)
keyword[except] identifier[self] . identifier[model] . identifier[DoesNotExist] :
keyword[return] keyword[None]
keyword[return] identifier[account] | def get_account(self, username):
"""return user by username.
"""
try:
account = self.model.objects.get(**self._filter_user_by(username)) # depends on [control=['try'], data=[]]
except self.model.DoesNotExist:
return None # depends on [control=['except'], data=[]]
return account |
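A sketch of the pieces get_account relies on, with an assumed Django-style backend; _filter_user_by and its lookup key are illustrative:
class AccountBackend:
    def __init__(self, model):
        self.model = model  # e.g. django.contrib.auth.get_user_model()

    def _filter_user_by(self, username):
        return {'username__iexact': username}  # assumed case-insensitive lookup

    get_account = get_account  # bind the function above as a method

# backend = AccountBackend(get_user_model())
# user = backend.get_account('alice')  # None when no matching row exists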
def isworkday(self, date):
"""
Check if a given date is a work date, ignoring holidays.
Args:
date (date, datetime or str): Date to be checked.
Returns:
bool: True if the date is a work date, False otherwise.
"""
date = parsefun(date)
return self.weekdaymap[date.weekday()].isworkday | def function[isworkday, parameter[self, date]]:
constant[
Check if a given date is a work date, ignoring holidays.
Args:
date (date, datetime or str): Date to be checked.
Returns:
bool: True if the date is a work date, False otherwise.
]
variable[date] assign[=] call[name[parsefun], parameter[name[date]]]
return[call[name[self].weekdaymap][call[name[date].weekday, parameter[]]].isworkday] | keyword[def] identifier[isworkday] ( identifier[self] , identifier[date] ):
literal[string]
identifier[date] = identifier[parsefun] ( identifier[date] )
keyword[return] identifier[self] . identifier[weekdaymap] [ identifier[date] . identifier[weekday] ()]. identifier[isworkday] | def isworkday(self, date):
"""
Check if a given date is a work date, ignoring holidays.
Args:
date (date, datetime or str): Date to be checked.
Returns:
bool: True if the date is a work date, False otherwise.
"""
date = parsefun(date)
return self.weekdaymap[date.weekday()].isworkday |
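For example, with a Monday-to-Friday calendar; parsefun and weekdaymap belong to the surrounding class, so plausible stand-ins are stubbed here:
import datetime
from collections import namedtuple

DayInfo = namedtuple('DayInfo', 'isworkday')

class Calendar:
    # Monday..Sunday: the first five weekdays count as work days.
    weekdaymap = [DayInfo(True)] * 5 + [DayInfo(False)] * 2

    def isworkday(self, date):
        return self.weekdaymap[date.weekday()].isworkday

print(Calendar().isworkday(datetime.date(2024, 1, 6)))  # Saturday -> False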
def get_qpimage_raw(self, idx):
"""Return QPImage without background correction"""
with self._qpseries() as qps:
qpi = qps.get_qpimage(index=idx).copy()
# Remove previously performed background correction
qpi.set_bg_data(None)
# Force meta data
for key in self.meta_data:
qpi[key] = self.meta_data[key]
# set identifier
qpi["identifier"] = self.get_identifier(idx)
return qpi | def function[get_qpimage_raw, parameter[self, idx]]:
constant[Return QPImage without background correction]
with call[name[self]._qpseries, parameter[]] begin[:]
variable[qpi] assign[=] call[call[name[qps].get_qpimage, parameter[]].copy, parameter[]]
call[name[qpi].set_bg_data, parameter[constant[None]]]
for taget[name[key]] in starred[name[self].meta_data] begin[:]
call[name[qpi]][name[key]] assign[=] call[name[self].meta_data][name[key]]
call[name[qpi]][constant[identifier]] assign[=] call[name[self].get_identifier, parameter[name[idx]]]
return[name[qpi]] | keyword[def] identifier[get_qpimage_raw] ( identifier[self] , identifier[idx] ):
literal[string]
keyword[with] identifier[self] . identifier[_qpseries] () keyword[as] identifier[qps] :
identifier[qpi] = identifier[qps] . identifier[get_qpimage] ( identifier[index] = identifier[idx] ). identifier[copy] ()
identifier[qpi] . identifier[set_bg_data] ( keyword[None] )
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[meta_data] :
identifier[qpi] [ identifier[key] ]= identifier[self] . identifier[meta_data] [ identifier[key] ]
identifier[qpi] [ literal[string] ]= identifier[self] . identifier[get_identifier] ( identifier[idx] )
keyword[return] identifier[qpi] | def get_qpimage_raw(self, idx):
"""Return QPImage without background correction"""
with self._qpseries() as qps:
qpi = qps.get_qpimage(index=idx).copy() # depends on [control=['with'], data=['qps']]
# Remove previously performed background correction
qpi.set_bg_data(None)
# Force meta data
for key in self.meta_data:
qpi[key] = self.meta_data[key] # depends on [control=['for'], data=['key']]
# set identifier
qpi['identifier'] = self.get_identifier(idx)
return qpi |
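Hedged usage, assuming this method lives on a qpformat dataset object returned by qpformat.load_data; the file path is made up:
# import qpformat
# ds = qpformat.load_data('measurement.h5')   # hypothetical phase-series file
# qpi = ds.get_qpimage_raw(0)                 # background correction stripped
# print(qpi['identifier'])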
def _get_available_placements(D, tt):
"""
Called from: _prompt_placement()
Get a list of possible places that we can put the new model data into.
If no model exists yet, we'll use something like chron0model0. If other models exist,
we'll go for the n+1 entry.
ex: chron0model0 already exists, so we'll look to chron0model1 next.
:param dict D: Metadata
:param str tt: Table Type
:return list _options: Possible placements
"""
_options = []
try:
for _pc in ["paleoData", "chronData"]:
if _pc in D:
# for each entry in pc
for section_name, section_data in D[_pc].items():
# looking for open spots for measurement tables
if tt == "measurement":
if "measurementTable" in section_data:
_options.append(_get_available_placements_1(section_data["measurementTable"], section_name, "measurement"))
# looking for open spots for model tables
else:
# Is there a model? Need model data to keep going
if "model" in section_data:
# this is for adding a whole model (all 4 tables, ens/dist/sum/method)
if tt == "model":
_options.append(_get_available_placements_1(section_data["model"], section_name, "model"))
else:
# for adding individual model tables
                                for _k, _v in section_data["model"].items():
# keys here are stored as "<type>Table", so add "Table" to each table type
_tt_table = "{}Table".format(tt)
# does this table exist?
if _tt_table in _v:
# Get the first available position for this section
_options.append(
_get_available_placements_1(_v[_tt_table], _k, tt))
else:
# Doesn't currently exist. Make the first option index 0.
_options.append("{}{}0".format(_k, tt))
# no models present, so we automatically default placement options to the 0 index.
else:
if tt == "model":
# adding a whole model, so no need to be specific
_options.append("{}model0".format(section_name))
else:
# adding a specific table, so the position is more specific also
_options.append("{}model0{}0".format(section_name, tt))
except Exception as e:
sys.exit("Looking for open table positions: Unable to find placement options, {}".format(e))
# remove empty names
_options = [i for i in _options if i]
# Is the whole list empty? that's not good.
if not _options:
sys.exit("Error: No available positions found to place new data. Something went wrong.")
return _options | def function[_get_available_placements, parameter[D, tt]]:
constant[
Called from: _prompt_placement()
Get a list of possible places that we can put the new model data into.
If no model exists yet, we'll use something like chron0model0. If other models exist,
we'll go for the n+1 entry.
ex: chron0model0 already exists, so we'll look to chron0model1 next.
:param dict D: Metadata
:param str tt: Table Type
:return list _options: Possible placements
]
variable[_options] assign[=] list[[]]
<ast.Try object at 0x7da20c7ca380>
variable[_options] assign[=] <ast.ListComp object at 0x7da18f00e0e0>
if <ast.UnaryOp object at 0x7da18f00ceb0> begin[:]
call[name[sys].exit, parameter[constant[Error: No available positions found to place new data. Something went wrong.]]]
return[name[_options]] | keyword[def] identifier[_get_available_placements] ( identifier[D] , identifier[tt] ):
literal[string]
identifier[_options] =[]
keyword[try] :
keyword[for] identifier[_pc] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[_pc] keyword[in] identifier[D] :
keyword[for] identifier[section_name] , identifier[section_data] keyword[in] identifier[D] [ identifier[_pc] ]. identifier[items] ():
keyword[if] identifier[tt] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[section_data] :
identifier[_options] . identifier[append] ( identifier[_get_available_placements_1] ( identifier[section_data] [ literal[string] ], identifier[section_name] , literal[string] ))
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[section_data] :
keyword[if] identifier[tt] == literal[string] :
identifier[_options] . identifier[append] ( identifier[_get_available_placements_1] ( identifier[section_data] [ literal[string] ], identifier[section_name] , literal[string] ))
keyword[else] :
                        keyword[for] identifier[_k] , identifier[_v] keyword[in] identifier[section_data] [ literal[string] ]. identifier[items] ():
identifier[_tt_table] = literal[string] . identifier[format] ( identifier[tt] )
keyword[if] identifier[_tt_table] keyword[in] identifier[_v] :
identifier[_options] . identifier[append] (
identifier[_get_available_placements_1] ( identifier[_v] [ identifier[_tt_table] ], identifier[_k] , identifier[tt] ))
keyword[else] :
identifier[_options] . identifier[append] ( literal[string] . identifier[format] ( identifier[_k] , identifier[tt] ))
keyword[else] :
keyword[if] identifier[tt] == literal[string] :
identifier[_options] . identifier[append] ( literal[string] . identifier[format] ( identifier[section_name] ))
keyword[else] :
identifier[_options] . identifier[append] ( literal[string] . identifier[format] ( identifier[section_name] , identifier[tt] ))
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[sys] . identifier[exit] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[_options] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[_options] keyword[if] identifier[i] ]
keyword[if] keyword[not] identifier[_options] :
identifier[sys] . identifier[exit] ( literal[string] )
keyword[return] identifier[_options] | def _get_available_placements(D, tt):
"""
Called from: _prompt_placement()
Get a list of possible places that we can put the new model data into.
If no model exists yet, we'll use something like chron0model0. If other models exist,
we'll go for the n+1 entry.
ex: chron0model0 already exists, so we'll look to chron0model1 next.
:param dict D: Metadata
:param str tt: Table Type
:return list _options: Possible placements
"""
_options = []
try:
for _pc in ['paleoData', 'chronData']:
if _pc in D:
# for each entry in pc
for (section_name, section_data) in D[_pc].items():
# looking for open spots for measurement tables
if tt == 'measurement':
if 'measurementTable' in section_data:
_options.append(_get_available_placements_1(section_data['measurementTable'], section_name, 'measurement')) # depends on [control=['if'], data=['section_data']] # depends on [control=['if'], data=[]]
# looking for open spots for model tables
# Is there a model? Need model data to keep going
elif 'model' in section_data:
# this is for adding a whole model (all 4 tables, ens/dist/sum/method)
if tt == 'model':
_options.append(_get_available_placements_1(section_data['model'], section_name, 'model')) # depends on [control=['if'], data=[]]
else:
# for adding individual model tables
                        for (_k, _v) in section_data['model'].items():
# keys here are stored as "<type>Table", so add "Table" to each table type
_tt_table = '{}Table'.format(tt)
# does this table exist?
if _tt_table in _v:
# Get the first available position for this section
_options.append(_get_available_placements_1(_v[_tt_table], _k, tt)) # depends on [control=['if'], data=['_tt_table', '_v']]
else:
# Doesn't currently exist. Make the first option index 0.
_options.append('{}{}0'.format(_k, tt)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['section_data']]
# no models present, so we automatically default placement options to the 0 index.
elif tt == 'model':
# adding a whole model, so no need to be specific
_options.append('{}model0'.format(section_name)) # depends on [control=['if'], data=[]]
else:
# adding a specific table, so the position is more specific also
_options.append('{}model0{}0'.format(section_name, tt)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['_pc', 'D']] # depends on [control=['for'], data=['_pc']] # depends on [control=['try'], data=[]]
except Exception as e:
sys.exit('Looking for open table positions: Unable to find placement options, {}'.format(e)) # depends on [control=['except'], data=['e']]
# remove empty names
_options = [i for i in _options if i]
# Is the whole list empty? that's not good.
if not _options:
sys.exit('Error: No available positions found to place new data. Something went wrong.') # depends on [control=['if'], data=[]]
return _options |
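A toy run over minimal metadata that exercises the no-models branch, so the _get_available_placements_1 helper is never needed; the section layout is made up but follows the LiPD chron0/paleo0 naming:
D = {'chronData': {'chron0': {'measurementTable': {}}}}
print(_get_available_placements(D, 'summary'))  # -> ['chron0model0summary0']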
def get_all_tags(self):
"""
This method returns a list of all tags.
"""
data = self.get_data("tags")
return [
Tag(token=self.token, **tag) for tag in data['tags']
] | def function[get_all_tags, parameter[self]]:
constant[
This method returns a list of all tags.
]
variable[data] assign[=] call[name[self].get_data, parameter[constant[tags]]]
return[<ast.ListComp object at 0x7da1b016d030>] | keyword[def] identifier[get_all_tags] ( identifier[self] ):
literal[string]
identifier[data] = identifier[self] . identifier[get_data] ( literal[string] )
keyword[return] [
identifier[Tag] ( identifier[token] = identifier[self] . identifier[token] ,** identifier[tag] ) keyword[for] identifier[tag] keyword[in] identifier[data] [ literal[string] ]
] | def get_all_tags(self):
"""
This method returns a list of all tags.
"""
data = self.get_data('tags')
return [Tag(token=self.token, **tag) for tag in data['tags']] |
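Hedged usage, assuming a python-digitalocean-style Manager exposes this method; the API token is a placeholder:
# import digitalocean
# manager = digitalocean.Manager(token='DO_API_TOKEN')  # placeholder token
# for tag in manager.get_all_tags():
#     print(tag.name)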
def check_for_insufficient_eth(
self,
transaction_name: str,
transaction_executed: bool,
required_gas: int,
block_identifier: BlockSpecification,
):
""" After estimate gas failure checks if our address has enough balance.
If the account did not have enough ETH balance to execute the,
transaction then it raises an `InsufficientFunds` error
"""
if transaction_executed:
return
our_address = to_checksum_address(self.address)
balance = self.web3.eth.getBalance(our_address, block_identifier)
required_balance = required_gas * self.gas_price()
if balance < required_balance:
msg = f'Failed to execute {transaction_name} due to insufficient ETH'
log.critical(msg, required_wei=required_balance, actual_wei=balance)
raise InsufficientFunds(msg) | def function[check_for_insufficient_eth, parameter[self, transaction_name, transaction_executed, required_gas, block_identifier]]:
    constant[ After an estimate-gas failure, check whether our address has enough balance.
    If the account did not have enough ETH balance to execute the
    transaction, then it raises an `InsufficientFunds` error
]
if name[transaction_executed] begin[:]
return[None]
variable[our_address] assign[=] call[name[to_checksum_address], parameter[name[self].address]]
variable[balance] assign[=] call[name[self].web3.eth.getBalance, parameter[name[our_address], name[block_identifier]]]
variable[required_balance] assign[=] binary_operation[name[required_gas] * call[name[self].gas_price, parameter[]]]
if compare[name[balance] less[<] name[required_balance]] begin[:]
variable[msg] assign[=] <ast.JoinedStr object at 0x7da1b17138b0>
call[name[log].critical, parameter[name[msg]]]
<ast.Raise object at 0x7da1b1710880> | keyword[def] identifier[check_for_insufficient_eth] (
identifier[self] ,
identifier[transaction_name] : identifier[str] ,
identifier[transaction_executed] : identifier[bool] ,
identifier[required_gas] : identifier[int] ,
identifier[block_identifier] : identifier[BlockSpecification] ,
):
literal[string]
keyword[if] identifier[transaction_executed] :
keyword[return]
identifier[our_address] = identifier[to_checksum_address] ( identifier[self] . identifier[address] )
identifier[balance] = identifier[self] . identifier[web3] . identifier[eth] . identifier[getBalance] ( identifier[our_address] , identifier[block_identifier] )
identifier[required_balance] = identifier[required_gas] * identifier[self] . identifier[gas_price] ()
keyword[if] identifier[balance] < identifier[required_balance] :
identifier[msg] = literal[string]
identifier[log] . identifier[critical] ( identifier[msg] , identifier[required_wei] = identifier[required_balance] , identifier[actual_wei] = identifier[balance] )
keyword[raise] identifier[InsufficientFunds] ( identifier[msg] ) | def check_for_insufficient_eth(self, transaction_name: str, transaction_executed: bool, required_gas: int, block_identifier: BlockSpecification):
""" After estimate gas failure checks if our address has enough balance.
If the account did not have enough ETH balance to execute the,
transaction then it raises an `InsufficientFunds` error
"""
if transaction_executed:
return # depends on [control=['if'], data=[]]
our_address = to_checksum_address(self.address)
balance = self.web3.eth.getBalance(our_address, block_identifier)
required_balance = required_gas * self.gas_price()
if balance < required_balance:
msg = f'Failed to execute {transaction_name} due to insufficient ETH'
log.critical(msg, required_wei=required_balance, actual_wei=balance)
raise InsufficientFunds(msg) # depends on [control=['if'], data=['balance', 'required_balance']] |
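The balance check itself is plain wei arithmetic; a self-contained sketch of the failing case with made-up numbers:
required_gas = 21000
gas_price_wei = 20 * 10**9             # 20 gwei
balance_wei = 100_000 * 10**9          # far below what the call needs
required_balance = required_gas * gas_price_wei
print(balance_wei < required_balance)  # True -> InsufficientFunds would be raised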
def from_stmt(stmt, engine, **kwargs):
"""
    Execute a query in the form of a text clause; return the result in the form of
:class:`PrettyTable`.
:type stmt: TextClause
:param stmt:
:type engine: Engine
:param engine:
:rtype: PrettyTable
"""
result_proxy = engine.execute(stmt, **kwargs)
return from_db_cursor(result_proxy.cursor) | def function[from_stmt, parameter[stmt, engine]]:
constant[
    Execute a query in the form of a text clause; return the result in the form of
:class:`PrettyTable`.
:type stmt: TextClause
:param stmt:
:type engine: Engine
:param engine:
:rtype: PrettyTable
]
variable[result_proxy] assign[=] call[name[engine].execute, parameter[name[stmt]]]
return[call[name[from_db_cursor], parameter[name[result_proxy].cursor]]] | keyword[def] identifier[from_stmt] ( identifier[stmt] , identifier[engine] ,** identifier[kwargs] ):
literal[string]
identifier[result_proxy] = identifier[engine] . identifier[execute] ( identifier[stmt] ,** identifier[kwargs] )
keyword[return] identifier[from_db_cursor] ( identifier[result_proxy] . identifier[cursor] ) | def from_stmt(stmt, engine, **kwargs):
"""
    Execute a query in the form of a text clause; return the result in the form of
:class:`PrettyTable`.
:type stmt: TextClause
:param stmt:
:type engine: Engine
:param engine:
:rtype: PrettyTable
"""
result_proxy = engine.execute(stmt, **kwargs)
return from_db_cursor(result_proxy.cursor) |
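A runnable sketch against in-memory SQLite; it assumes SQLAlchemy 1.x-style engine.execute (which from_stmt itself relies on) and prettytable's from_db_cursor:
from sqlalchemy import create_engine, text

engine = create_engine('sqlite://')
engine.execute(text('CREATE TABLE t (a INTEGER, b TEXT)'))
engine.execute(text("INSERT INTO t VALUES (1, 'x'), (2, 'y')"))
print(from_stmt(text('SELECT a, b FROM t'), engine))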
def Nads_in_slab(self):
"""
Returns the TOTAL number of adsorbates in the slab on BOTH sides
"""
return sum([self.composition.as_dict()[a] for a \
in self.ads_entries_dict.keys()]) | def function[Nads_in_slab, parameter[self]]:
constant[
Returns the TOTAL number of adsorbates in the slab on BOTH sides
]
return[call[name[sum], parameter[<ast.ListComp object at 0x7da20e9573a0>]]] | keyword[def] identifier[Nads_in_slab] ( identifier[self] ):
literal[string]
keyword[return] identifier[sum] ([ identifier[self] . identifier[composition] . identifier[as_dict] ()[ identifier[a] ] keyword[for] identifier[a] keyword[in] identifier[self] . identifier[ads_entries_dict] . identifier[keys] ()]) | def Nads_in_slab(self):
"""
Returns the TOTAL number of adsorbates in the slab on BOTH sides
"""
return sum([self.composition.as_dict()[a] for a in self.ads_entries_dict.keys()]) |
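Conceptually this just sums composition counts over the adsorbate species; a stand-alone illustration with made-up numbers:
composition = {'Pt': 16, 'O': 2, 'H': 2}   # slab atoms plus adsorbates
ads_entries_dict = {'O': None, 'H': None}  # adsorbate species only
print(sum(composition[a] for a in ads_entries_dict))  # -> 4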
def get_next_triangle(mesh, T, plane, intersection, dist_tol):
"""
Returns the next triangle to visit given the intersection and
    the list of unvisited triangles (T).
    We look for a triangle that is cut by the plane (2 intersections) as
    opposed to one that only touches the plane (1 vertex intersection).
"""
if intersection[0] == INTERSECT_EDGE:
tris = mesh.triangles_for_edge(intersection[2])
elif intersection[0] == INTERSECT_VERTEX:
tris = mesh.triangles_for_vert(intersection[2])
else:
assert False, 'Invalid intersection[0] value : %d' % intersection[0]
# Knowing where we come from is not enough. If an edge of the triangle
# lies exactly on the plane, i.e. :
#
# /t1\
# -v1---v2-
# \t2/
#
# With v1, v2 being the vertices and t1, t2 being the triangles, then
# if you just try to go to the next connected triangle that intersect,
# you can visit v1 -> t1 -> v2 -> t2 -> v1 .
# Therefore, we need to limit the new candidates to the set of unvisited
# triangles and once we've visited a triangle and decided on a next one,
# remove all the neighbors of the visited triangle so we don't come
# back to it
T = set(T)
for tid in tris:
if tid in T:
intersections = compute_triangle_plane_intersections(
mesh, tid, plane, dist_tol)
if len(intersections) == 2:
T = T.difference(tris)
return tid, intersections, T
return None, [], T | def function[get_next_triangle, parameter[mesh, T, plane, intersection, dist_tol]]:
constant[
Returns the next triangle to visit given the intersection and
    the list of unvisited triangles (T).
    We look for a triangle that is cut by the plane (2 intersections) as
    opposed to one that only touches the plane (1 vertex intersection).
]
if compare[call[name[intersection]][constant[0]] equal[==] name[INTERSECT_EDGE]] begin[:]
variable[tris] assign[=] call[name[mesh].triangles_for_edge, parameter[call[name[intersection]][constant[2]]]]
variable[T] assign[=] call[name[set], parameter[name[T]]]
for taget[name[tid]] in starred[name[tris]] begin[:]
if compare[name[tid] in name[T]] begin[:]
variable[intersections] assign[=] call[name[compute_triangle_plane_intersections], parameter[name[mesh], name[tid], name[plane], name[dist_tol]]]
if compare[call[name[len], parameter[name[intersections]]] equal[==] constant[2]] begin[:]
variable[T] assign[=] call[name[T].difference, parameter[name[tris]]]
return[tuple[[<ast.Name object at 0x7da1b0465f30>, <ast.Name object at 0x7da1b0467640>, <ast.Name object at 0x7da1b04667a0>]]]
return[tuple[[<ast.Constant object at 0x7da1b0467d60>, <ast.List object at 0x7da1b04665c0>, <ast.Name object at 0x7da1b0465a80>]]] | keyword[def] identifier[get_next_triangle] ( identifier[mesh] , identifier[T] , identifier[plane] , identifier[intersection] , identifier[dist_tol] ):
literal[string]
keyword[if] identifier[intersection] [ literal[int] ]== identifier[INTERSECT_EDGE] :
identifier[tris] = identifier[mesh] . identifier[triangles_for_edge] ( identifier[intersection] [ literal[int] ])
keyword[elif] identifier[intersection] [ literal[int] ]== identifier[INTERSECT_VERTEX] :
identifier[tris] = identifier[mesh] . identifier[triangles_for_vert] ( identifier[intersection] [ literal[int] ])
keyword[else] :
keyword[assert] keyword[False] , literal[string] % identifier[intersection] [ literal[int] ]
identifier[T] = identifier[set] ( identifier[T] )
keyword[for] identifier[tid] keyword[in] identifier[tris] :
keyword[if] identifier[tid] keyword[in] identifier[T] :
identifier[intersections] = identifier[compute_triangle_plane_intersections] (
identifier[mesh] , identifier[tid] , identifier[plane] , identifier[dist_tol] )
keyword[if] identifier[len] ( identifier[intersections] )== literal[int] :
identifier[T] = identifier[T] . identifier[difference] ( identifier[tris] )
keyword[return] identifier[tid] , identifier[intersections] , identifier[T]
keyword[return] keyword[None] ,[], identifier[T] | def get_next_triangle(mesh, T, plane, intersection, dist_tol):
"""
Returns the next triangle to visit given the intersection and
    the list of unvisited triangles (T).
    We look for a triangle that is cut by the plane (2 intersections) as
    opposed to one that only touches the plane (1 vertex intersection).
"""
if intersection[0] == INTERSECT_EDGE:
tris = mesh.triangles_for_edge(intersection[2]) # depends on [control=['if'], data=[]]
elif intersection[0] == INTERSECT_VERTEX:
tris = mesh.triangles_for_vert(intersection[2]) # depends on [control=['if'], data=[]]
else:
assert False, 'Invalid intersection[0] value : %d' % intersection[0]
# Knowing where we come from is not enough. If an edge of the triangle
# lies exactly on the plane, i.e. :
#
# /t1\
# -v1---v2-
# \t2/
#
# With v1, v2 being the vertices and t1, t2 being the triangles, then
# if you just try to go to the next connected triangle that intersect,
# you can visit v1 -> t1 -> v2 -> t2 -> v1 .
# Therefore, we need to limit the new candidates to the set of unvisited
# triangles and once we've visited a triangle and decided on a next one,
# remove all the neighbors of the visited triangle so we don't come
# back to it
T = set(T)
for tid in tris:
if tid in T:
intersections = compute_triangle_plane_intersections(mesh, tid, plane, dist_tol)
if len(intersections) == 2:
T = T.difference(tris)
return (tid, intersections, T) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['tid', 'T']] # depends on [control=['for'], data=['tid']]
return (None, [], T) |
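The visited-set bookkeeping is the subtle part; a stripped-down illustration with made-up triangle IDs, where n_hits stands in for the length of compute_triangle_plane_intersections:
T = {1, 2, 3}            # unvisited triangles
tris = [1, 2]            # neighbours reachable from the current intersection
n_hits = {1: 1, 2: 2}    # pretend plane-intersection counts
for tid in tris:
    if tid in T and n_hits[tid] == 2:
        T = T.difference(tris)  # drop every neighbour, not just tid
        print('next:', tid, 'unvisited:', T)  # next: 2 unvisited: {3}
        break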
def _warn(message, warn_type='user'):
"""
message (unicode): The message to display.
    warn_type (unicode): Key into WARNINGS that selects the Warning category.
"""
w_id = message.split('[', 1)[1].split(']', 1)[0] # get ID from string
if warn_type in SPACY_WARNING_TYPES and w_id not in SPACY_WARNING_IGNORE:
category = WARNINGS[warn_type]
stack = inspect.stack()[-1]
with warnings.catch_warnings():
if SPACY_WARNING_FILTER:
warnings.simplefilter(SPACY_WARNING_FILTER, category)
warnings.warn_explicit(message, category, stack[1], stack[2]) | def function[_warn, parameter[message, warn_type]]:
constant[
message (unicode): The message to display.
    warn_type (unicode): Key into WARNINGS that selects the Warning category.
]
variable[w_id] assign[=] call[call[call[call[name[message].split, parameter[constant[[], constant[1]]]][constant[1]].split, parameter[constant[]], constant[1]]]][constant[0]]
if <ast.BoolOp object at 0x7da1b1a3cdf0> begin[:]
variable[category] assign[=] call[name[WARNINGS]][name[warn_type]]
variable[stack] assign[=] call[call[name[inspect].stack, parameter[]]][<ast.UnaryOp object at 0x7da1b1a3cd90>]
with call[name[warnings].catch_warnings, parameter[]] begin[:]
if name[SPACY_WARNING_FILTER] begin[:]
call[name[warnings].simplefilter, parameter[name[SPACY_WARNING_FILTER], name[category]]]
call[name[warnings].warn_explicit, parameter[name[message], name[category], call[name[stack]][constant[1]], call[name[stack]][constant[2]]]] | keyword[def] identifier[_warn] ( identifier[message] , identifier[warn_type] = literal[string] ):
literal[string]
identifier[w_id] = identifier[message] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
keyword[if] identifier[warn_type] keyword[in] identifier[SPACY_WARNING_TYPES] keyword[and] identifier[w_id] keyword[not] keyword[in] identifier[SPACY_WARNING_IGNORE] :
identifier[category] = identifier[WARNINGS] [ identifier[warn_type] ]
identifier[stack] = identifier[inspect] . identifier[stack] ()[- literal[int] ]
keyword[with] identifier[warnings] . identifier[catch_warnings] ():
keyword[if] identifier[SPACY_WARNING_FILTER] :
identifier[warnings] . identifier[simplefilter] ( identifier[SPACY_WARNING_FILTER] , identifier[category] )
identifier[warnings] . identifier[warn_explicit] ( identifier[message] , identifier[category] , identifier[stack] [ literal[int] ], identifier[stack] [ literal[int] ]) | def _warn(message, warn_type='user'):
"""
message (unicode): The message to display.
category (Warning): The Warning to show.
"""
w_id = message.split('[', 1)[1].split(']', 1)[0] # get ID from string
if warn_type in SPACY_WARNING_TYPES and w_id not in SPACY_WARNING_IGNORE:
category = WARNINGS[warn_type]
stack = inspect.stack()[-1]
with warnings.catch_warnings():
if SPACY_WARNING_FILTER:
warnings.simplefilter(SPACY_WARNING_FILTER, category) # depends on [control=['if'], data=[]]
warnings.warn_explicit(message, category, stack[1], stack[2]) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] |
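A hedged way to exercise _warn outside spaCy, with stub module globals standing in for spaCy's real settings (all values assumed); note the helper expects the warning ID in square brackets:
import inspect
import warnings

WARNINGS = {'user': UserWarning}
SPACY_WARNING_TYPES = {'user'}
SPACY_WARNING_IGNORE = set()
SPACY_WARNING_FILTER = 'always'

_warn('Example message. [W001]', 'user')  # emits a UserWarning tagged W001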
def gen_schedule(user, num_blocks=6, surrounding_blocks=None):
"""Generate a list of information about a block and a student's current activity signup.
Returns:
schedule
no_signup_today
"""
no_signup_today = None
schedule = []
if surrounding_blocks is None:
surrounding_blocks = EighthBlock.objects.get_upcoming_blocks(num_blocks)
if len(surrounding_blocks) == 0:
return None, False
# Use select_related to reduce query count
signups = (EighthSignup.objects.filter(user=user, scheduled_activity__block__in=surrounding_blocks).select_related(
"scheduled_activity", "scheduled_activity__block", "scheduled_activity__activity"))
block_signup_map = {s.scheduled_activity.block.id: s.scheduled_activity for s in signups}
for b in surrounding_blocks:
current_sched_act = block_signup_map.get(b.id, None)
if current_sched_act:
current_signup = current_sched_act.title_with_flags
current_signup_cancelled = current_sched_act.cancelled
current_signup_sticky = current_sched_act.activity.sticky
rooms = current_sched_act.get_scheduled_rooms()
else:
current_signup = None
current_signup_cancelled = False
current_signup_sticky = False
rooms = None
# warning flag (red block text and signup link) if no signup today
# cancelled flag (red activity text) if cancelled
flags = "locked" if b.locked else "open"
blk_today = b.is_today()
if blk_today and not current_signup:
flags += " warning"
if current_signup_cancelled:
flags += " cancelled warning"
if current_signup_cancelled:
# don't duplicate this info; already caught
current_signup = current_signup.replace(" (Cancelled)", "")
info = {
"id": b.id,
"block": b,
"block_letter": b.block_letter,
"current_signup": current_signup,
"current_signup_cancelled": current_signup_cancelled,
"current_signup_sticky": current_signup_sticky,
"locked": b.locked,
"date": b.date,
"flags": flags,
"is_today": blk_today,
"signup_time": b.signup_time,
"signup_time_future": b.signup_time_future,
"rooms": rooms
}
schedule.append(info)
if blk_today and not current_signup:
no_signup_today = True
return schedule, no_signup_today | def function[gen_schedule, parameter[user, num_blocks, surrounding_blocks]]:
constant[Generate a list of information about a block and a student's current activity signup.
Returns:
schedule
no_signup_today
]
variable[no_signup_today] assign[=] constant[None]
variable[schedule] assign[=] list[[]]
if compare[name[surrounding_blocks] is constant[None]] begin[:]
variable[surrounding_blocks] assign[=] call[name[EighthBlock].objects.get_upcoming_blocks, parameter[name[num_blocks]]]
if compare[call[name[len], parameter[name[surrounding_blocks]]] equal[==] constant[0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da1b04d59c0>, <ast.Constant object at 0x7da1b04d5750>]]]
variable[signups] assign[=] call[call[name[EighthSignup].objects.filter, parameter[]].select_related, parameter[constant[scheduled_activity], constant[scheduled_activity__block], constant[scheduled_activity__activity]]]
variable[block_signup_map] assign[=] <ast.DictComp object at 0x7da1b04d62c0>
for taget[name[b]] in starred[name[surrounding_blocks]] begin[:]
variable[current_sched_act] assign[=] call[name[block_signup_map].get, parameter[name[b].id, constant[None]]]
if name[current_sched_act] begin[:]
variable[current_signup] assign[=] name[current_sched_act].title_with_flags
variable[current_signup_cancelled] assign[=] name[current_sched_act].cancelled
variable[current_signup_sticky] assign[=] name[current_sched_act].activity.sticky
variable[rooms] assign[=] call[name[current_sched_act].get_scheduled_rooms, parameter[]]
variable[flags] assign[=] <ast.IfExp object at 0x7da1b04d70a0>
variable[blk_today] assign[=] call[name[b].is_today, parameter[]]
if <ast.BoolOp object at 0x7da1b04d4430> begin[:]
<ast.AugAssign object at 0x7da1b04d6cb0>
if name[current_signup_cancelled] begin[:]
<ast.AugAssign object at 0x7da1b04d7e80>
if name[current_signup_cancelled] begin[:]
variable[current_signup] assign[=] call[name[current_signup].replace, parameter[constant[ (Cancelled)], constant[]]]
variable[info] assign[=] dictionary[[<ast.Constant object at 0x7da1b04d76a0>, <ast.Constant object at 0x7da1b04d5c30>, <ast.Constant object at 0x7da1b04d5180>, <ast.Constant object at 0x7da1b04d40d0>, <ast.Constant object at 0x7da1b04d7580>, <ast.Constant object at 0x7da1b04d6200>, <ast.Constant object at 0x7da1b04d7490>, <ast.Constant object at 0x7da1b04d7ac0>, <ast.Constant object at 0x7da1b04d6fb0>, <ast.Constant object at 0x7da1b04d7af0>, <ast.Constant object at 0x7da1b04d7ca0>, <ast.Constant object at 0x7da20c795b70>, <ast.Constant object at 0x7da20c7950c0>], [<ast.Attribute object at 0x7da20c797730>, <ast.Name object at 0x7da20c795630>, <ast.Attribute object at 0x7da20c7962f0>, <ast.Name object at 0x7da20c795a80>, <ast.Name object at 0x7da20c7942e0>, <ast.Name object at 0x7da20c796560>, <ast.Attribute object at 0x7da20c794130>, <ast.Attribute object at 0x7da20c7959f0>, <ast.Name object at 0x7da20c7946d0>, <ast.Name object at 0x7da20c796b30>, <ast.Attribute object at 0x7da20c795b10>, <ast.Attribute object at 0x7da2045666b0>, <ast.Name object at 0x7da204565660>]]
call[name[schedule].append, parameter[name[info]]]
if <ast.BoolOp object at 0x7da1b02e78b0> begin[:]
variable[no_signup_today] assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da1b02e4fa0>, <ast.Name object at 0x7da1b02e71f0>]]] | keyword[def] identifier[gen_schedule] ( identifier[user] , identifier[num_blocks] = literal[int] , identifier[surrounding_blocks] = keyword[None] ):
literal[string]
identifier[no_signup_today] = keyword[None]
identifier[schedule] =[]
keyword[if] identifier[surrounding_blocks] keyword[is] keyword[None] :
identifier[surrounding_blocks] = identifier[EighthBlock] . identifier[objects] . identifier[get_upcoming_blocks] ( identifier[num_blocks] )
keyword[if] identifier[len] ( identifier[surrounding_blocks] )== literal[int] :
keyword[return] keyword[None] , keyword[False]
identifier[signups] =( identifier[EighthSignup] . identifier[objects] . identifier[filter] ( identifier[user] = identifier[user] , identifier[scheduled_activity__block__in] = identifier[surrounding_blocks] ). identifier[select_related] (
literal[string] , literal[string] , literal[string] ))
identifier[block_signup_map] ={ identifier[s] . identifier[scheduled_activity] . identifier[block] . identifier[id] : identifier[s] . identifier[scheduled_activity] keyword[for] identifier[s] keyword[in] identifier[signups] }
keyword[for] identifier[b] keyword[in] identifier[surrounding_blocks] :
identifier[current_sched_act] = identifier[block_signup_map] . identifier[get] ( identifier[b] . identifier[id] , keyword[None] )
keyword[if] identifier[current_sched_act] :
identifier[current_signup] = identifier[current_sched_act] . identifier[title_with_flags]
identifier[current_signup_cancelled] = identifier[current_sched_act] . identifier[cancelled]
identifier[current_signup_sticky] = identifier[current_sched_act] . identifier[activity] . identifier[sticky]
identifier[rooms] = identifier[current_sched_act] . identifier[get_scheduled_rooms] ()
keyword[else] :
identifier[current_signup] = keyword[None]
identifier[current_signup_cancelled] = keyword[False]
identifier[current_signup_sticky] = keyword[False]
identifier[rooms] = keyword[None]
identifier[flags] = literal[string] keyword[if] identifier[b] . identifier[locked] keyword[else] literal[string]
identifier[blk_today] = identifier[b] . identifier[is_today] ()
keyword[if] identifier[blk_today] keyword[and] keyword[not] identifier[current_signup] :
identifier[flags] += literal[string]
keyword[if] identifier[current_signup_cancelled] :
identifier[flags] += literal[string]
keyword[if] identifier[current_signup_cancelled] :
identifier[current_signup] = identifier[current_signup] . identifier[replace] ( literal[string] , literal[string] )
identifier[info] ={
literal[string] : identifier[b] . identifier[id] ,
literal[string] : identifier[b] ,
literal[string] : identifier[b] . identifier[block_letter] ,
literal[string] : identifier[current_signup] ,
literal[string] : identifier[current_signup_cancelled] ,
literal[string] : identifier[current_signup_sticky] ,
literal[string] : identifier[b] . identifier[locked] ,
literal[string] : identifier[b] . identifier[date] ,
literal[string] : identifier[flags] ,
literal[string] : identifier[blk_today] ,
literal[string] : identifier[b] . identifier[signup_time] ,
literal[string] : identifier[b] . identifier[signup_time_future] ,
literal[string] : identifier[rooms]
}
identifier[schedule] . identifier[append] ( identifier[info] )
keyword[if] identifier[blk_today] keyword[and] keyword[not] identifier[current_signup] :
identifier[no_signup_today] = keyword[True]
keyword[return] identifier[schedule] , identifier[no_signup_today] | def gen_schedule(user, num_blocks=6, surrounding_blocks=None):
"""Generate a list of information about a block and a student's current activity signup.
Returns:
schedule
no_signup_today
"""
no_signup_today = None
schedule = []
if surrounding_blocks is None:
surrounding_blocks = EighthBlock.objects.get_upcoming_blocks(num_blocks) # depends on [control=['if'], data=['surrounding_blocks']]
if len(surrounding_blocks) == 0:
return (None, False) # depends on [control=['if'], data=[]]
# Use select_related to reduce query count
signups = EighthSignup.objects.filter(user=user, scheduled_activity__block__in=surrounding_blocks).select_related('scheduled_activity', 'scheduled_activity__block', 'scheduled_activity__activity')
block_signup_map = {s.scheduled_activity.block.id: s.scheduled_activity for s in signups}
for b in surrounding_blocks:
current_sched_act = block_signup_map.get(b.id, None)
if current_sched_act:
current_signup = current_sched_act.title_with_flags
current_signup_cancelled = current_sched_act.cancelled
current_signup_sticky = current_sched_act.activity.sticky
rooms = current_sched_act.get_scheduled_rooms() # depends on [control=['if'], data=[]]
else:
current_signup = None
current_signup_cancelled = False
current_signup_sticky = False
rooms = None
# warning flag (red block text and signup link) if no signup today
# cancelled flag (red activity text) if cancelled
flags = 'locked' if b.locked else 'open'
blk_today = b.is_today()
if blk_today and (not current_signup):
flags += ' warning' # depends on [control=['if'], data=[]]
if current_signup_cancelled:
flags += ' cancelled warning' # depends on [control=['if'], data=[]]
if current_signup_cancelled:
# don't duplicate this info; already caught
current_signup = current_signup.replace(' (Cancelled)', '') # depends on [control=['if'], data=[]]
info = {'id': b.id, 'block': b, 'block_letter': b.block_letter, 'current_signup': current_signup, 'current_signup_cancelled': current_signup_cancelled, 'current_signup_sticky': current_signup_sticky, 'locked': b.locked, 'date': b.date, 'flags': flags, 'is_today': blk_today, 'signup_time': b.signup_time, 'signup_time_future': b.signup_time_future, 'rooms': rooms}
schedule.append(info)
if blk_today and (not current_signup):
no_signup_today = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['b']]
return (schedule, no_signup_today) |
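The flag string is the only piece free of ORM state; it can be checked in isolation (values made up):
locked, blk_today = False, True
current_signup, current_signup_cancelled = None, False
flags = 'locked' if locked else 'open'
if blk_today and not current_signup:
    flags += ' warning'
if current_signup_cancelled:
    flags += ' cancelled warning'
print(flags)  # -> 'open warning'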
def cull(data, index, min=None, max=None):
    """Sieve an emcee cloud by excluding walkers whose search variable 'index' is
    smaller than 'min' or larger than 'max'."""
    ret = data
    if min is not None:
        ret = ret[ret[:, index] > min, :]
    if max is not None:
        ret = ret[ret[:, index] < max, :]
return ret | def function[cull, parameter[data, index, min, max]]:
    constant[Sieve an emcee cloud by excluding walkers whose search variable 'index' is
smaller than 'min' or larger than 'max'.]
variable[ret] assign[=] name[data]
if compare[name[min] is_not constant[None]] begin[:]
variable[ret] assign[=] call[name[ret]][tuple[[<ast.Compare object at 0x7da2046239d0>, <ast.Slice object at 0x7da207f01f90>]]]
if compare[name[max] is_not constant[None]] begin[:]
variable[ret] assign[=] call[name[ret]][tuple[[<ast.Compare object at 0x7da212d41a80>, <ast.Slice object at 0x7da212db5060>]]]
return[name[ret]] | keyword[def] identifier[cull] ( identifier[data] , identifier[index] , identifier[min] = keyword[None] , identifier[max] = keyword[None] ):
literal[string]
identifier[ret] = identifier[data]
keyword[if] identifier[min] keyword[is] keyword[not] keyword[None] :
identifier[ret] = identifier[ret] [ identifier[ret] [:, identifier[index] ]> identifier[min] ,:]
keyword[if] identifier[max] keyword[is] keyword[not] keyword[None] :
identifier[ret] = identifier[ret] [ identifier[ret] [:, identifier[index] ]< identifier[max] ,:]
keyword[return] identifier[ret] | def cull(data, index, min=None, max=None):
"""Sieve an emcee clouds by excluding walkers with search variable 'index'
smaller than 'min' or larger than 'max'."""
ret = data
if min is not None:
ret = ret[ret[:, index] > min, :] # depends on [control=['if'], data=['min']]
if max is not None:
ret = ret[ret[:, index] < max, :] # depends on [control=['if'], data=['max']]
return ret |
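A runnable NumPy check (cull expects a 2-D array of walker positions; the numbers are made up):
import numpy as np

walkers = np.array([[0.1, 5.0],
                    [0.2, 9.0],
                    [0.3, 2.0]])
# Keep only walkers whose column-1 value lies strictly between 3 and 8.
print(cull(walkers, 1, min=3, max=8))  # -> [[0.1 5. ]]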