Columns (four parallel string representations per example):
- code: the original Python source (string lengths 75 to 104k)
- code_sememe: an AST-based sememe linearization of the source (string lengths 47 to 309k)
- token_type: the source as a stream of keyword[...], identifier[...], and literal[...] tokens (string lengths 215 to 214k)
- code_dependency: the source annotated with "# depends on [control=..., data=...]" comments (string lengths 75 to 155k)
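If this dump follows the Hugging Face `datasets` schema sketched above, a row can be rebuilt and inspected as below (a minimal sketch under that assumption; the row contents are abbreviated placeholders, not actual dataset entries):

from datasets import Dataset

# Hypothetical mini-table with the same four columns; real rows hold
# full functions and their derived representations.
rows = Dataset.from_dict({
    "code": ["def clear(self):\n    return self.redis.delete(self.name)"],
    "code_sememe": ["def function[clear, parameter[self]]: ..."],
    "token_type": ["keyword[def] identifier[clear] ( identifier[self] ): ..."],
    "code_dependency": ["def clear(self):\n    return self.redis.delete(self.name)"],
})

print(rows.column_names)  # ['code', 'code_sememe', 'token_type', 'code_dependency']
print(rows[0]["code"])    # original source of the first example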
def name_for_number(numobj, lang, script=None, region=None):
    """Returns a carrier name for the given PhoneNumber object, in the
    language provided.

    The carrier name is the one the number was originally allocated to,
    however if the country supports mobile number portability the number
    might not belong to the returned carrier anymore. If no mapping is
    found an empty string is returned.

    This function explicitly checks the validity of the number passed in

    Arguments:
    numobj -- The PhoneNumber object for which we want to get a carrier name.
    lang -- A 2-letter lowercase ISO 639-1 language code for the language in
          which the description should be returned (e.g. "en")
    script -- A 4-letter titlecase (first letter uppercase, rest lowercase)
          ISO script code as defined in ISO 15924, separated by an
          underscore (e.g. "Hant")
    region -- A 2-letter uppercase ISO 3166-1 country code (e.g. "GB")

    Returns a carrier name in the given language code, for the given phone
    number, or an empty string if no description is available.
    """
    ntype = number_type(numobj)
    if _is_mobile(ntype):
        return name_for_valid_number(numobj, lang, script, region)
    return U_EMPTY_STRING
def function[name_for_number, parameter[numobj, lang, script, region]]: constant[Returns a carrier name for the given PhoneNumber object, in the language provided. The carrier name is the one the number was originally allocated to, however if the country supports mobile number portability the number might not belong to the returned carrier anymore. If no mapping is found an empty string is returned. This function explicitly checks the validity of the number passed in Arguments: numobj -- The PhoneNumber object for which we want to get a carrier name. lang -- A 2-letter lowercase ISO 639-1 language code for the language in which the description should be returned (e.g. "en") script -- A 4-letter titlecase (first letter uppercase, rest lowercase) ISO script code as defined in ISO 15924, separated by an underscore (e.g. "Hant") region -- A 2-letter uppercase ISO 3166-1 country code (e.g. "GB") Returns a carrier name in the given language code, for the given phone number, or an empty string if no description is available. ] variable[ntype] assign[=] call[name[number_type], parameter[name[numobj]]] if call[name[_is_mobile], parameter[name[ntype]]] begin[:] return[call[name[name_for_valid_number], parameter[name[numobj], name[lang], name[script], name[region]]]] return[name[U_EMPTY_STRING]]
keyword[def] identifier[name_for_number] ( identifier[numobj] , identifier[lang] , identifier[script] = keyword[None] , identifier[region] = keyword[None] ): literal[string] identifier[ntype] = identifier[number_type] ( identifier[numobj] ) keyword[if] identifier[_is_mobile] ( identifier[ntype] ): keyword[return] identifier[name_for_valid_number] ( identifier[numobj] , identifier[lang] , identifier[script] , identifier[region] ) keyword[return] identifier[U_EMPTY_STRING]
def name_for_number(numobj, lang, script=None, region=None):
    """Returns a carrier name for the given PhoneNumber object, in the
    language provided.

    The carrier name is the one the number was originally allocated to,
    however if the country supports mobile number portability the number
    might not belong to the returned carrier anymore. If no mapping is
    found an empty string is returned.

    This function explicitly checks the validity of the number passed in

    Arguments:
    numobj -- The PhoneNumber object for which we want to get a carrier name.
    lang -- A 2-letter lowercase ISO 639-1 language code for the language in
          which the description should be returned (e.g. "en")
    script -- A 4-letter titlecase (first letter uppercase, rest lowercase)
          ISO script code as defined in ISO 15924, separated by an
          underscore (e.g. "Hant")
    region -- A 2-letter uppercase ISO 3166-1 country code (e.g. "GB")

    Returns a carrier name in the given language code, for the given phone
    number, or an empty string if no description is available.
    """
    ntype = number_type(numobj)
    if _is_mobile(ntype):
        return name_for_valid_number(numobj, lang, script, region) # depends on [control=['if'], data=[]]
    return U_EMPTY_STRING
def handleResponseEnd(self):
    """
    Extends handleResponseEnd to not care about the user closing/refreshing
    their browser before the response is finished. Also calls cacheContent
    in a thread that we don't care when it finishes.
    """
    try:
        if not self._finished:
            reactor.callInThread(
                self.resource.cacheContent,
                self.father,
                self._response,
                self.buffer
            )
        proxy.ProxyClient.handleResponseEnd(self)
    except RuntimeError:
        # because we don't care if the user hits
        # refresh before the request is done
        pass
def function[handleResponseEnd, parameter[self]]: constant[ Extends handleResponseEnd to not care about the user closing/refreshing their browser before the response is finished. Also calls cacheContent in a thread that we don't care when it finishes. ] <ast.Try object at 0x7da20e955c00>
keyword[def] identifier[handleResponseEnd] ( identifier[self] ): literal[string] keyword[try] : keyword[if] keyword[not] identifier[self] . identifier[_finished] : identifier[reactor] . identifier[callInThread] ( identifier[self] . identifier[resource] . identifier[cacheContent] , identifier[self] . identifier[father] , identifier[self] . identifier[_response] , identifier[self] . identifier[buffer] ) identifier[proxy] . identifier[ProxyClient] . identifier[handleResponseEnd] ( identifier[self] ) keyword[except] identifier[RuntimeError] : keyword[pass]
def handleResponseEnd(self):
    """
    Extends handleResponseEnd to not care about the user closing/refreshing
    their browser before the response is finished. Also calls cacheContent
    in a thread that we don't care when it finishes.
    """
    try:
        if not self._finished:
            reactor.callInThread(self.resource.cacheContent, self.father, self._response, self.buffer) # depends on [control=['if'], data=[]]
        proxy.ProxyClient.handleResponseEnd(self) # depends on [control=['try'], data=[]]
    except RuntimeError:
        # because we don't care if the user hits
        # refresh before the request is done
        pass # depends on [control=['except'], data=[]]
def ToJson(self):
    """
    Convert object members to a dictionary that can be parsed as JSON.

    Returns:
         dict:
    """
    jsn = {}
    jsn["txid"] = self.Hash.To0xString()
    jsn["size"] = self.Size()
    jsn["type"] = TransactionType.ToName(self.Type)
    jsn["version"] = self.Version
    jsn["attributes"] = [attr.ToJson() for attr in self.Attributes]
    jsn["vout"] = [out.ToJson(i) for i, out in enumerate(self.outputs)]
    jsn["vin"] = [input.ToJson() for input in self.inputs]
    jsn["sys_fee"] = self.SystemFee().ToNeoJsonString()
    jsn["net_fee"] = self.NetworkFee().ToNeoJsonString()
    jsn["scripts"] = [script.ToJson() for script in self.scripts]
    return jsn
def function[ToJson, parameter[self]]: constant[ Convert object members to a dictionary that can be parsed as JSON. Returns: dict: ] variable[jsn] assign[=] dictionary[[], []] call[name[jsn]][constant[txid]] assign[=] call[name[self].Hash.To0xString, parameter[]] call[name[jsn]][constant[size]] assign[=] call[name[self].Size, parameter[]] call[name[jsn]][constant[type]] assign[=] call[name[TransactionType].ToName, parameter[name[self].Type]] call[name[jsn]][constant[version]] assign[=] name[self].Version call[name[jsn]][constant[attributes]] assign[=] <ast.ListComp object at 0x7da18bc73bb0> call[name[jsn]][constant[vout]] assign[=] <ast.ListComp object at 0x7da18bc705b0> call[name[jsn]][constant[vin]] assign[=] <ast.ListComp object at 0x7da20c6c76a0> call[name[jsn]][constant[sys_fee]] assign[=] call[call[name[self].SystemFee, parameter[]].ToNeoJsonString, parameter[]] call[name[jsn]][constant[net_fee]] assign[=] call[call[name[self].NetworkFee, parameter[]].ToNeoJsonString, parameter[]] call[name[jsn]][constant[scripts]] assign[=] <ast.ListComp object at 0x7da2041d82e0> return[name[jsn]]
keyword[def] identifier[ToJson] ( identifier[self] ): literal[string] identifier[jsn] ={} identifier[jsn] [ literal[string] ]= identifier[self] . identifier[Hash] . identifier[To0xString] () identifier[jsn] [ literal[string] ]= identifier[self] . identifier[Size] () identifier[jsn] [ literal[string] ]= identifier[TransactionType] . identifier[ToName] ( identifier[self] . identifier[Type] ) identifier[jsn] [ literal[string] ]= identifier[self] . identifier[Version] identifier[jsn] [ literal[string] ]=[ identifier[attr] . identifier[ToJson] () keyword[for] identifier[attr] keyword[in] identifier[self] . identifier[Attributes] ] identifier[jsn] [ literal[string] ]=[ identifier[out] . identifier[ToJson] ( identifier[i] ) keyword[for] identifier[i] , identifier[out] keyword[in] identifier[enumerate] ( identifier[self] . identifier[outputs] )] identifier[jsn] [ literal[string] ]=[ identifier[input] . identifier[ToJson] () keyword[for] identifier[input] keyword[in] identifier[self] . identifier[inputs] ] identifier[jsn] [ literal[string] ]= identifier[self] . identifier[SystemFee] (). identifier[ToNeoJsonString] () identifier[jsn] [ literal[string] ]= identifier[self] . identifier[NetworkFee] (). identifier[ToNeoJsonString] () identifier[jsn] [ literal[string] ]=[ identifier[script] . identifier[ToJson] () keyword[for] identifier[script] keyword[in] identifier[self] . identifier[scripts] ] keyword[return] identifier[jsn]
def ToJson(self):
    """
    Convert object members to a dictionary that can be parsed as JSON.

    Returns:
         dict:
    """
    jsn = {}
    jsn['txid'] = self.Hash.To0xString()
    jsn['size'] = self.Size()
    jsn['type'] = TransactionType.ToName(self.Type)
    jsn['version'] = self.Version
    jsn['attributes'] = [attr.ToJson() for attr in self.Attributes]
    jsn['vout'] = [out.ToJson(i) for (i, out) in enumerate(self.outputs)]
    jsn['vin'] = [input.ToJson() for input in self.inputs]
    jsn['sys_fee'] = self.SystemFee().ToNeoJsonString()
    jsn['net_fee'] = self.NetworkFee().ToNeoJsonString()
    jsn['scripts'] = [script.ToJson() for script in self.scripts]
    return jsn
def parse(self, data):
    # type: (bytes) -> None
    '''
    Parse the passed in data into a UDF Entity ID.

    Parameters:
     data - The data to parse.
    Returns:
     Nothing.
    '''
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('UDF Entity ID already initialized')

    (self.flags, self.identifier, self.suffix) = struct.unpack_from(self.FMT, data, 0)

    self._initialized = True
def function[parse, parameter[self, data]]: constant[ Parse the passed in data into a UDF Entity ID. Parameters: data - The data to parse. Returns: Nothing. ] if name[self]._initialized begin[:] <ast.Raise object at 0x7da20c6c4a90> <ast.Tuple object at 0x7da20c6c40a0> assign[=] call[name[struct].unpack_from, parameter[name[self].FMT, name[data], constant[0]]] name[self]._initialized assign[=] constant[True]
keyword[def] identifier[parse] ( identifier[self] , identifier[data] ): literal[string] keyword[if] identifier[self] . identifier[_initialized] : keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] ) ( identifier[self] . identifier[flags] , identifier[self] . identifier[identifier] , identifier[self] . identifier[suffix] )= identifier[struct] . identifier[unpack_from] ( identifier[self] . identifier[FMT] , identifier[data] , literal[int] ) identifier[self] . identifier[_initialized] = keyword[True]
def parse(self, data):
    # type: (bytes) -> None
    '\n        Parse the passed in data into a UDF Entity ID.\n\n        Parameters:\n         data - The data to parse.\n        Returns:\n         Nothing.\n        '
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('UDF Entity ID already initialized') # depends on [control=['if'], data=[]]
    (self.flags, self.identifier, self.suffix) = struct.unpack_from(self.FMT, data, 0)
    self._initialized = True
def open(self):
    """
    Called on new websocket connection.
    """
    sess_id = self._get_sess_id()
    if sess_id:
        self.application.pc.websockets[self._get_sess_id()] = self
        self.write_message(json.dumps({"cmd": "status", "status": "open"}))
    else:
        self.write_message(json.dumps({"cmd": "error", "error": "Please login", "code": 401}))
def function[open, parameter[self]]: constant[ Called on new websocket connection. ] variable[sess_id] assign[=] call[name[self]._get_sess_id, parameter[]] if name[sess_id] begin[:] call[name[self].application.pc.websockets][call[name[self]._get_sess_id, parameter[]]] assign[=] name[self] call[name[self].write_message, parameter[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da20e9619c0>, <ast.Constant object at 0x7da20e9637c0>], [<ast.Constant object at 0x7da20e962110>, <ast.Constant object at 0x7da20e9605b0>]]]]]]
keyword[def] identifier[open] ( identifier[self] ): literal[string] identifier[sess_id] = identifier[self] . identifier[_get_sess_id] () keyword[if] identifier[sess_id] : identifier[self] . identifier[application] . identifier[pc] . identifier[websockets] [ identifier[self] . identifier[_get_sess_id] ()]= identifier[self] identifier[self] . identifier[write_message] ( identifier[json] . identifier[dumps] ({ literal[string] : literal[string] , literal[string] : literal[string] })) keyword[else] : identifier[self] . identifier[write_message] ( identifier[json] . identifier[dumps] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[int] }))
def open(self):
    """
    Called on new websocket connection.
    """
    sess_id = self._get_sess_id()
    if sess_id:
        self.application.pc.websockets[self._get_sess_id()] = self
        self.write_message(json.dumps({'cmd': 'status', 'status': 'open'})) # depends on [control=['if'], data=[]]
    else:
        self.write_message(json.dumps({'cmd': 'error', 'error': 'Please login', 'code': 401}))
def evaluate(self):
    """Evaluate functional value of previous iteration."""

    if self.opt['AccurateDFid']:
        DX = self.reconstruct()
        W = self.dstep.W
        S = self.dstep.S
    else:
        W = mp_W
        S = mp_S
        Xf = mp_Zf
        Df = mp_Df
        DX = sl.irfftn(sl.inner(Df[np.newaxis, ...], Xf,
                                axis=self.xstep.cri.axisM + 1),
                       self.xstep.cri.Nv,
                       np.array(self.xstep.cri.axisN) + 1)

    dfd = (np.linalg.norm(W * (DX - S))**2) / 2.0
    rl1 = np.sum(np.abs(self.getcoef()))
    obj = dfd + self.xstep.lmbda * rl1
    return (obj, dfd, rl1)
def function[evaluate, parameter[self]]: constant[Evaluate functional value of previous iteration.] if call[name[self].opt][constant[AccurateDFid]] begin[:] variable[DX] assign[=] call[name[self].reconstruct, parameter[]] variable[W] assign[=] name[self].dstep.W variable[S] assign[=] name[self].dstep.S variable[dfd] assign[=] binary_operation[binary_operation[call[name[np].linalg.norm, parameter[binary_operation[name[W] * binary_operation[name[DX] - name[S]]]]] ** constant[2]] / constant[2.0]] variable[rl1] assign[=] call[name[np].sum, parameter[call[name[np].abs, parameter[call[name[self].getcoef, parameter[]]]]]] variable[obj] assign[=] binary_operation[name[dfd] + binary_operation[name[self].xstep.lmbda * name[rl1]]] return[tuple[[<ast.Name object at 0x7da1b06eada0>, <ast.Name object at 0x7da1b06eabf0>, <ast.Name object at 0x7da1b06eacb0>]]]
keyword[def] identifier[evaluate] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[opt] [ literal[string] ]: identifier[DX] = identifier[self] . identifier[reconstruct] () identifier[W] = identifier[self] . identifier[dstep] . identifier[W] identifier[S] = identifier[self] . identifier[dstep] . identifier[S] keyword[else] : identifier[W] = identifier[mp_W] identifier[S] = identifier[mp_S] identifier[Xf] = identifier[mp_Zf] identifier[Df] = identifier[mp_Df] identifier[DX] = identifier[sl] . identifier[irfftn] ( identifier[sl] . identifier[inner] ( identifier[Df] [ identifier[np] . identifier[newaxis] ,...], identifier[Xf] , identifier[axis] = identifier[self] . identifier[xstep] . identifier[cri] . identifier[axisM] + literal[int] ), identifier[self] . identifier[xstep] . identifier[cri] . identifier[Nv] , identifier[np] . identifier[array] ( identifier[self] . identifier[xstep] . identifier[cri] . identifier[axisN] )+ literal[int] ) identifier[dfd] =( identifier[np] . identifier[linalg] . identifier[norm] ( identifier[W] *( identifier[DX] - identifier[S] ))** literal[int] )/ literal[int] identifier[rl1] = identifier[np] . identifier[sum] ( identifier[np] . identifier[abs] ( identifier[self] . identifier[getcoef] ())) identifier[obj] = identifier[dfd] + identifier[self] . identifier[xstep] . identifier[lmbda] * identifier[rl1] keyword[return] ( identifier[obj] , identifier[dfd] , identifier[rl1] )
def evaluate(self):
    """Evaluate functional value of previous iteration."""
    if self.opt['AccurateDFid']:
        DX = self.reconstruct()
        W = self.dstep.W
        S = self.dstep.S # depends on [control=['if'], data=[]]
    else:
        W = mp_W
        S = mp_S
        Xf = mp_Zf
        Df = mp_Df
        DX = sl.irfftn(sl.inner(Df[np.newaxis, ...], Xf, axis=self.xstep.cri.axisM + 1), self.xstep.cri.Nv, np.array(self.xstep.cri.axisN) + 1)
    dfd = np.linalg.norm(W * (DX - S)) ** 2 / 2.0
    rl1 = np.sum(np.abs(self.getcoef()))
    obj = dfd + self.xstep.lmbda * rl1
    return (obj, dfd, rl1)
def get_closest_point_to_line(A, B, P):
    '''
    Find the closest point on a line. This point will be reproducible by a Hue lamp.
    '''
    AP = XYPoint(P.x - A.x, P.y - A.y)
    AB = XYPoint(B.x - A.x, B.y - A.y)
    ab2 = AB.x * AB.x + AB.y * AB.y
    ap_ab = AP.x * AB.x + AP.y * AB.y
    t = ap_ab / ab2

    if t < 0.0:
        t = 0.0
    elif t > 1.0:
        t = 1.0

    return XYPoint(A.x + AB.x * t, A.y + AB.y * t)
def function[get_closest_point_to_line, parameter[A, B, P]]: constant[ Find the closest point on a line. This point will be reproducible by a Hue lamp. ] variable[AP] assign[=] call[name[XYPoint], parameter[binary_operation[name[P].x - name[A].x], binary_operation[name[P].y - name[A].y]]] variable[AB] assign[=] call[name[XYPoint], parameter[binary_operation[name[B].x - name[A].x], binary_operation[name[B].y - name[A].y]]] variable[ab2] assign[=] binary_operation[binary_operation[name[AB].x * name[AB].x] + binary_operation[name[AB].y * name[AB].y]] variable[ap_ab] assign[=] binary_operation[binary_operation[name[AP].x * name[AB].x] + binary_operation[name[AP].y * name[AB].y]] variable[t] assign[=] binary_operation[name[ap_ab] / name[ab2]] if compare[name[t] less[<] constant[0.0]] begin[:] variable[t] assign[=] constant[0.0] return[call[name[XYPoint], parameter[binary_operation[name[A].x + binary_operation[name[AB].x * name[t]]], binary_operation[name[A].y + binary_operation[name[AB].y * name[t]]]]]]
keyword[def] identifier[get_closest_point_to_line] ( identifier[A] , identifier[B] , identifier[P] ): literal[string] identifier[AP] = identifier[XYPoint] ( identifier[P] . identifier[x] - identifier[A] . identifier[x] , identifier[P] . identifier[y] - identifier[A] . identifier[y] ) identifier[AB] = identifier[XYPoint] ( identifier[B] . identifier[x] - identifier[A] . identifier[x] , identifier[B] . identifier[y] - identifier[A] . identifier[y] ) identifier[ab2] = identifier[AB] . identifier[x] * identifier[AB] . identifier[x] + identifier[AB] . identifier[y] * identifier[AB] . identifier[y] identifier[ap_ab] = identifier[AP] . identifier[x] * identifier[AB] . identifier[x] + identifier[AP] . identifier[y] * identifier[AB] . identifier[y] identifier[t] = identifier[ap_ab] / identifier[ab2] keyword[if] identifier[t] < literal[int] : identifier[t] = literal[int] keyword[elif] identifier[t] > literal[int] : identifier[t] = literal[int] keyword[return] identifier[XYPoint] ( identifier[A] . identifier[x] + identifier[AB] . identifier[x] * identifier[t] , identifier[A] . identifier[y] + identifier[AB] . identifier[y] * identifier[t] )
def get_closest_point_to_line(A, B, P):
    """
    Find the closest point on a line. This point will be reproducible by a Hue lamp.
    """
    AP = XYPoint(P.x - A.x, P.y - A.y)
    AB = XYPoint(B.x - A.x, B.y - A.y)
    ab2 = AB.x * AB.x + AB.y * AB.y
    ap_ab = AP.x * AB.x + AP.y * AB.y
    t = ap_ab / ab2
    if t < 0.0:
        t = 0.0 # depends on [control=['if'], data=['t']]
    elif t > 1.0:
        t = 1.0 # depends on [control=['if'], data=['t']]
    return XYPoint(A.x + AB.x * t, A.y + AB.y * t)
def get_type_size(self, type):
    """
    Get the size of this type for converting a hex string to the type.
    Return 0 if the size is not known.
    """
    typeobj = self.get_type(type)
    if hasattr(typeobj, 'size'):
        return typeobj.size()
    return 0
def function[get_type_size, parameter[self, type]]: constant[ Get the size of this type for converting a hex string to the type. Return 0 if the size is not known. ] variable[typeobj] assign[=] call[name[self].get_type, parameter[name[type]]] if call[name[hasattr], parameter[name[typeobj], constant[size]]] begin[:] return[call[name[typeobj].size, parameter[]]] return[constant[0]]
keyword[def] identifier[get_type_size] ( identifier[self] , identifier[type] ): literal[string] identifier[typeobj] = identifier[self] . identifier[get_type] ( identifier[type] ) keyword[if] identifier[hasattr] ( identifier[typeobj] , literal[string] ): keyword[return] identifier[typeobj] . identifier[size] () keyword[return] literal[int]
def get_type_size(self, type):
    """
    Get the size of this type for converting a hex string to the type.
    Return 0 if the size is not known.
    """
    typeobj = self.get_type(type)
    if hasattr(typeobj, 'size'):
        return typeobj.size() # depends on [control=['if'], data=[]]
    return 0
def add_permissions(self, grp_name, resource, permissions):
    """
    Add additional permissions for the group associated with the given resource.

    Args:
        grp_name (string): Name of group.
        resource (intern.resource.boss.BossResource): Identifies which data model object to operate on.
        permissions (list): List of permissions to add to the given resource.

    Raises:
        requests.HTTPError on failure.
    """
    self.service.add_permissions(
        grp_name, resource, permissions,
        self.url_prefix, self.auth, self.session, self.session_send_opts)
def function[add_permissions, parameter[self, grp_name, resource, permissions]]: constant[ Add additional permissions for the group associated with the given resource. Args: grp_name (string): Name of group. resource (intern.resource.boss.BossResource): Identifies which data model object to operate on. permissions (list): List of permissions to add to the given resource. Raises: requests.HTTPError on failure. ] call[name[self].service.add_permissions, parameter[name[grp_name], name[resource], name[permissions], name[self].url_prefix, name[self].auth, name[self].session, name[self].session_send_opts]]
keyword[def] identifier[add_permissions] ( identifier[self] , identifier[grp_name] , identifier[resource] , identifier[permissions] ): literal[string] identifier[self] . identifier[service] . identifier[add_permissions] ( identifier[grp_name] , identifier[resource] , identifier[permissions] , identifier[self] . identifier[url_prefix] , identifier[self] . identifier[auth] , identifier[self] . identifier[session] , identifier[self] . identifier[session_send_opts] )
def add_permissions(self, grp_name, resource, permissions):
    """
    Add additional permissions for the group associated with the given resource.

    Args:
        grp_name (string): Name of group.
        resource (intern.resource.boss.BossResource): Identifies which data model object to operate on.
        permissions (list): List of permissions to add to the given resource.

    Raises:
        requests.HTTPError on failure.
    """
    self.service.add_permissions(grp_name, resource, permissions, self.url_prefix, self.auth, self.session, self.session_send_opts)
def find_container_traits(cls_or_string):
    """
    Find the container traits type of a declaration.

    Args:
        cls_or_string (str | declarations.declaration_t): a string

    Returns:
        declarations.container_traits: a container traits
    """
    if utils.is_str(cls_or_string):
        if not templates.is_instantiation(cls_or_string):
            return None
        name = templates.name(cls_or_string)
        if name.startswith('std::'):
            name = name[len('std::'):]
        if name.startswith('std::tr1::'):
            name = name[len('std::tr1::'):]
        for cls_traits in all_container_traits:
            if cls_traits.name() == name:
                return cls_traits
    else:
        if isinstance(cls_or_string, class_declaration.class_types):
            # Look in the cache.
            if cls_or_string.cache.container_traits is not None:
                return cls_or_string.cache.container_traits

        # Look for a container traits
        for cls_traits in all_container_traits:
            if cls_traits.is_my_case(cls_or_string):
                # Store in the cache
                if isinstance(cls_or_string, class_declaration.class_types):
                    cls_or_string.cache.container_traits = cls_traits
                return cls_traits
def function[find_container_traits, parameter[cls_or_string]]: constant[ Find the container traits type of a declaration. Args: cls_or_string (str | declarations.declaration_t): a string Returns: declarations.container_traits: a container traits ] if call[name[utils].is_str, parameter[name[cls_or_string]]] begin[:] if <ast.UnaryOp object at 0x7da20c76f490> begin[:] return[constant[None]] variable[name] assign[=] call[name[templates].name, parameter[name[cls_or_string]]] if call[name[name].startswith, parameter[constant[std::]]] begin[:] variable[name] assign[=] call[name[name]][<ast.Slice object at 0x7da204622fb0>] if call[name[name].startswith, parameter[constant[std::tr1::]]] begin[:] variable[name] assign[=] call[name[name]][<ast.Slice object at 0x7da2041da170>] for taget[name[cls_traits]] in starred[name[all_container_traits]] begin[:] if compare[call[name[cls_traits].name, parameter[]] equal[==] name[name]] begin[:] return[name[cls_traits]]
keyword[def] identifier[find_container_traits] ( identifier[cls_or_string] ): literal[string] keyword[if] identifier[utils] . identifier[is_str] ( identifier[cls_or_string] ): keyword[if] keyword[not] identifier[templates] . identifier[is_instantiation] ( identifier[cls_or_string] ): keyword[return] keyword[None] identifier[name] = identifier[templates] . identifier[name] ( identifier[cls_or_string] ) keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): identifier[name] = identifier[name] [ identifier[len] ( literal[string] ):] keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): identifier[name] = identifier[name] [ identifier[len] ( literal[string] ):] keyword[for] identifier[cls_traits] keyword[in] identifier[all_container_traits] : keyword[if] identifier[cls_traits] . identifier[name] ()== identifier[name] : keyword[return] identifier[cls_traits] keyword[else] : keyword[if] identifier[isinstance] ( identifier[cls_or_string] , identifier[class_declaration] . identifier[class_types] ): keyword[if] identifier[cls_or_string] . identifier[cache] . identifier[container_traits] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[cls_or_string] . identifier[cache] . identifier[container_traits] keyword[for] identifier[cls_traits] keyword[in] identifier[all_container_traits] : keyword[if] identifier[cls_traits] . identifier[is_my_case] ( identifier[cls_or_string] ): keyword[if] identifier[isinstance] ( identifier[cls_or_string] , identifier[class_declaration] . identifier[class_types] ): identifier[cls_or_string] . identifier[cache] . identifier[container_traits] = identifier[cls_traits] keyword[return] identifier[cls_traits]
def find_container_traits(cls_or_string):
    """
    Find the container traits type of a declaration.

    Args:
        cls_or_string (str | declarations.declaration_t): a string

    Returns:
        declarations.container_traits: a container traits
    """
    if utils.is_str(cls_or_string):
        if not templates.is_instantiation(cls_or_string):
            return None # depends on [control=['if'], data=[]]
        name = templates.name(cls_or_string)
        if name.startswith('std::'):
            name = name[len('std::'):] # depends on [control=['if'], data=[]]
        if name.startswith('std::tr1::'):
            name = name[len('std::tr1::'):] # depends on [control=['if'], data=[]]
        for cls_traits in all_container_traits:
            if cls_traits.name() == name:
                return cls_traits # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls_traits']] # depends on [control=['if'], data=[]]
    else:
        if isinstance(cls_or_string, class_declaration.class_types):
            # Look in the cache.
            if cls_or_string.cache.container_traits is not None:
                return cls_or_string.cache.container_traits # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        # Look for a container traits
        for cls_traits in all_container_traits:
            if cls_traits.is_my_case(cls_or_string):
                # Store in the cache
                if isinstance(cls_or_string, class_declaration.class_types):
                    cls_or_string.cache.container_traits = cls_traits # depends on [control=['if'], data=[]]
                return cls_traits # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls_traits']]
def get_query_result(self, query_object):
    """Returns a pandas dataframe based on the query object"""

    # Here, we assume that all the queries will use the same datasource, which
    # is a valid assumption for the current setting. In the long term, we may
    # or may not support multiple queries from different data sources.

    timestamp_format = None
    if self.datasource.type == 'table':
        dttm_col = self.datasource.get_col(query_object.granularity)
        if dttm_col:
            timestamp_format = dttm_col.python_date_format

    # The datasource here can be a different backend but the interface is common
    result = self.datasource.query(query_object.to_dict())

    df = result.df
    # Transform the timestamp we received from database to pandas supported
    # datetime format. If no python_date_format is specified, the pattern will
    # be considered as the default ISO date format.
    # If the datetime format is unix, the parse will use the corresponding
    # parsing logic
    if df is not None and not df.empty:
        if DTTM_ALIAS in df.columns:
            if timestamp_format in ('epoch_s', 'epoch_ms'):
                # Column has already been formatted as a timestamp.
                df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(pd.Timestamp)
            else:
                df[DTTM_ALIAS] = pd.to_datetime(
                    df[DTTM_ALIAS], utc=False, format=timestamp_format)
            if self.datasource.offset:
                df[DTTM_ALIAS] += timedelta(hours=self.datasource.offset)
            df[DTTM_ALIAS] += query_object.time_shift

        if self.enforce_numerical_metrics:
            self.df_metrics_to_num(df, query_object)

        df.replace([np.inf, -np.inf], np.nan)

    return {
        'query': result.query,
        'status': result.status,
        'error_message': result.error_message,
        'df': df,
    }
def function[get_query_result, parameter[self, query_object]]: constant[Returns a pandas dataframe based on the query object] variable[timestamp_format] assign[=] constant[None] if compare[name[self].datasource.type equal[==] constant[table]] begin[:] variable[dttm_col] assign[=] call[name[self].datasource.get_col, parameter[name[query_object].granularity]] if name[dttm_col] begin[:] variable[timestamp_format] assign[=] name[dttm_col].python_date_format variable[result] assign[=] call[name[self].datasource.query, parameter[call[name[query_object].to_dict, parameter[]]]] variable[df] assign[=] name[result].df if <ast.BoolOp object at 0x7da1b1ef93f0> begin[:] if compare[name[DTTM_ALIAS] in name[df].columns] begin[:] if compare[name[timestamp_format] in tuple[[<ast.Constant object at 0x7da1b1eb4760>, <ast.Constant object at 0x7da1b1eb4c70>]]] begin[:] call[name[df]][name[DTTM_ALIAS]] assign[=] call[call[name[df]][name[DTTM_ALIAS]].apply, parameter[name[pd].Timestamp]] if name[self].datasource.offset begin[:] <ast.AugAssign object at 0x7da1b20e5db0> <ast.AugAssign object at 0x7da1b20e7670> if name[self].enforce_numerical_metrics begin[:] call[name[self].df_metrics_to_num, parameter[name[df], name[query_object]]] call[name[df].replace, parameter[list[[<ast.Attribute object at 0x7da1b20e5990>, <ast.UnaryOp object at 0x7da1b20e6710>]], name[np].nan]] return[dictionary[[<ast.Constant object at 0x7da1b20e6ce0>, <ast.Constant object at 0x7da1b20e4580>, <ast.Constant object at 0x7da1b20e55a0>, <ast.Constant object at 0x7da1b20e66b0>], [<ast.Attribute object at 0x7da1b20e7640>, <ast.Attribute object at 0x7da1b20e4760>, <ast.Attribute object at 0x7da1b20e5840>, <ast.Name object at 0x7da1b20e4880>]]]
keyword[def] identifier[get_query_result] ( identifier[self] , identifier[query_object] ): literal[string] identifier[timestamp_format] = keyword[None] keyword[if] identifier[self] . identifier[datasource] . identifier[type] == literal[string] : identifier[dttm_col] = identifier[self] . identifier[datasource] . identifier[get_col] ( identifier[query_object] . identifier[granularity] ) keyword[if] identifier[dttm_col] : identifier[timestamp_format] = identifier[dttm_col] . identifier[python_date_format] identifier[result] = identifier[self] . identifier[datasource] . identifier[query] ( identifier[query_object] . identifier[to_dict] ()) identifier[df] = identifier[result] . identifier[df] keyword[if] identifier[df] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[df] . identifier[empty] : keyword[if] identifier[DTTM_ALIAS] keyword[in] identifier[df] . identifier[columns] : keyword[if] identifier[timestamp_format] keyword[in] ( literal[string] , literal[string] ): identifier[df] [ identifier[DTTM_ALIAS] ]= identifier[df] [ identifier[DTTM_ALIAS] ]. identifier[apply] ( identifier[pd] . identifier[Timestamp] ) keyword[else] : identifier[df] [ identifier[DTTM_ALIAS] ]= identifier[pd] . identifier[to_datetime] ( identifier[df] [ identifier[DTTM_ALIAS] ], identifier[utc] = keyword[False] , identifier[format] = identifier[timestamp_format] ) keyword[if] identifier[self] . identifier[datasource] . identifier[offset] : identifier[df] [ identifier[DTTM_ALIAS] ]+= identifier[timedelta] ( identifier[hours] = identifier[self] . identifier[datasource] . identifier[offset] ) identifier[df] [ identifier[DTTM_ALIAS] ]+= identifier[query_object] . identifier[time_shift] keyword[if] identifier[self] . identifier[enforce_numerical_metrics] : identifier[self] . identifier[df_metrics_to_num] ( identifier[df] , identifier[query_object] ) identifier[df] . identifier[replace] ([ identifier[np] . identifier[inf] ,- identifier[np] . identifier[inf] ], identifier[np] . identifier[nan] ) keyword[return] { literal[string] : identifier[result] . identifier[query] , literal[string] : identifier[result] . identifier[status] , literal[string] : identifier[result] . identifier[error_message] , literal[string] : identifier[df] , }
def get_query_result(self, query_object):
    """Returns a pandas dataframe based on the query object"""
    # Here, we assume that all the queries will use the same datasource, which
    # is a valid assumption for the current setting. In the long term, we may
    # or may not support multiple queries from different data sources.
    timestamp_format = None
    if self.datasource.type == 'table':
        dttm_col = self.datasource.get_col(query_object.granularity)
        if dttm_col:
            timestamp_format = dttm_col.python_date_format # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    # The datasource here can be a different backend but the interface is common
    result = self.datasource.query(query_object.to_dict())
    df = result.df
    # Transform the timestamp we received from database to pandas supported
    # datetime format. If no python_date_format is specified, the pattern will
    # be considered as the default ISO date format.
    # If the datetime format is unix, the parse will use the corresponding
    # parsing logic
    if df is not None and (not df.empty):
        if DTTM_ALIAS in df.columns:
            if timestamp_format in ('epoch_s', 'epoch_ms'):
                # Column has already been formatted as a timestamp.
                df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(pd.Timestamp) # depends on [control=['if'], data=[]]
            else:
                df[DTTM_ALIAS] = pd.to_datetime(df[DTTM_ALIAS], utc=False, format=timestamp_format)
            if self.datasource.offset:
                df[DTTM_ALIAS] += timedelta(hours=self.datasource.offset) # depends on [control=['if'], data=[]]
            df[DTTM_ALIAS] += query_object.time_shift # depends on [control=['if'], data=['DTTM_ALIAS']]
        if self.enforce_numerical_metrics:
            self.df_metrics_to_num(df, query_object) # depends on [control=['if'], data=[]]
        df.replace([np.inf, -np.inf], np.nan) # depends on [control=['if'], data=[]]
    return {'query': result.query, 'status': result.status, 'error_message': result.error_message, 'df': df}
def get_country_by_id(self, country_id) -> 'Country':
    """
    Gets a country in this coalition by its ID

    Args:
        country_id: country Id

    Returns: Country
    """
    VALID_POSITIVE_INT.validate(country_id, 'get_country_by_id', exc=ValueError)
    if country_id not in self._countries_by_id.keys():
        for country in self.countries:
            if country.country_id == country_id:
                return country
        raise ValueError(country_id)
    else:
        return self._countries_by_id[country_id]
def function[get_country_by_id, parameter[self, country_id]]: constant[ Gets a country in this coalition by its ID Args: country_id: country Id Returns: Country ] call[name[VALID_POSITIVE_INT].validate, parameter[name[country_id], constant[get_country_by_id]]] if compare[name[country_id] <ast.NotIn object at 0x7da2590d7190> call[name[self]._countries_by_id.keys, parameter[]]] begin[:] for taget[name[country]] in starred[name[self].countries] begin[:] if compare[name[country].country_id equal[==] name[country_id]] begin[:] return[name[country]] <ast.Raise object at 0x7da1b14558d0>
keyword[def] identifier[get_country_by_id] ( identifier[self] , identifier[country_id] )-> literal[string] : literal[string] identifier[VALID_POSITIVE_INT] . identifier[validate] ( identifier[country_id] , literal[string] , identifier[exc] = identifier[ValueError] ) keyword[if] identifier[country_id] keyword[not] keyword[in] identifier[self] . identifier[_countries_by_id] . identifier[keys] (): keyword[for] identifier[country] keyword[in] identifier[self] . identifier[countries] : keyword[if] identifier[country] . identifier[country_id] == identifier[country_id] : keyword[return] identifier[country] keyword[raise] identifier[ValueError] ( identifier[country_id] ) keyword[else] : keyword[return] identifier[self] . identifier[_countries_by_id] [ identifier[country_id] ]
def get_country_by_id(self, country_id) -> 'Country':
    """
    Gets a country in this coalition by its ID

    Args:
        country_id: country Id

    Returns: Country
    """
    VALID_POSITIVE_INT.validate(country_id, 'get_country_by_id', exc=ValueError)
    if country_id not in self._countries_by_id.keys():
        for country in self.countries:
            if country.country_id == country_id:
                return country # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['country']]
        raise ValueError(country_id) # depends on [control=['if'], data=['country_id']]
    else:
        return self._countries_by_id[country_id]
def dict(self, name, key_caps=False, value_caps=False):
    '''
    Returns a JSON dict

    @key_caps: Converts all dictionary keys to uppercase
    @value_caps: Converts all dictionary values to uppercase

    @return: JSON item (may be a variable, list or dictionary)
    '''
    # Invalid Dictionary
    if not isinstance(self.json_data[name], dict):
        raise InvalidDictionaryException

    # Convert key and/or values of dictionary to uppercase
    output = {}
    for key, value in self.json_data[name].items():
        output[key.upper() if key_caps else key] = value.upper() if value_caps else value

    return output
def function[dict, parameter[self, name, key_caps, value_caps]]: constant[ Returns a JSON dict @key_caps: Converts all dictionary keys to uppercase @value_caps: Converts all dictionary values to uppercase @return: JSON item (may be a variable, list or dictionary) ] if <ast.UnaryOp object at 0x7da1b11a3f40> begin[:] <ast.Raise object at 0x7da1b11a0760> variable[output] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b11a25c0>, <ast.Name object at 0x7da1b11a2080>]]] in starred[call[call[name[self].json_data][name[name]].items, parameter[]]] begin[:] call[name[output]][<ast.IfExp object at 0x7da1b11a02b0>] assign[=] <ast.IfExp object at 0x7da1b11a1450> return[name[output]]
keyword[def] identifier[dict] ( identifier[self] , identifier[name] , identifier[key_caps] = keyword[False] , identifier[value_caps] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[json_data] [ identifier[name] ], identifier[dict] ): keyword[raise] identifier[InvalidDictionaryException] identifier[output] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[json_data] [ identifier[name] ]. identifier[items] (): identifier[output] [ identifier[key] . identifier[upper] () keyword[if] identifier[key_caps] keyword[else] identifier[key] ]= identifier[value] . identifier[upper] () keyword[if] identifier[value_caps] keyword[else] identifier[value] keyword[return] identifier[output]
def dict(self, name, key_caps=False, value_caps=False):
    """
    Returns a JSON dict

    @key_caps: Converts all dictionary keys to uppercase
    @value_caps: Converts all dictionary values to uppercase

    @return: JSON item (may be a variable, list or dictionary)
    """
    # Invalid Dictionary
    if not isinstance(self.json_data[name], dict):
        raise InvalidDictionaryException # depends on [control=['if'], data=[]]
    # Convert key and/or values of dictionary to uppercase
    output = {}
    for (key, value) in self.json_data[name].items():
        output[key.upper() if key_caps else key] = value.upper() if value_caps else value # depends on [control=['for'], data=[]]
    return output
def __update(self, task_source):
    """ Recheck next start of tasks from the given one only

    :param task_source: source to check

    :return: None
    """
    next_start = task_source.next_start()
    if next_start is not None:
        if next_start.tzinfo is None or next_start.tzinfo != timezone.utc:
            raise ValueError('Invalid timezone information')

        if self.__next_start is None or next_start < self.__next_start:
            self.__next_start = next_start
            self.__next_sources = [task_source]
        elif next_start == self.__next_start:
            self.__next_sources.append(task_source)
def function[__update, parameter[self, task_source]]: constant[ Recheck next start of tasks from the given one only :param task_source: source to check :return: None ] variable[next_start] assign[=] call[name[task_source].next_start, parameter[]] if compare[name[next_start] is_not constant[None]] begin[:] if <ast.BoolOp object at 0x7da18f09ee30> begin[:] <ast.Raise object at 0x7da18f09e050> if <ast.BoolOp object at 0x7da18f09e770> begin[:] name[self].__next_start assign[=] name[next_start] name[self].__next_sources assign[=] list[[<ast.Name object at 0x7da18eb57010>]]
keyword[def] identifier[__update] ( identifier[self] , identifier[task_source] ): literal[string] identifier[next_start] = identifier[task_source] . identifier[next_start] () keyword[if] identifier[next_start] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[next_start] . identifier[tzinfo] keyword[is] keyword[None] keyword[or] identifier[next_start] . identifier[tzinfo] != identifier[timezone] . identifier[utc] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[self] . identifier[__next_start] keyword[is] keyword[None] keyword[or] identifier[next_start] < identifier[self] . identifier[__next_start] : identifier[self] . identifier[__next_start] = identifier[next_start] identifier[self] . identifier[__next_sources] =[ identifier[task_source] ] keyword[elif] identifier[next_start] == identifier[self] . identifier[__next_start] : identifier[self] . identifier[__next_sources] . identifier[append] ( identifier[task_source] )
def __update(self, task_source):
    """ Recheck next start of tasks from the given one only

    :param task_source: source to check

    :return: None
    """
    next_start = task_source.next_start()
    if next_start is not None:
        if next_start.tzinfo is None or next_start.tzinfo != timezone.utc:
            raise ValueError('Invalid timezone information') # depends on [control=['if'], data=[]]
        if self.__next_start is None or next_start < self.__next_start:
            self.__next_start = next_start
            self.__next_sources = [task_source] # depends on [control=['if'], data=[]]
        elif next_start == self.__next_start:
            self.__next_sources.append(task_source) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['next_start']]
def readlink(path):
    '''
    Equivalent to os.readlink()
    '''
    if six.PY3 or not salt.utils.platform.is_windows():
        return os.readlink(path)

    if not HAS_WIN32FILE:
        log.error('Cannot read %s, missing required modules', path)

    reparse_data = _get_reparse_data(path)
    if not reparse_data:
        # Reproduce *NIX behavior when os.readlink is performed on a path that
        # is not a symbolic link.
        raise OSError(errno.EINVAL, 'Invalid argument: \'{0}\''.format(path))

    # REPARSE_DATA_BUFFER structure - see
    # http://msdn.microsoft.com/en-us/library/ff552012.aspx

    # parse the structure header to work out which type of reparse point this is
    header_parser = struct.Struct('L')
    ReparseTag, = header_parser.unpack(reparse_data[:header_parser.size])
    # http://msdn.microsoft.com/en-us/library/windows/desktop/aa365511.aspx
    if not ReparseTag & 0xA000FFFF == 0xA000000C:
        raise OSError(
            errno.EINVAL,
            '{0} is not a symlink, but another type of reparse point '
            '(0x{0:X}).'.format(ReparseTag)
        )

    # parse as a symlink reparse point structure (the structure for other
    # reparse points is different)
    data_parser = struct.Struct('LHHHHHHL')
    ReparseTag, ReparseDataLength, Reserved, SubstituteNameOffset, \
        SubstituteNameLength, PrintNameOffset, \
        PrintNameLength, Flags = data_parser.unpack(reparse_data[:data_parser.size])

    path_buffer_offset = data_parser.size
    absolute_substitute_name_offset = path_buffer_offset + SubstituteNameOffset
    target_bytes = reparse_data[absolute_substitute_name_offset:absolute_substitute_name_offset + SubstituteNameLength]
    target = target_bytes.decode('UTF-16')

    if target.startswith('\\??\\'):
        target = target[4:]

    try:
        # comes out in 8.3 form; convert it to LFN to make it look nicer
        target = win32file.GetLongPathName(target)
    except pywinerror as exc:
        # If target is on a UNC share, the decoded target will be in the format
        # "UNC\hostname\sharename\additional\subdirs\under\share". So, in
        # these cases, return the target path in the proper UNC path format.
        if target.startswith('UNC\\'):
            return re.sub(r'^UNC\\+', r'\\\\', target)
        # if file is not found (i.e. bad symlink), return it anyway like on *nix
        if exc.winerror == 2:
            return target
        raise

    return target
def function[readlink, parameter[path]]: constant[ Equivalent to os.readlink() ] if <ast.BoolOp object at 0x7da18dc9bc40> begin[:] return[call[name[os].readlink, parameter[name[path]]]] if <ast.UnaryOp object at 0x7da18dc98cd0> begin[:] call[name[log].error, parameter[constant[Cannot read %s, missing required modules], name[path]]] variable[reparse_data] assign[=] call[name[_get_reparse_data], parameter[name[path]]] if <ast.UnaryOp object at 0x7da18dc98d30> begin[:] <ast.Raise object at 0x7da18dc98940> variable[header_parser] assign[=] call[name[struct].Struct, parameter[constant[L]]] <ast.Tuple object at 0x7da18dc9b7f0> assign[=] call[name[header_parser].unpack, parameter[call[name[reparse_data]][<ast.Slice object at 0x7da18dc9b670>]]] if <ast.UnaryOp object at 0x7da18dc9ae00> begin[:] <ast.Raise object at 0x7da18dc98970> variable[data_parser] assign[=] call[name[struct].Struct, parameter[constant[LHHHHHHL]]] <ast.Tuple object at 0x7da18dc99480> assign[=] call[name[data_parser].unpack, parameter[call[name[reparse_data]][<ast.Slice object at 0x7da18dc9b4f0>]]] variable[path_buffer_offset] assign[=] name[data_parser].size variable[absolute_substitute_name_offset] assign[=] binary_operation[name[path_buffer_offset] + name[SubstituteNameOffset]] variable[target_bytes] assign[=] call[name[reparse_data]][<ast.Slice object at 0x7da18dc9afe0>] variable[target] assign[=] call[name[target_bytes].decode, parameter[constant[UTF-16]]] if call[name[target].startswith, parameter[constant[\??\]]] begin[:] variable[target] assign[=] call[name[target]][<ast.Slice object at 0x7da18dc9ac20>] <ast.Try object at 0x7da18dc99a80> return[name[target]]
keyword[def] identifier[readlink] ( identifier[path] ): literal[string] keyword[if] identifier[six] . identifier[PY3] keyword[or] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] (): keyword[return] identifier[os] . identifier[readlink] ( identifier[path] ) keyword[if] keyword[not] identifier[HAS_WIN32FILE] : identifier[log] . identifier[error] ( literal[string] , identifier[path] ) identifier[reparse_data] = identifier[_get_reparse_data] ( identifier[path] ) keyword[if] keyword[not] identifier[reparse_data] : keyword[raise] identifier[OSError] ( identifier[errno] . identifier[EINVAL] , literal[string] . identifier[format] ( identifier[path] )) identifier[header_parser] = identifier[struct] . identifier[Struct] ( literal[string] ) identifier[ReparseTag] ,= identifier[header_parser] . identifier[unpack] ( identifier[reparse_data] [: identifier[header_parser] . identifier[size] ]) keyword[if] keyword[not] identifier[ReparseTag] & literal[int] == literal[int] : keyword[raise] identifier[OSError] ( identifier[errno] . identifier[EINVAL] , literal[string] literal[string] . identifier[format] ( identifier[ReparseTag] ) ) identifier[data_parser] = identifier[struct] . identifier[Struct] ( literal[string] ) identifier[ReparseTag] , identifier[ReparseDataLength] , identifier[Reserved] , identifier[SubstituteNameOffset] , identifier[SubstituteNameLength] , identifier[PrintNameOffset] , identifier[PrintNameLength] , identifier[Flags] = identifier[data_parser] . identifier[unpack] ( identifier[reparse_data] [: identifier[data_parser] . identifier[size] ]) identifier[path_buffer_offset] = identifier[data_parser] . identifier[size] identifier[absolute_substitute_name_offset] = identifier[path_buffer_offset] + identifier[SubstituteNameOffset] identifier[target_bytes] = identifier[reparse_data] [ identifier[absolute_substitute_name_offset] : identifier[absolute_substitute_name_offset] + identifier[SubstituteNameLength] ] identifier[target] = identifier[target_bytes] . identifier[decode] ( literal[string] ) keyword[if] identifier[target] . identifier[startswith] ( literal[string] ): identifier[target] = identifier[target] [ literal[int] :] keyword[try] : identifier[target] = identifier[win32file] . identifier[GetLongPathName] ( identifier[target] ) keyword[except] identifier[pywinerror] keyword[as] identifier[exc] : keyword[if] identifier[target] . identifier[startswith] ( literal[string] ): keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[target] ) keyword[if] identifier[exc] . identifier[winerror] == literal[int] : keyword[return] identifier[target] keyword[raise] keyword[return] identifier[target]
def readlink(path):
    """
    Equivalent to os.readlink()
    """
    if six.PY3 or not salt.utils.platform.is_windows():
        return os.readlink(path) # depends on [control=['if'], data=[]]
    if not HAS_WIN32FILE:
        log.error('Cannot read %s, missing required modules', path) # depends on [control=['if'], data=[]]
    reparse_data = _get_reparse_data(path)
    if not reparse_data:
        # Reproduce *NIX behavior when os.readlink is performed on a path that
        # is not a symbolic link.
        raise OSError(errno.EINVAL, "Invalid argument: '{0}'".format(path)) # depends on [control=['if'], data=[]]
    # REPARSE_DATA_BUFFER structure - see
    # http://msdn.microsoft.com/en-us/library/ff552012.aspx
    # parse the structure header to work out which type of reparse point this is
    header_parser = struct.Struct('L')
    (ReparseTag,) = header_parser.unpack(reparse_data[:header_parser.size])
    # http://msdn.microsoft.com/en-us/library/windows/desktop/aa365511.aspx
    if not ReparseTag & 2684420095 == 2684354572:
        raise OSError(errno.EINVAL, '{0} is not a symlink, but another type of reparse point (0x{0:X}).'.format(ReparseTag)) # depends on [control=['if'], data=[]]
    # parse as a symlink reparse point structure (the structure for other
    # reparse points is different)
    data_parser = struct.Struct('LHHHHHHL')
    (ReparseTag, ReparseDataLength, Reserved, SubstituteNameOffset, SubstituteNameLength, PrintNameOffset, PrintNameLength, Flags) = data_parser.unpack(reparse_data[:data_parser.size])
    path_buffer_offset = data_parser.size
    absolute_substitute_name_offset = path_buffer_offset + SubstituteNameOffset
    target_bytes = reparse_data[absolute_substitute_name_offset:absolute_substitute_name_offset + SubstituteNameLength]
    target = target_bytes.decode('UTF-16')
    if target.startswith('\\??\\'):
        target = target[4:] # depends on [control=['if'], data=[]]
    try:
        # comes out in 8.3 form; convert it to LFN to make it look nicer
        target = win32file.GetLongPathName(target) # depends on [control=['try'], data=[]]
    except pywinerror as exc:
        # If target is on a UNC share, the decoded target will be in the format
        # "UNC\hostname\sharename\additional\subdirs\under\share". So, in
        # these cases, return the target path in the proper UNC path format.
        if target.startswith('UNC\\'):
            return re.sub('^UNC\\\\+', '\\\\\\\\', target) # depends on [control=['if'], data=[]]
        # if file is not found (i.e. bad symlink), return it anyway like on *nix
        if exc.winerror == 2:
            return target # depends on [control=['if'], data=[]]
        raise # depends on [control=['except'], data=['exc']]
    return target
def clear(self):
    """
    Clear any existing values from this queue.
    """
    logger.debug('Clearing queue: "%s"', self.name)
    return self.redis.delete(self.name)
def function[clear, parameter[self]]: constant[ Clear any existing values from this queue. ] call[name[logger].debug, parameter[constant[Clearing queue: "%s"], name[self].name]] return[call[name[self].redis.delete, parameter[name[self].name]]]
keyword[def] identifier[clear] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] ) keyword[return] identifier[self] . identifier[redis] . identifier[delete] ( identifier[self] . identifier[name] )
def clear(self):
    """
    Clear any existing values from this queue.
    """
    logger.debug('Clearing queue: "%s"', self.name)
    return self.redis.delete(self.name)
def validate_token_request(self, request):
    """
    :param request: OAuthlib request.
    :type request: oauthlib.common.Request
    """
    # REQUIRED. Value MUST be set to "refresh_token".
    if request.grant_type != 'refresh_token':
        raise errors.UnsupportedGrantTypeError(request=request)

    for validator in self.custom_validators.pre_token:
        validator(request)

    if request.refresh_token is None:
        raise errors.InvalidRequestError(
            description='Missing refresh token parameter.',
            request=request)

    # Because refresh tokens are typically long-lasting credentials used to
    # request additional access tokens, the refresh token is bound to the
    # client to which it was issued. If the client type is confidential or
    # the client was issued client credentials (or assigned other
    # authentication requirements), the client MUST authenticate with the
    # authorization server as described in Section 3.2.1.
    # https://tools.ietf.org/html/rfc6749#section-3.2.1
    if self.request_validator.client_authentication_required(request):
        log.debug('Authenticating client, %r.', request)
        if not self.request_validator.authenticate_client(request):
            log.debug('Invalid client (%r), denying access.', request)
            raise errors.InvalidClientError(request=request)
    elif not self.request_validator.authenticate_client_id(request.client_id, request):
        log.debug('Client authentication failed, %r.', request)
        raise errors.InvalidClientError(request=request)

    # Ensure client is authorized use of this grant type
    self.validate_grant_type(request)

    # REQUIRED. The refresh token issued to the client.
    log.debug('Validating refresh token %s for client %r.',
              request.refresh_token, request.client)
    if not self.request_validator.validate_refresh_token(
            request.refresh_token, request.client, request):
        log.debug('Invalid refresh token, %s, for client %r.',
                  request.refresh_token, request.client)
        raise errors.InvalidGrantError(request=request)

    original_scopes = utils.scope_to_list(
        self.request_validator.get_original_scopes(
            request.refresh_token, request))

    if request.scope:
        request.scopes = utils.scope_to_list(request.scope)
        if (not all((s in original_scopes for s in request.scopes))
                and not self.request_validator.is_within_original_scope(
                    request.scopes, request.refresh_token, request)):
            log.debug('Refresh token %s lack requested scopes, %r.',
                      request.refresh_token, request.scopes)
            raise errors.InvalidScopeError(request=request)
    else:
        request.scopes = original_scopes

    for validator in self.custom_validators.post_token:
        validator(request)
def function[validate_token_request, parameter[self, request]]: constant[ :param request: OAuthlib request. :type request: oauthlib.common.Request ] if compare[name[request].grant_type not_equal[!=] constant[refresh_token]] begin[:] <ast.Raise object at 0x7da1b18e4eb0> for taget[name[validator]] in starred[name[self].custom_validators.pre_token] begin[:] call[name[validator], parameter[name[request]]] if compare[name[request].refresh_token is constant[None]] begin[:] <ast.Raise object at 0x7da1b18e4e80> if call[name[self].request_validator.client_authentication_required, parameter[name[request]]] begin[:] call[name[log].debug, parameter[constant[Authenticating client, %r.], name[request]]] if <ast.UnaryOp object at 0x7da1b17f8bb0> begin[:] call[name[log].debug, parameter[constant[Invalid client (%r), denying access.], name[request]]] <ast.Raise object at 0x7da1b17f9210> call[name[self].validate_grant_type, parameter[name[request]]] call[name[log].debug, parameter[constant[Validating refresh token %s for client %r.], name[request].refresh_token, name[request].client]] if <ast.UnaryOp object at 0x7da1b17f9c30> begin[:] call[name[log].debug, parameter[constant[Invalid refresh token, %s, for client %r.], name[request].refresh_token, name[request].client]] <ast.Raise object at 0x7da1b180d9f0> variable[original_scopes] assign[=] call[name[utils].scope_to_list, parameter[call[name[self].request_validator.get_original_scopes, parameter[name[request].refresh_token, name[request]]]]] if name[request].scope begin[:] name[request].scopes assign[=] call[name[utils].scope_to_list, parameter[name[request].scope]] if <ast.BoolOp object at 0x7da1b180c760> begin[:] call[name[log].debug, parameter[constant[Refresh token %s lack requested scopes, %r.], name[request].refresh_token, name[request].scopes]] <ast.Raise object at 0x7da1b17fb9a0> for taget[name[validator]] in starred[name[self].custom_validators.post_token] begin[:] call[name[validator], parameter[name[request]]]
keyword[def] identifier[validate_token_request] ( identifier[self] , identifier[request] ): literal[string] keyword[if] identifier[request] . identifier[grant_type] != literal[string] : keyword[raise] identifier[errors] . identifier[UnsupportedGrantTypeError] ( identifier[request] = identifier[request] ) keyword[for] identifier[validator] keyword[in] identifier[self] . identifier[custom_validators] . identifier[pre_token] : identifier[validator] ( identifier[request] ) keyword[if] identifier[request] . identifier[refresh_token] keyword[is] keyword[None] : keyword[raise] identifier[errors] . identifier[InvalidRequestError] ( identifier[description] = literal[string] , identifier[request] = identifier[request] ) keyword[if] identifier[self] . identifier[request_validator] . identifier[client_authentication_required] ( identifier[request] ): identifier[log] . identifier[debug] ( literal[string] , identifier[request] ) keyword[if] keyword[not] identifier[self] . identifier[request_validator] . identifier[authenticate_client] ( identifier[request] ): identifier[log] . identifier[debug] ( literal[string] , identifier[request] ) keyword[raise] identifier[errors] . identifier[InvalidClientError] ( identifier[request] = identifier[request] ) keyword[elif] keyword[not] identifier[self] . identifier[request_validator] . identifier[authenticate_client_id] ( identifier[request] . identifier[client_id] , identifier[request] ): identifier[log] . identifier[debug] ( literal[string] , identifier[request] ) keyword[raise] identifier[errors] . identifier[InvalidClientError] ( identifier[request] = identifier[request] ) identifier[self] . identifier[validate_grant_type] ( identifier[request] ) identifier[log] . identifier[debug] ( literal[string] , identifier[request] . identifier[refresh_token] , identifier[request] . identifier[client] ) keyword[if] keyword[not] identifier[self] . identifier[request_validator] . identifier[validate_refresh_token] ( identifier[request] . identifier[refresh_token] , identifier[request] . identifier[client] , identifier[request] ): identifier[log] . identifier[debug] ( literal[string] , identifier[request] . identifier[refresh_token] , identifier[request] . identifier[client] ) keyword[raise] identifier[errors] . identifier[InvalidGrantError] ( identifier[request] = identifier[request] ) identifier[original_scopes] = identifier[utils] . identifier[scope_to_list] ( identifier[self] . identifier[request_validator] . identifier[get_original_scopes] ( identifier[request] . identifier[refresh_token] , identifier[request] )) keyword[if] identifier[request] . identifier[scope] : identifier[request] . identifier[scopes] = identifier[utils] . identifier[scope_to_list] ( identifier[request] . identifier[scope] ) keyword[if] ( keyword[not] identifier[all] (( identifier[s] keyword[in] identifier[original_scopes] keyword[for] identifier[s] keyword[in] identifier[request] . identifier[scopes] )) keyword[and] keyword[not] identifier[self] . identifier[request_validator] . identifier[is_within_original_scope] ( identifier[request] . identifier[scopes] , identifier[request] . identifier[refresh_token] , identifier[request] )): identifier[log] . identifier[debug] ( literal[string] , identifier[request] . identifier[refresh_token] , identifier[request] . identifier[scopes] ) keyword[raise] identifier[errors] . identifier[InvalidScopeError] ( identifier[request] = identifier[request] ) keyword[else] : identifier[request] . 
identifier[scopes] = identifier[original_scopes] keyword[for] identifier[validator] keyword[in] identifier[self] . identifier[custom_validators] . identifier[post_token] : identifier[validator] ( identifier[request] )
def validate_token_request(self, request): """ :param request: OAuthlib request. :type request: oauthlib.common.Request """ # REQUIRED. Value MUST be set to "refresh_token". if request.grant_type != 'refresh_token': raise errors.UnsupportedGrantTypeError(request=request) # depends on [control=['if'], data=[]] for validator in self.custom_validators.pre_token: validator(request) # depends on [control=['for'], data=['validator']] if request.refresh_token is None: raise errors.InvalidRequestError(description='Missing refresh token parameter.', request=request) # depends on [control=['if'], data=[]] # Because refresh tokens are typically long-lasting credentials used to # request additional access tokens, the refresh token is bound to the # client to which it was issued. If the client type is confidential or # the client was issued client credentials (or assigned other # authentication requirements), the client MUST authenticate with the # authorization server as described in Section 3.2.1. # https://tools.ietf.org/html/rfc6749#section-3.2.1 if self.request_validator.client_authentication_required(request): log.debug('Authenticating client, %r.', request) if not self.request_validator.authenticate_client(request): log.debug('Invalid client (%r), denying access.', request) raise errors.InvalidClientError(request=request) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif not self.request_validator.authenticate_client_id(request.client_id, request): log.debug('Client authentication failed, %r.', request) raise errors.InvalidClientError(request=request) # depends on [control=['if'], data=[]] # Ensure client is authorized use of this grant type self.validate_grant_type(request) # REQUIRED. The refresh token issued to the client. log.debug('Validating refresh token %s for client %r.', request.refresh_token, request.client) if not self.request_validator.validate_refresh_token(request.refresh_token, request.client, request): log.debug('Invalid refresh token, %s, for client %r.', request.refresh_token, request.client) raise errors.InvalidGrantError(request=request) # depends on [control=['if'], data=[]] original_scopes = utils.scope_to_list(self.request_validator.get_original_scopes(request.refresh_token, request)) if request.scope: request.scopes = utils.scope_to_list(request.scope) if not all((s in original_scopes for s in request.scopes)) and (not self.request_validator.is_within_original_scope(request.scopes, request.refresh_token, request)): log.debug('Refresh token %s lack requested scopes, %r.', request.refresh_token, request.scopes) raise errors.InvalidScopeError(request=request) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: request.scopes = original_scopes for validator in self.custom_validators.post_token: validator(request) # depends on [control=['for'], data=['validator']]
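A standalone illustration of the scope-narrowing rule enforced above: a refresh request may only ask for scopes contained in the originally granted set (the scope values below are made up).

original_scopes = ['profile', 'email', 'offline_access']
requested_scopes = ['profile', 'email']

# Mirrors `all(s in original_scopes for s in request.scopes)` in the grant:
is_subset = all(s in original_scopes for s in requested_scopes)
print(is_subset)  # True -> no InvalidScopeError would be raised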
def afx_small(): """Small transformer model with small batch size for fast step times.""" hparams = transformer.transformer_tpu() hparams.filter_size = 1024 hparams.num_heads = 4 hparams.num_hidden_layers = 3 hparams.batch_size = 512 return hparams
def function[afx_small, parameter[]]: constant[Small transformer model with small batch size for fast step times.] variable[hparams] assign[=] call[name[transformer].transformer_tpu, parameter[]] name[hparams].filter_size assign[=] constant[1024] name[hparams].num_heads assign[=] constant[4] name[hparams].num_hidden_layers assign[=] constant[3] name[hparams].batch_size assign[=] constant[512] return[name[hparams]]
keyword[def] identifier[afx_small] (): literal[string] identifier[hparams] = identifier[transformer] . identifier[transformer_tpu] () identifier[hparams] . identifier[filter_size] = literal[int] identifier[hparams] . identifier[num_heads] = literal[int] identifier[hparams] . identifier[num_hidden_layers] = literal[int] identifier[hparams] . identifier[batch_size] = literal[int] keyword[return] identifier[hparams]
def afx_small(): """Small transformer model with small batch size for fast step times.""" hparams = transformer.transformer_tpu() hparams.filter_size = 1024 hparams.num_heads = 4 hparams.num_hidden_layers = 3 hparams.batch_size = 512 return hparams
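A hedged usage sketch, assuming tensor2tensor (and its TensorFlow dependency) is importable; afx_small simply overrides a handful of fields on the stock transformer_tpu hyperparameter set.

from tensor2tensor.models import transformer  # dependency of afx_small

hp = afx_small()
print(hp.num_hidden_layers, hp.filter_size, hp.batch_size)  # 3 1024 512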
def configure(cfgpath=None):
    """
    Configure lexibank.

    :return: a Config instance
    """
    cfgpath = Path(cfgpath) \
        if cfgpath else Path(user_config_dir(pylexibank.__name__)) / 'config.ini'
    if not cfgpath.exists():
        print("""
{0}

You seem to be running lexibank for the first time.
Your system configuration will now be written to a config file to be used
whenever lexibank is run later on.
""".format(
            colored('Welcome to lexibank!', 'blue', attrs=['bold', 'reverse'])))
        if not cfgpath.parent.exists():
            cfgpath.parent.mkdir(parents=True)
        cfg = Config()
        cfg['paths'] = {k: get_path(src) for k, src in REPOS}
        cfg.write(cfgpath)
        print("""
Configuration has been written to:
{0}
You may edit this file to adapt to changes in your system or to reconfigure settings
such as the logging level.""".format(cfgpath.resolve()))
    else:
        cfg = Config.from_file(cfgpath)

    try:
        cfg.glottolog
    except (FileNotFoundError, ValueError):
        raise ParserError('Misconfigured Glottolog path in {0}'.format(cfgpath))
    if not Path(cfg['paths']['concepticon']).exists():
        raise ParserError('Misconfigured Concepticon path in {0}'.format(cfgpath))

    # Print the configuration directory for reference:
    print("Using configuration file at:")
    print(str(cfgpath) + '\n')

    return cfg
def function[configure, parameter[cfgpath]]: constant[ Configure lexibank. :return: a pair (config, logger) ] variable[cfgpath] assign[=] <ast.IfExp object at 0x7da1b26ac790> if <ast.UnaryOp object at 0x7da20c6c4490> begin[:] call[name[print], parameter[call[constant[ {0} You seem to be running lexibank for the first time. Your system configuration will now be written to a config file to be used whenever lexibank is run lateron. ].format, parameter[call[name[colored], parameter[constant[Welcome to lexibank!], constant[blue]]]]]]] if <ast.UnaryOp object at 0x7da20c6c6da0> begin[:] call[name[cfgpath].parent.mkdir, parameter[]] variable[cfg] assign[=] call[name[Config], parameter[]] call[name[cfg]][constant[paths]] assign[=] <ast.DictComp object at 0x7da20c6c4910> call[name[cfg].write, parameter[name[cfgpath]]] call[name[print], parameter[call[constant[ Configuration has been written to: {0} You may edit this file to adapt to changes in your system or to reconfigure settings such as the logging level.].format, parameter[call[name[cfgpath].resolve, parameter[]]]]]] <ast.Try object at 0x7da20c6c5810> if <ast.UnaryOp object at 0x7da20c6c6920> begin[:] <ast.Raise object at 0x7da20c6c5ff0> call[name[print], parameter[constant[Using configuration file at:]]] call[name[print], parameter[binary_operation[call[name[str], parameter[name[cfgpath]]] + constant[ ]]]] return[name[cfg]]
keyword[def] identifier[configure] ( identifier[cfgpath] = keyword[None] ): literal[string] identifier[cfgpath] = identifier[Path] ( identifier[cfgpath] ) keyword[if] identifier[cfgpath] keyword[else] identifier[Path] ( identifier[user_config_dir] ( identifier[pylexibank] . identifier[__name__] ))/ literal[string] keyword[if] keyword[not] identifier[cfgpath] . identifier[exists] (): identifier[print] ( literal[string] . identifier[format] ( identifier[colored] ( literal[string] , literal[string] , identifier[attrs] =[ literal[string] , literal[string] ]))) keyword[if] keyword[not] identifier[cfgpath] . identifier[parent] . identifier[exists] (): identifier[cfgpath] . identifier[parent] . identifier[mkdir] ( identifier[parents] = keyword[True] ) identifier[cfg] = identifier[Config] () identifier[cfg] [ literal[string] ]={ identifier[k] : identifier[get_path] ( identifier[src] ) keyword[for] identifier[k] , identifier[src] keyword[in] identifier[REPOS] } identifier[cfg] . identifier[write] ( identifier[cfgpath] ) identifier[print] ( literal[string] . identifier[format] ( identifier[cfgpath] . identifier[resolve] ())) keyword[else] : identifier[cfg] = identifier[Config] . identifier[from_file] ( identifier[cfgpath] ) keyword[try] : identifier[cfg] . identifier[glottolog] keyword[except] ( identifier[FileNotFoundError] , identifier[ValueError] ): keyword[raise] identifier[ParserError] ( literal[string] . identifier[format] ( identifier[cfgpath] )) keyword[if] keyword[not] identifier[Path] ( identifier[cfg] [ literal[string] ][ literal[string] ]). identifier[exists] (): keyword[raise] identifier[ParserError] ( literal[string] . identifier[format] ( identifier[cfgpath] )) identifier[print] ( literal[string] ) identifier[print] ( identifier[str] ( identifier[cfgpath] )+ literal[string] ) keyword[return] identifier[cfg]
def configure(cfgpath=None): """ Configure lexibank. :return: a pair (config, logger) """ cfgpath = Path(cfgpath) if cfgpath else Path(user_config_dir(pylexibank.__name__)) / 'config.ini' if not cfgpath.exists(): print('\n{0}\n\nYou seem to be running lexibank for the first time.\nYour system configuration will now be written to a config file to be used\nwhenever lexibank is run lateron.\n'.format(colored('Welcome to lexibank!', 'blue', attrs=['bold', 'reverse']))) if not cfgpath.parent.exists(): cfgpath.parent.mkdir(parents=True) # depends on [control=['if'], data=[]] cfg = Config() cfg['paths'] = {k: get_path(src) for (k, src) in REPOS} cfg.write(cfgpath) print('\nConfiguration has been written to:\n{0}\nYou may edit this file to adapt to changes in your system or to reconfigure settings\nsuch as the logging level.'.format(cfgpath.resolve())) # depends on [control=['if'], data=[]] else: cfg = Config.from_file(cfgpath) try: cfg.glottolog # depends on [control=['try'], data=[]] except (FileNotFoundError, ValueError): raise ParserError('Misconfigured Glottolog path in {0}'.format(cfgpath)) # depends on [control=['except'], data=[]] if not Path(cfg['paths']['concepticon']).exists(): raise ParserError('Misconfigured Concepticon path in {0}'.format(cfgpath)) # depends on [control=['if'], data=[]] # Print the configuration directory for reference: print('Using configuration file at:') print(str(cfgpath) + '\n') return cfg
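A hedged usage sketch; it assumes pylexibank is installed and that the Glottolog and Concepticon paths recorded in the config are valid on this machine, otherwise configure raises ParserError.

cfg = configure()  # reads (or first creates) config.ini under the user config dir
print(cfg['paths']['concepticon'])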
def _sunos_memdata(): ''' Return the memory information for SunOS-like systems ''' grains = {'mem_total': 0, 'swap_total': 0} prtconf = '/usr/sbin/prtconf 2>/dev/null' for line in __salt__['cmd.run'](prtconf, python_shell=True).splitlines(): comps = line.split(' ') if comps[0].strip() == 'Memory' and comps[1].strip() == 'size:': grains['mem_total'] = int(comps[2].strip()) swap_cmd = salt.utils.path.which('swap') swap_data = __salt__['cmd.run']('{0} -s'.format(swap_cmd)).split() try: swap_avail = int(swap_data[-2][:-1]) swap_used = int(swap_data[-4][:-1]) swap_total = (swap_avail + swap_used) // 1024 except ValueError: swap_total = None grains['swap_total'] = swap_total return grains
def function[_sunos_memdata, parameter[]]: constant[ Return the memory information for SunOS-like systems ] variable[grains] assign[=] dictionary[[<ast.Constant object at 0x7da1b2054430>, <ast.Constant object at 0x7da1b2054040>], [<ast.Constant object at 0x7da1b2054070>, <ast.Constant object at 0x7da1b20540a0>]] variable[prtconf] assign[=] constant[/usr/sbin/prtconf 2>/dev/null] for taget[name[line]] in starred[call[call[call[name[__salt__]][constant[cmd.run]], parameter[name[prtconf]]].splitlines, parameter[]]] begin[:] variable[comps] assign[=] call[name[line].split, parameter[constant[ ]]] if <ast.BoolOp object at 0x7da1b2054490> begin[:] call[name[grains]][constant[mem_total]] assign[=] call[name[int], parameter[call[call[name[comps]][constant[2]].strip, parameter[]]]] variable[swap_cmd] assign[=] call[name[salt].utils.path.which, parameter[constant[swap]]] variable[swap_data] assign[=] call[call[call[name[__salt__]][constant[cmd.run]], parameter[call[constant[{0} -s].format, parameter[name[swap_cmd]]]]].split, parameter[]] <ast.Try object at 0x7da1b1f39de0> call[name[grains]][constant[swap_total]] assign[=] name[swap_total] return[name[grains]]
keyword[def] identifier[_sunos_memdata] (): literal[string] identifier[grains] ={ literal[string] : literal[int] , literal[string] : literal[int] } identifier[prtconf] = literal[string] keyword[for] identifier[line] keyword[in] identifier[__salt__] [ literal[string] ]( identifier[prtconf] , identifier[python_shell] = keyword[True] ). identifier[splitlines] (): identifier[comps] = identifier[line] . identifier[split] ( literal[string] ) keyword[if] identifier[comps] [ literal[int] ]. identifier[strip] ()== literal[string] keyword[and] identifier[comps] [ literal[int] ]. identifier[strip] ()== literal[string] : identifier[grains] [ literal[string] ]= identifier[int] ( identifier[comps] [ literal[int] ]. identifier[strip] ()) identifier[swap_cmd] = identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ) identifier[swap_data] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[swap_cmd] )). identifier[split] () keyword[try] : identifier[swap_avail] = identifier[int] ( identifier[swap_data] [- literal[int] ][:- literal[int] ]) identifier[swap_used] = identifier[int] ( identifier[swap_data] [- literal[int] ][:- literal[int] ]) identifier[swap_total] =( identifier[swap_avail] + identifier[swap_used] )// literal[int] keyword[except] identifier[ValueError] : identifier[swap_total] = keyword[None] identifier[grains] [ literal[string] ]= identifier[swap_total] keyword[return] identifier[grains]
def _sunos_memdata(): """ Return the memory information for SunOS-like systems """ grains = {'mem_total': 0, 'swap_total': 0} prtconf = '/usr/sbin/prtconf 2>/dev/null' for line in __salt__['cmd.run'](prtconf, python_shell=True).splitlines(): comps = line.split(' ') if comps[0].strip() == 'Memory' and comps[1].strip() == 'size:': grains['mem_total'] = int(comps[2].strip()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] swap_cmd = salt.utils.path.which('swap') swap_data = __salt__['cmd.run']('{0} -s'.format(swap_cmd)).split() try: swap_avail = int(swap_data[-2][:-1]) swap_used = int(swap_data[-4][:-1]) swap_total = (swap_avail + swap_used) // 1024 # depends on [control=['try'], data=[]] except ValueError: swap_total = None # depends on [control=['except'], data=[]] grains['swap_total'] = swap_total return grains
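The swap arithmetic above can be exercised in isolation; the sample line below imitates `swap -s` output (all numbers are made up), with the trailing 'k' stripped before converting kilobytes to megabytes.

swap_data = ('total: 100000k bytes allocated + 20000k reserved = '
             '120000k used, 500000k available').split()
swap_avail = int(swap_data[-2][:-1])     # 500000 (kB available)
swap_used = int(swap_data[-4][:-1])      # 120000 (kB used)
print((swap_avail + swap_used) // 1024)  # 605 (MB of swap total)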
def list_to_tf_input(data, response_index, num_outcomes): """ Separates the outcome feature from the data. """ matrix = np.matrix([row[:response_index] + row[response_index+1:] for row in data]) outcomes = np.asarray([row[response_index] for row in data], dtype=np.uint8) return matrix, outcomes
def function[list_to_tf_input, parameter[data, response_index, num_outcomes]]: constant[ Separates the outcome feature from the data. ] variable[matrix] assign[=] call[name[np].matrix, parameter[<ast.ListComp object at 0x7da1b0353160>]] variable[outcomes] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da1b0350e50>]] return[tuple[[<ast.Name object at 0x7da1b0352140>, <ast.Name object at 0x7da1b0352a70>]]]
keyword[def] identifier[list_to_tf_input] ( identifier[data] , identifier[response_index] , identifier[num_outcomes] ): literal[string] identifier[matrix] = identifier[np] . identifier[matrix] ([ identifier[row] [: identifier[response_index] ]+ identifier[row] [ identifier[response_index] + literal[int] :] keyword[for] identifier[row] keyword[in] identifier[data] ]) identifier[outcomes] = identifier[np] . identifier[asarray] ([ identifier[row] [ identifier[response_index] ] keyword[for] identifier[row] keyword[in] identifier[data] ], identifier[dtype] = identifier[np] . identifier[uint8] ) keyword[return] identifier[matrix] , identifier[outcomes]
def list_to_tf_input(data, response_index, num_outcomes): """ Separates the outcome feature from the data. """ matrix = np.matrix([row[:response_index] + row[response_index + 1:] for row in data]) outcomes = np.asarray([row[response_index] for row in data], dtype=np.uint8) return (matrix, outcomes)
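A minimal round trip through the function (note that num_outcomes is accepted but never used in the body); the toy rows are made up.

import numpy as np  # required by list_to_tf_input

data = [[1.0, 2.0, 0], [3.0, 4.0, 1]]  # column 2 holds the outcome
X, y = list_to_tf_input(data, response_index=2, num_outcomes=2)
print(X)           # [[1. 2.] [3. 4.]]
print(y, y.dtype)  # [0 1] uint8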
def edges_unique_length(self): """ How long is each unique edge. Returns ---------- length : (len(self.edges_unique), ) float Length of each unique edge """ vector = np.subtract(*self.vertices[self.edges_unique.T]) length = np.linalg.norm(vector, axis=1) return length
def function[edges_unique_length, parameter[self]]: constant[ How long is each unique edge. Returns ---------- length : (len(self.edges_unique), ) float Length of each unique edge ] variable[vector] assign[=] call[name[np].subtract, parameter[<ast.Starred object at 0x7da20c76e500>]] variable[length] assign[=] call[name[np].linalg.norm, parameter[name[vector]]] return[name[length]]
keyword[def] identifier[edges_unique_length] ( identifier[self] ): literal[string] identifier[vector] = identifier[np] . identifier[subtract] (* identifier[self] . identifier[vertices] [ identifier[self] . identifier[edges_unique] . identifier[T] ]) identifier[length] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[vector] , identifier[axis] = literal[int] ) keyword[return] identifier[length]
def edges_unique_length(self): """ How long is each unique edge. Returns ---------- length : (len(self.edges_unique), ) float Length of each unique edge """ vector = np.subtract(*self.vertices[self.edges_unique.T]) length = np.linalg.norm(vector, axis=1) return length
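The vectorised distance computation can be replayed on plain arrays; the triangle below is illustrative.

import numpy as np

vertices = np.array([[0., 0., 0.], [1., 0., 0.], [1., 1., 0.]])
edges_unique = np.array([[0, 1], [1, 2], [0, 2]])

# vertices[edges_unique.T] has shape (2, n, 3); subtracting the two
# endpoint arrays gives one vector per edge.
vector = np.subtract(*vertices[edges_unique.T])
print(np.linalg.norm(vector, axis=1))  # [1. 1. 1.41421356]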
def do_i_raise_dependency(self, status, inherit_parents, hosts, services, timeperiods): # pylint: disable=too-many-locals """Check if this object or one of its dependency state (chk dependencies) match the status :param status: state list where dependency matters (notification failure criteria) :type status: list :param inherit_parents: recurse over parents :type inherit_parents: bool :param hosts: hosts objects, used to raise dependency check :type hosts: alignak.objects.host.Hosts :param services: services objects, used to raise dependency check :type services: alignak.objects.service.Services :param timeperiods: Timeperiods objects, used for all kind of timeperiod (notif, check) :type timeperiods: alignak.objects.timeperiod.Timeperiods :return: True if one state matched the status list, otherwise False :rtype: bool """ # Do I raise dep? for stat in status: if self.is_state(stat): return True # If we do not inherit parent, we have no reason to be blocking if not inherit_parents: return False # Ok, I do not raise dep, but my dep maybe raise me now = time.time() for (dep_id, dep_status, _, timeperiod_id, inh_parent) in self.chk_depend_of: if dep_id in hosts: dep = hosts[dep_id] else: dep = services[dep_id] timeperiod = timeperiods[timeperiod_id] if dep.do_i_raise_dependency(dep_status, inh_parent, hosts, services, timeperiods): if timeperiod is None or timeperiod.is_time_valid(now): return True # No, I really do not raise... return False
def function[do_i_raise_dependency, parameter[self, status, inherit_parents, hosts, services, timeperiods]]: constant[Check if this object or one of its dependency state (chk dependencies) match the status :param status: state list where dependency matters (notification failure criteria) :type status: list :param inherit_parents: recurse over parents :type inherit_parents: bool :param hosts: hosts objects, used to raise dependency check :type hosts: alignak.objects.host.Hosts :param services: services objects, used to raise dependency check :type services: alignak.objects.service.Services :param timeperiods: Timeperiods objects, used for all kind of timeperiod (notif, check) :type timeperiods: alignak.objects.timeperiod.Timeperiods :return: True if one state matched the status list, otherwise False :rtype: bool ] for taget[name[stat]] in starred[name[status]] begin[:] if call[name[self].is_state, parameter[name[stat]]] begin[:] return[constant[True]] if <ast.UnaryOp object at 0x7da20c6aae60> begin[:] return[constant[False]] variable[now] assign[=] call[name[time].time, parameter[]] for taget[tuple[[<ast.Name object at 0x7da18dc07190>, <ast.Name object at 0x7da18dc064d0>, <ast.Name object at 0x7da18dc07ee0>, <ast.Name object at 0x7da18dc04cd0>, <ast.Name object at 0x7da18dc06830>]]] in starred[name[self].chk_depend_of] begin[:] if compare[name[dep_id] in name[hosts]] begin[:] variable[dep] assign[=] call[name[hosts]][name[dep_id]] variable[timeperiod] assign[=] call[name[timeperiods]][name[timeperiod_id]] if call[name[dep].do_i_raise_dependency, parameter[name[dep_status], name[inh_parent], name[hosts], name[services], name[timeperiods]]] begin[:] if <ast.BoolOp object at 0x7da1b0d21930> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[do_i_raise_dependency] ( identifier[self] , identifier[status] , identifier[inherit_parents] , identifier[hosts] , identifier[services] , identifier[timeperiods] ): literal[string] keyword[for] identifier[stat] keyword[in] identifier[status] : keyword[if] identifier[self] . identifier[is_state] ( identifier[stat] ): keyword[return] keyword[True] keyword[if] keyword[not] identifier[inherit_parents] : keyword[return] keyword[False] identifier[now] = identifier[time] . identifier[time] () keyword[for] ( identifier[dep_id] , identifier[dep_status] , identifier[_] , identifier[timeperiod_id] , identifier[inh_parent] ) keyword[in] identifier[self] . identifier[chk_depend_of] : keyword[if] identifier[dep_id] keyword[in] identifier[hosts] : identifier[dep] = identifier[hosts] [ identifier[dep_id] ] keyword[else] : identifier[dep] = identifier[services] [ identifier[dep_id] ] identifier[timeperiod] = identifier[timeperiods] [ identifier[timeperiod_id] ] keyword[if] identifier[dep] . identifier[do_i_raise_dependency] ( identifier[dep_status] , identifier[inh_parent] , identifier[hosts] , identifier[services] , identifier[timeperiods] ): keyword[if] identifier[timeperiod] keyword[is] keyword[None] keyword[or] identifier[timeperiod] . identifier[is_time_valid] ( identifier[now] ): keyword[return] keyword[True] keyword[return] keyword[False]
def do_i_raise_dependency(self, status, inherit_parents, hosts, services, timeperiods): # pylint: disable=too-many-locals 'Check if this object or one of its dependency state (chk dependencies) match the status\n\n :param status: state list where dependency matters (notification failure criteria)\n :type status: list\n :param inherit_parents: recurse over parents\n :type inherit_parents: bool\n :param hosts: hosts objects, used to raise dependency check\n :type hosts: alignak.objects.host.Hosts\n :param services: services objects, used to raise dependency check\n :type services: alignak.objects.service.Services\n :param timeperiods: Timeperiods objects, used for all kind of timeperiod (notif, check)\n :type timeperiods: alignak.objects.timeperiod.Timeperiods\n :return: True if one state matched the status list, otherwise False\n :rtype: bool\n ' # Do I raise dep? for stat in status: if self.is_state(stat): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stat']] # If we do not inherit parent, we have no reason to be blocking if not inherit_parents: return False # depends on [control=['if'], data=[]] # Ok, I do not raise dep, but my dep maybe raise me now = time.time() for (dep_id, dep_status, _, timeperiod_id, inh_parent) in self.chk_depend_of: if dep_id in hosts: dep = hosts[dep_id] # depends on [control=['if'], data=['dep_id', 'hosts']] else: dep = services[dep_id] timeperiod = timeperiods[timeperiod_id] if dep.do_i_raise_dependency(dep_status, inh_parent, hosts, services, timeperiods): if timeperiod is None or timeperiod.is_time_valid(now): return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # No, I really do not raise... return False
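A toy model of the recursive walk (deliberately not alignak's API): an object raises a dependency either because its own state matches, or, when parents are inherited, because one of its dependencies does.

class Node(object):
    def __init__(self, state, deps=()):
        self.state, self.deps = state, deps

    def raises(self, status, inherit_parents=True):
        if self.state in status:
            return True
        if not inherit_parents:
            return False
        return any(dep.raises(status) for dep in self.deps)

leaf = Node('DOWN')
root = Node('UP', deps=(leaf,))
print(root.raises({'DOWN'}))                         # True (via the dependency)
print(root.raises({'DOWN'}, inherit_parents=False))  # False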
def default(self, meth): """ Decorator that allows to set the default for an attribute. Returns *meth* unchanged. :raises DefaultAlreadySetError: If default has been set before. .. versionadded:: 17.1.0 """ if self._default is not NOTHING: raise DefaultAlreadySetError() self._default = Factory(meth, takes_self=True) return meth
def function[default, parameter[self, meth]]: constant[ Decorator that allows to set the default for an attribute. Returns *meth* unchanged. :raises DefaultAlreadySetError: If default has been set before. .. versionadded:: 17.1.0 ] if compare[name[self]._default is_not name[NOTHING]] begin[:] <ast.Raise object at 0x7da18bccac50> name[self]._default assign[=] call[name[Factory], parameter[name[meth]]] return[name[meth]]
keyword[def] identifier[default] ( identifier[self] , identifier[meth] ): literal[string] keyword[if] identifier[self] . identifier[_default] keyword[is] keyword[not] identifier[NOTHING] : keyword[raise] identifier[DefaultAlreadySetError] () identifier[self] . identifier[_default] = identifier[Factory] ( identifier[meth] , identifier[takes_self] = keyword[True] ) keyword[return] identifier[meth]
def default(self, meth): """ Decorator that allows to set the default for an attribute. Returns *meth* unchanged. :raises DefaultAlreadySetError: If default has been set before. .. versionadded:: 17.1.0 """ if self._default is not NOTHING: raise DefaultAlreadySetError() # depends on [control=['if'], data=[]] self._default = Factory(meth, takes_self=True) return meth
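This method is the hook behind attrs' default-decorator API; a canonical usage with any modern attrs release:

import attr

@attr.s
class Point(object):
    x = attr.ib()

    @x.default
    def _x_default(self):
        return 42

print(Point().x)  # 42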
def legacy_write(self, request_id, msg, max_doc_size, with_last_error): """Send OP_INSERT, etc., optionally returning response as a dict. Can raise ConnectionFailure or OperationFailure. :Parameters: - `request_id`: an int. - `msg`: bytes, an OP_INSERT, OP_UPDATE, or OP_DELETE message, perhaps with a getlasterror command appended. - `max_doc_size`: size in bytes of the largest document in `msg`. - `with_last_error`: True if a getlasterror command is appended. """ if not with_last_error and not self.is_writable: # Write won't succeed, bail as if we'd done a getlasterror. raise NotMasterError("not master") self.send_message(msg, max_doc_size) if with_last_error: response = self.receive_message(1, request_id) return helpers._check_gle_response(response)
def function[legacy_write, parameter[self, request_id, msg, max_doc_size, with_last_error]]: constant[Send OP_INSERT, etc., optionally returning response as a dict. Can raise ConnectionFailure or OperationFailure. :Parameters: - `request_id`: an int. - `msg`: bytes, an OP_INSERT, OP_UPDATE, or OP_DELETE message, perhaps with a getlasterror command appended. - `max_doc_size`: size in bytes of the largest document in `msg`. - `with_last_error`: True if a getlasterror command is appended. ] if <ast.BoolOp object at 0x7da18dc9b880> begin[:] <ast.Raise object at 0x7da18dc9b7f0> call[name[self].send_message, parameter[name[msg], name[max_doc_size]]] if name[with_last_error] begin[:] variable[response] assign[=] call[name[self].receive_message, parameter[constant[1], name[request_id]]] return[call[name[helpers]._check_gle_response, parameter[name[response]]]]
keyword[def] identifier[legacy_write] ( identifier[self] , identifier[request_id] , identifier[msg] , identifier[max_doc_size] , identifier[with_last_error] ): literal[string] keyword[if] keyword[not] identifier[with_last_error] keyword[and] keyword[not] identifier[self] . identifier[is_writable] : keyword[raise] identifier[NotMasterError] ( literal[string] ) identifier[self] . identifier[send_message] ( identifier[msg] , identifier[max_doc_size] ) keyword[if] identifier[with_last_error] : identifier[response] = identifier[self] . identifier[receive_message] ( literal[int] , identifier[request_id] ) keyword[return] identifier[helpers] . identifier[_check_gle_response] ( identifier[response] )
def legacy_write(self, request_id, msg, max_doc_size, with_last_error): """Send OP_INSERT, etc., optionally returning response as a dict. Can raise ConnectionFailure or OperationFailure. :Parameters: - `request_id`: an int. - `msg`: bytes, an OP_INSERT, OP_UPDATE, or OP_DELETE message, perhaps with a getlasterror command appended. - `max_doc_size`: size in bytes of the largest document in `msg`. - `with_last_error`: True if a getlasterror command is appended. """ if not with_last_error and (not self.is_writable): # Write won't succeed, bail as if we'd done a getlasterror. raise NotMasterError('not master') # depends on [control=['if'], data=[]] self.send_message(msg, max_doc_size) if with_last_error: response = self.receive_message(1, request_id) return helpers._check_gle_response(response) # depends on [control=['if'], data=[]]
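The fail-fast guard at the top can be read as a pure predicate; a standalone restatement (NotMasterError stands in for pymongo's exception, and no real sockets are involved):

def may_attempt_legacy_write(is_writable, with_last_error):
    # Unacknowledged writes (no getlasterror appended) against a
    # non-writable node are rejected before any bytes are sent.
    return with_last_error or is_writable

print(may_attempt_legacy_write(False, False))  # False -> raise NotMasterError
print(may_attempt_legacy_write(False, True))   # True  -> send, then check GLE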
def list_nodes_select(nodes, selection, call=None):
    '''
    Return a dict of the VMs that are on the provider, with only the selected fields
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_nodes_select function must be called '
            'with -f or --function.'
        )

    if 'error' in nodes:
        raise SaltCloudSystemExit(
            'An error occurred while listing nodes: {0}'.format(
                nodes['error']['Errors']['Error']['Message']
            )
        )

    ret = {}
    for node in nodes:
        pairs = {}
        data = nodes[node]
        for key in data:
            if six.text_type(key) in selection:
                value = data[key]
                pairs[key] = value
        ret[node] = pairs

    return ret
def function[list_nodes_select, parameter[nodes, selection, call]]: constant[ Return a list of the VMs that are on the provider, with select fields ] if compare[name[call] equal[==] constant[action]] begin[:] <ast.Raise object at 0x7da1b1ff0d30> if compare[constant[error] in name[nodes]] begin[:] <ast.Raise object at 0x7da1b1ff15d0> variable[ret] assign[=] dictionary[[], []] for taget[name[node]] in starred[name[nodes]] begin[:] variable[pairs] assign[=] dictionary[[], []] variable[data] assign[=] call[name[nodes]][name[node]] for taget[name[key]] in starred[name[data]] begin[:] if compare[call[name[six].text_type, parameter[name[key]]] in name[selection]] begin[:] variable[value] assign[=] call[name[data]][name[key]] call[name[pairs]][name[key]] assign[=] name[value] call[name[ret]][name[node]] assign[=] name[pairs] return[name[ret]]
keyword[def] identifier[list_nodes_select] ( identifier[nodes] , identifier[selection] , identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] == literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] ) keyword[if] literal[string] keyword[in] identifier[nodes] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] . identifier[format] ( identifier[nodes] [ literal[string] ][ literal[string] ][ literal[string] ][ literal[string] ] ) ) identifier[ret] ={} keyword[for] identifier[node] keyword[in] identifier[nodes] : identifier[pairs] ={} identifier[data] = identifier[nodes] [ identifier[node] ] keyword[for] identifier[key] keyword[in] identifier[data] : keyword[if] identifier[six] . identifier[text_type] ( identifier[key] ) keyword[in] identifier[selection] : identifier[value] = identifier[data] [ identifier[key] ] identifier[pairs] [ identifier[key] ]= identifier[value] identifier[ret] [ identifier[node] ]= identifier[pairs] keyword[return] identifier[ret]
def list_nodes_select(nodes, selection, call=None): """ Return a list of the VMs that are on the provider, with select fields """ if call == 'action': raise SaltCloudSystemExit('The list_nodes_select function must be called with -f or --function.') # depends on [control=['if'], data=[]] if 'error' in nodes: raise SaltCloudSystemExit('An error occurred while listing nodes: {0}'.format(nodes['error']['Errors']['Error']['Message'])) # depends on [control=['if'], data=['nodes']] ret = {} for node in nodes: pairs = {} data = nodes[node] for key in data: if six.text_type(key) in selection: value = data[key] pairs[key] = value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] ret[node] = pairs # depends on [control=['for'], data=['node']] return ret
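Stripped of the salt-cloud error handling, the selection loop is a nested dict comprehension; the sample node data below is made up.

nodes = {'web1': {'id': 'web1', 'size': 'small', 'state': 'running'}}
selection = ['id', 'state']

ret = {node: {k: v for k, v in data.items() if str(k) in selection}
       for node, data in nodes.items()}
print(ret)  # {'web1': {'id': 'web1', 'state': 'running'}}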
def loadAnns(self, ids=[]): """ Load anns with the specified ids. :param ids (int array) : integer ids specifying anns :return: anns (object array) : loaded ann objects """ if type(ids) == list: return [self.anns[id] for id in ids] elif type(ids) == int: return [self.anns[ids]]
def function[loadAnns, parameter[self, ids]]: constant[ Load anns with the specified ids. :param ids (int array) : integer ids specifying anns :return: anns (object array) : loaded ann objects ] if compare[call[name[type], parameter[name[ids]]] equal[==] name[list]] begin[:] return[<ast.ListComp object at 0x7da2041d91b0>]
keyword[def] identifier[loadAnns] ( identifier[self] , identifier[ids] =[]): literal[string] keyword[if] identifier[type] ( identifier[ids] )== identifier[list] : keyword[return] [ identifier[self] . identifier[anns] [ identifier[id] ] keyword[for] identifier[id] keyword[in] identifier[ids] ] keyword[elif] identifier[type] ( identifier[ids] )== identifier[int] : keyword[return] [ identifier[self] . identifier[anns] [ identifier[ids] ]]
def loadAnns(self, ids=[]): """ Load anns with the specified ids. :param ids (int array) : integer ids specifying anns :return: anns (object array) : loaded ann objects """ if type(ids) == list: return [self.anns[id] for id in ids] # depends on [control=['if'], data=[]] elif type(ids) == int: return [self.anns[ids]] # depends on [control=['if'], data=[]]
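Typical pycocotools usage pairs loadAnns with getAnnIds; the annotation file path below is hypothetical.

from pycocotools.coco import COCO

coco = COCO('annotations/instances_val2017.json')  # hypothetical path
ann_ids = coco.getAnnIds(imgIds=[42])
anns = coco.loadAnns(ann_ids)  # also accepts a single int id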
def build(self, context, variant, build_path, install_path, install=False, build_type=BuildType.local): """Perform the build. Note that most of the func args aren't used here - that's because this info is already passed to the custom build command via environment variables. """ ret = {} if self.write_build_scripts: # write out the script that places the user in a build env, where # they can run bez directly themselves. build_env_script = os.path.join(build_path, "build-env") create_forwarding_script(build_env_script, module=("build_system", "custom"), func_name="_FWD__spawn_build_shell", working_dir=self.working_dir, build_path=build_path, variant_index=variant.index, install=install, install_path=install_path) ret["success"] = True ret["build_env_script"] = build_env_script return ret # get build command command = self.package.build_command # False just means no build command if command is False: ret["success"] = True return ret def expand(txt): root = self.package.root install_ = "install" if install else '' return txt.format(root=root, install=install_).strip() if isinstance(command, basestring): if self.build_args: command = command + ' ' + ' '.join(map(quote, self.build_args)) command = expand(command) cmd_str = command else: # list command = command + self.build_args command = map(expand, command) cmd_str = ' '.join(map(quote, command)) if self.verbose: pr = Printer(sys.stdout) pr("Running build command: %s" % cmd_str, heading) # run the build command def _callback(executor): self._add_build_actions(executor, context=context, package=self.package, variant=variant, build_type=build_type, install=install, build_path=build_path, install_path=install_path) if self.opts: # write args defined in ./parse_build_args.py out as env vars extra_args = getattr(self.opts.parser, "_rezbuild_extra_args", []) for key, value in vars(self.opts).iteritems(): if key in extra_args: varname = "__PARSE_ARG_%s" % key.upper() # do some value conversions if isinstance(value, bool): value = 1 if value else 0 elif isinstance(value, (list, tuple)): value = map(str, value) value = map(quote, value) value = ' '.join(value) executor.env[varname] = value retcode, _, _ = context.execute_shell(command=command, block=True, cwd=build_path, actions_callback=_callback) ret["success"] = (not retcode) return ret
def function[build, parameter[self, context, variant, build_path, install_path, install, build_type]]: constant[Perform the build. Note that most of the func args aren't used here - that's because this info is already passed to the custom build command via environment variables. ] variable[ret] assign[=] dictionary[[], []] if name[self].write_build_scripts begin[:] variable[build_env_script] assign[=] call[name[os].path.join, parameter[name[build_path], constant[build-env]]] call[name[create_forwarding_script], parameter[name[build_env_script]]] call[name[ret]][constant[success]] assign[=] constant[True] call[name[ret]][constant[build_env_script]] assign[=] name[build_env_script] return[name[ret]] variable[command] assign[=] name[self].package.build_command if compare[name[command] is constant[False]] begin[:] call[name[ret]][constant[success]] assign[=] constant[True] return[name[ret]] def function[expand, parameter[txt]]: variable[root] assign[=] name[self].package.root variable[install_] assign[=] <ast.IfExp object at 0x7da2045648e0> return[call[call[name[txt].format, parameter[]].strip, parameter[]]] if call[name[isinstance], parameter[name[command], name[basestring]]] begin[:] if name[self].build_args begin[:] variable[command] assign[=] binary_operation[binary_operation[name[command] + constant[ ]] + call[constant[ ].join, parameter[call[name[map], parameter[name[quote], name[self].build_args]]]]] variable[command] assign[=] call[name[expand], parameter[name[command]]] variable[cmd_str] assign[=] name[command] if name[self].verbose begin[:] variable[pr] assign[=] call[name[Printer], parameter[name[sys].stdout]] call[name[pr], parameter[binary_operation[constant[Running build command: %s] <ast.Mod object at 0x7da2590d6920> name[cmd_str]], name[heading]]] def function[_callback, parameter[executor]]: call[name[self]._add_build_actions, parameter[name[executor]]] if name[self].opts begin[:] variable[extra_args] assign[=] call[name[getattr], parameter[name[self].opts.parser, constant[_rezbuild_extra_args], list[[]]]] for taget[tuple[[<ast.Name object at 0x7da1b170f7f0>, <ast.Name object at 0x7da1b170e410>]]] in starred[call[call[name[vars], parameter[name[self].opts]].iteritems, parameter[]]] begin[:] if compare[name[key] in name[extra_args]] begin[:] variable[varname] assign[=] binary_operation[constant[__PARSE_ARG_%s] <ast.Mod object at 0x7da2590d6920> call[name[key].upper, parameter[]]] if call[name[isinstance], parameter[name[value], name[bool]]] begin[:] variable[value] assign[=] <ast.IfExp object at 0x7da1b170ece0> call[name[executor].env][name[varname]] assign[=] name[value] <ast.Tuple object at 0x7da1b17cf670> assign[=] call[name[context].execute_shell, parameter[]] call[name[ret]][constant[success]] assign[=] <ast.UnaryOp object at 0x7da1b17ce500> return[name[ret]]
keyword[def] identifier[build] ( identifier[self] , identifier[context] , identifier[variant] , identifier[build_path] , identifier[install_path] , identifier[install] = keyword[False] , identifier[build_type] = identifier[BuildType] . identifier[local] ): literal[string] identifier[ret] ={} keyword[if] identifier[self] . identifier[write_build_scripts] : identifier[build_env_script] = identifier[os] . identifier[path] . identifier[join] ( identifier[build_path] , literal[string] ) identifier[create_forwarding_script] ( identifier[build_env_script] , identifier[module] =( literal[string] , literal[string] ), identifier[func_name] = literal[string] , identifier[working_dir] = identifier[self] . identifier[working_dir] , identifier[build_path] = identifier[build_path] , identifier[variant_index] = identifier[variant] . identifier[index] , identifier[install] = identifier[install] , identifier[install_path] = identifier[install_path] ) identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[build_env_script] keyword[return] identifier[ret] identifier[command] = identifier[self] . identifier[package] . identifier[build_command] keyword[if] identifier[command] keyword[is] keyword[False] : identifier[ret] [ literal[string] ]= keyword[True] keyword[return] identifier[ret] keyword[def] identifier[expand] ( identifier[txt] ): identifier[root] = identifier[self] . identifier[package] . identifier[root] identifier[install_] = literal[string] keyword[if] identifier[install] keyword[else] literal[string] keyword[return] identifier[txt] . identifier[format] ( identifier[root] = identifier[root] , identifier[install] = identifier[install_] ). identifier[strip] () keyword[if] identifier[isinstance] ( identifier[command] , identifier[basestring] ): keyword[if] identifier[self] . identifier[build_args] : identifier[command] = identifier[command] + literal[string] + literal[string] . identifier[join] ( identifier[map] ( identifier[quote] , identifier[self] . identifier[build_args] )) identifier[command] = identifier[expand] ( identifier[command] ) identifier[cmd_str] = identifier[command] keyword[else] : identifier[command] = identifier[command] + identifier[self] . identifier[build_args] identifier[command] = identifier[map] ( identifier[expand] , identifier[command] ) identifier[cmd_str] = literal[string] . identifier[join] ( identifier[map] ( identifier[quote] , identifier[command] )) keyword[if] identifier[self] . identifier[verbose] : identifier[pr] = identifier[Printer] ( identifier[sys] . identifier[stdout] ) identifier[pr] ( literal[string] % identifier[cmd_str] , identifier[heading] ) keyword[def] identifier[_callback] ( identifier[executor] ): identifier[self] . identifier[_add_build_actions] ( identifier[executor] , identifier[context] = identifier[context] , identifier[package] = identifier[self] . identifier[package] , identifier[variant] = identifier[variant] , identifier[build_type] = identifier[build_type] , identifier[install] = identifier[install] , identifier[build_path] = identifier[build_path] , identifier[install_path] = identifier[install_path] ) keyword[if] identifier[self] . identifier[opts] : identifier[extra_args] = identifier[getattr] ( identifier[self] . identifier[opts] . identifier[parser] , literal[string] ,[]) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[vars] ( identifier[self] . identifier[opts] ). 
identifier[iteritems] (): keyword[if] identifier[key] keyword[in] identifier[extra_args] : identifier[varname] = literal[string] % identifier[key] . identifier[upper] () keyword[if] identifier[isinstance] ( identifier[value] , identifier[bool] ): identifier[value] = literal[int] keyword[if] identifier[value] keyword[else] literal[int] keyword[elif] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )): identifier[value] = identifier[map] ( identifier[str] , identifier[value] ) identifier[value] = identifier[map] ( identifier[quote] , identifier[value] ) identifier[value] = literal[string] . identifier[join] ( identifier[value] ) identifier[executor] . identifier[env] [ identifier[varname] ]= identifier[value] identifier[retcode] , identifier[_] , identifier[_] = identifier[context] . identifier[execute_shell] ( identifier[command] = identifier[command] , identifier[block] = keyword[True] , identifier[cwd] = identifier[build_path] , identifier[actions_callback] = identifier[_callback] ) identifier[ret] [ literal[string] ]=( keyword[not] identifier[retcode] ) keyword[return] identifier[ret]
def build(self, context, variant, build_path, install_path, install=False, build_type=BuildType.local): """Perform the build. Note that most of the func args aren't used here - that's because this info is already passed to the custom build command via environment variables. """ ret = {} if self.write_build_scripts: # write out the script that places the user in a build env, where # they can run bez directly themselves. build_env_script = os.path.join(build_path, 'build-env') create_forwarding_script(build_env_script, module=('build_system', 'custom'), func_name='_FWD__spawn_build_shell', working_dir=self.working_dir, build_path=build_path, variant_index=variant.index, install=install, install_path=install_path) ret['success'] = True ret['build_env_script'] = build_env_script return ret # depends on [control=['if'], data=[]] # get build command command = self.package.build_command # False just means no build command if command is False: ret['success'] = True return ret # depends on [control=['if'], data=[]] def expand(txt): root = self.package.root install_ = 'install' if install else '' return txt.format(root=root, install=install_).strip() if isinstance(command, basestring): if self.build_args: command = command + ' ' + ' '.join(map(quote, self.build_args)) # depends on [control=['if'], data=[]] command = expand(command) cmd_str = command # depends on [control=['if'], data=[]] else: # list command = command + self.build_args command = map(expand, command) cmd_str = ' '.join(map(quote, command)) if self.verbose: pr = Printer(sys.stdout) pr('Running build command: %s' % cmd_str, heading) # depends on [control=['if'], data=[]] # run the build command def _callback(executor): self._add_build_actions(executor, context=context, package=self.package, variant=variant, build_type=build_type, install=install, build_path=build_path, install_path=install_path) if self.opts: # write args defined in ./parse_build_args.py out as env vars extra_args = getattr(self.opts.parser, '_rezbuild_extra_args', []) for (key, value) in vars(self.opts).iteritems(): if key in extra_args: varname = '__PARSE_ARG_%s' % key.upper() # do some value conversions if isinstance(value, bool): value = 1 if value else 0 # depends on [control=['if'], data=[]] elif isinstance(value, (list, tuple)): value = map(str, value) value = map(quote, value) value = ' '.join(value) # depends on [control=['if'], data=[]] executor.env[varname] = value # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] (retcode, _, _) = context.execute_shell(command=command, block=True, cwd=build_path, actions_callback=_callback) ret['success'] = not retcode return ret
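The `{root}`/`{install}` substitution performed by the inner expand helper can be shown on its own (the paths and command are made up):

def expand(txt, root='/src/mypkg', install=True):
    install_ = 'install' if install else ''
    return txt.format(root=root, install=install_).strip()

print(expand('bash {root}/build.sh {install}'))
# bash /src/mypkg/build.sh install
print(expand('bash {root}/build.sh {install}', install=False))
# bash /src/mypkg/build.sh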
def NewPathSpec(cls, type_indicator, **kwargs): """Creates a new path specification for the specific type indicator. Args: type_indicator (str): type indicator. kwargs (dict): keyword arguments depending on the path specification. Returns: PathSpec: path specification. Raises: KeyError: if path specification is not registered. """ if type_indicator not in cls._path_spec_types: raise KeyError( 'Path specification type: {0:s} not set.'.format(type_indicator)) # An empty parent will cause parentless path specifications to raise # so we conveniently remove it here. if 'parent' in kwargs and kwargs['parent'] is None: del kwargs['parent'] path_spec_type = cls._path_spec_types[type_indicator] return path_spec_type(**kwargs)
def function[NewPathSpec, parameter[cls, type_indicator]]: constant[Creates a new path specification for the specific type indicator. Args: type_indicator (str): type indicator. kwargs (dict): keyword arguments depending on the path specification. Returns: PathSpec: path specification. Raises: KeyError: if path specification is not registered. ] if compare[name[type_indicator] <ast.NotIn object at 0x7da2590d7190> name[cls]._path_spec_types] begin[:] <ast.Raise object at 0x7da1b07b9ba0> if <ast.BoolOp object at 0x7da1b07a9c90> begin[:] <ast.Delete object at 0x7da1b07a90c0> variable[path_spec_type] assign[=] call[name[cls]._path_spec_types][name[type_indicator]] return[call[name[path_spec_type], parameter[]]]
keyword[def] identifier[NewPathSpec] ( identifier[cls] , identifier[type_indicator] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[type_indicator] keyword[not] keyword[in] identifier[cls] . identifier[_path_spec_types] : keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[type_indicator] )) keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ] keyword[is] keyword[None] : keyword[del] identifier[kwargs] [ literal[string] ] identifier[path_spec_type] = identifier[cls] . identifier[_path_spec_types] [ identifier[type_indicator] ] keyword[return] identifier[path_spec_type] (** identifier[kwargs] )
def NewPathSpec(cls, type_indicator, **kwargs): """Creates a new path specification for the specific type indicator. Args: type_indicator (str): type indicator. kwargs (dict): keyword arguments depending on the path specification. Returns: PathSpec: path specification. Raises: KeyError: if path specification is not registered. """ if type_indicator not in cls._path_spec_types: raise KeyError('Path specification type: {0:s} not set.'.format(type_indicator)) # depends on [control=['if'], data=['type_indicator']] # An empty parent will cause parentless path specifications to raise # so we conveniently remove it here. if 'parent' in kwargs and kwargs['parent'] is None: del kwargs['parent'] # depends on [control=['if'], data=[]] path_spec_type = cls._path_spec_types[type_indicator] return path_spec_type(**kwargs)
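Hedged usage via dfvfs' public factory, assuming dfvfs is installed; the image path is hypothetical.

from dfvfs.lib import definitions
from dfvfs.path import factory

os_spec = factory.Factory.NewPathSpec(
    definitions.TYPE_INDICATOR_OS, location='/tmp/image.raw')
print(os_spec.type_indicator)  # 'OS'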
def update(self, iiter, H, Y, eta, loss):
    """Update the trace_var in a new iteration."""
    if iiter <= self.niter_trace + 1:
        self.H[iiter] = H
        self.Y[iiter] = Y
    elif iiter > self.niter - self.niter_trace + 1:
        self.H[self.ltrace + iiter - self.niter - 1] = H
        self.Y[self.ltrace + iiter - self.niter - 1] = Y

    self.etas[iiter] = eta
    self.loss[iiter] = loss
    if self.loss[iiter] < self.lmin:
        self.Yh = Y
        self.lmin = self.loss[iiter]
        self.miniter = iiter if iiter != -1 else self.niter + 1
def function[update, parameter[self, iiter, H, Y, eta, loss]]: constant[Update the trace_var in new iteration] if compare[name[iiter] less_or_equal[<=] binary_operation[name[self].niter_trace + constant[1]]] begin[:] call[name[self].H][name[iiter]] assign[=] name[H] call[name[self].Y][name[iiter]] assign[=] name[Y] call[name[self].etas][name[iiter]] assign[=] name[eta] call[name[self].loss][name[iiter]] assign[=] name[loss] if compare[call[name[self].loss][name[iiter]] less[<] name[self].lmin] begin[:] name[self].Yh assign[=] name[Y] name[self].lmin assign[=] call[name[self].loss][name[iiter]] name[self].miniter assign[=] <ast.IfExp object at 0x7da18bc72080>
keyword[def] identifier[update] ( identifier[self] , identifier[iiter] , identifier[H] , identifier[Y] , identifier[eta] , identifier[loss] ): literal[string] keyword[if] identifier[iiter] <= identifier[self] . identifier[niter_trace] + literal[int] : identifier[self] . identifier[H] [ identifier[iiter] ]= identifier[H] identifier[self] . identifier[Y] [ identifier[iiter] ]= identifier[Y] keyword[elif] identifier[iiter] > identifier[self] . identifier[niter] - identifier[self] . identifier[niter_trace] + literal[int] : identifier[self] . identifier[H] [ identifier[self] . identifier[ltrace] + identifier[iiter] - identifier[self] . identifier[niter] - literal[int] ]= identifier[H] identifier[self] . identifier[Y] [ identifier[self] . identifier[ltrace] + identifier[iiter] - identifier[self] . identifier[niter] - literal[int] ]= identifier[Y] identifier[self] . identifier[etas] [ identifier[iiter] ]= identifier[eta] identifier[self] . identifier[loss] [ identifier[iiter] ]= identifier[loss] keyword[if] identifier[self] . identifier[loss] [ identifier[iiter] ]< identifier[self] . identifier[lmin] : identifier[self] . identifier[Yh] = identifier[Y] identifier[self] . identifier[lmin] = identifier[self] . identifier[loss] [ identifier[iiter] ] identifier[self] . identifier[miniter] = identifier[iiter] keyword[if] keyword[not] identifier[iiter] ==- literal[int] keyword[else] identifier[self] . identifier[niter] + literal[int]
def update(self, iiter, H, Y, eta, loss): """Update the trace_var in new iteration""" if iiter <= self.niter_trace + 1: self.H[iiter] = H self.Y[iiter] = Y # depends on [control=['if'], data=['iiter']] elif iiter > self.niter - self.niter_trace + 1: self.H[self.ltrace + iiter - self.niter - 1] = H self.Y[self.ltrace + iiter - self.niter - 1] = Y # depends on [control=['if'], data=['iiter']] self.etas[iiter] = eta self.loss[iiter] = loss if self.loss[iiter] < self.lmin: self.Yh = Y self.lmin = self.loss[iiter] self.miniter = iiter if not iiter == -1 else self.niter + 1 # depends on [control=['if'], data=[]]
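The two branches implement a head/tail trace buffer: only the earliest and latest iterations are stored in H/Y. An index-mapping sketch that replays the arithmetic (the ltrace value below is made up; it is assumed to be the buffer length):

niter, niter_trace, ltrace = 10, 3, 8
for iiter in range(1, niter + 1):
    if iiter <= niter_trace + 1:
        slot = iiter                       # head of the trace
    elif iiter > niter - niter_trace + 1:
        slot = ltrace + iiter - niter - 1  # tail of the trace
    else:
        slot = None                        # not traced
    print(iiter, slot)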
def drop_dose(self): """ Drop the maximum dose and related response values. """ for fld in ("doses", "ns", "means", "stdevs"): arr = getattr(self, fld)[:-1] setattr(self, fld, arr) self._validate()
def function[drop_dose, parameter[self]]: constant[ Drop the maximum dose and related response values. ] for taget[name[fld]] in starred[tuple[[<ast.Constant object at 0x7da204345750>, <ast.Constant object at 0x7da204345090>, <ast.Constant object at 0x7da204346080>, <ast.Constant object at 0x7da204345e70>]]] begin[:] variable[arr] assign[=] call[call[name[getattr], parameter[name[self], name[fld]]]][<ast.Slice object at 0x7da20e954e50>] call[name[setattr], parameter[name[self], name[fld], name[arr]]] call[name[self]._validate, parameter[]]
keyword[def] identifier[drop_dose] ( identifier[self] ): literal[string] keyword[for] identifier[fld] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): identifier[arr] = identifier[getattr] ( identifier[self] , identifier[fld] )[:- literal[int] ] identifier[setattr] ( identifier[self] , identifier[fld] , identifier[arr] ) identifier[self] . identifier[_validate] ()
def drop_dose(self): """ Drop the maximum dose and related response values. """ for fld in ('doses', 'ns', 'means', 'stdevs'): arr = getattr(self, fld)[:-1] setattr(self, fld, arr) # depends on [control=['for'], data=['fld']] self._validate()
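The trimming step in isolation; the field contents are illustrative.

import numpy as np

doses = np.array([0.0, 1.0, 10.0, 100.0])
means = np.array([5.1, 5.3, 6.0, 9.8])
doses, means = doses[:-1], means[:-1]  # drop the maximum dose group
print(doses)  # [ 0.  1. 10.]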
def handle_events(self): """ An event handler that processes events from stdin and calls the on_click function of the respective object. This function is run in another thread, so as to not stall the main thread. """ for event in sys.stdin: if event.startswith('['): continue name = json.loads(event.lstrip(','))['name'] for obj in self.loader.objects: if obj.output_options['name'] == name: obj.on_click(json.loads(event.lstrip(',')))
def function[handle_events, parameter[self]]: constant[ An event handler that processes events from stdin and calls the on_click function of the respective object. This function is run in another thread, so as to not stall the main thread. ] for taget[name[event]] in starred[name[sys].stdin] begin[:] if call[name[event].startswith, parameter[constant[[]]] begin[:] continue variable[name] assign[=] call[call[name[json].loads, parameter[call[name[event].lstrip, parameter[constant[,]]]]]][constant[name]] for taget[name[obj]] in starred[name[self].loader.objects] begin[:] if compare[call[name[obj].output_options][constant[name]] equal[==] name[name]] begin[:] call[name[obj].on_click, parameter[call[name[json].loads, parameter[call[name[event].lstrip, parameter[constant[,]]]]]]]
keyword[def] identifier[handle_events] ( identifier[self] ): literal[string] keyword[for] identifier[event] keyword[in] identifier[sys] . identifier[stdin] : keyword[if] identifier[event] . identifier[startswith] ( literal[string] ): keyword[continue] identifier[name] = identifier[json] . identifier[loads] ( identifier[event] . identifier[lstrip] ( literal[string] ))[ literal[string] ] keyword[for] identifier[obj] keyword[in] identifier[self] . identifier[loader] . identifier[objects] : keyword[if] identifier[obj] . identifier[output_options] [ literal[string] ]== identifier[name] : identifier[obj] . identifier[on_click] ( identifier[json] . identifier[loads] ( identifier[event] . identifier[lstrip] ( literal[string] )))
def handle_events(self): """ An event handler that processes events from stdin and calls the on_click function of the respective object. This function is run in another thread, so as to not stall the main thread. """ for event in sys.stdin: if event.startswith('['): continue # depends on [control=['if'], data=[]] name = json.loads(event.lstrip(','))['name'] for obj in self.loader.objects: if obj.output_options['name'] == name: obj.on_click(json.loads(event.lstrip(','))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['obj']] # depends on [control=['for'], data=['event']]
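A small sketch of the i3bar-style event parsing this handler relies on, fed from an in-memory stream instead of sys.stdin; the event payload is made up. Note that the handler above calls json.loads on the same event twice; parsing once and reusing the dict, as below, is equivalent and cheaper.

import io
import json

# Simulated i3bar click stream: an opening '[' line, then comma-prefixed events.
stream = io.StringIO('[\n,{"name": "clock", "button": 1, "x": 10, "y": 5}\n')

for event in stream:
    if event.startswith('['):
        continue  # skip the JSON array opener, as the handler does
    click = json.loads(event.lstrip(','))
    print(click['name'], 'clicked with button', click['button'])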
def ru_strftime(format=u"%d.%m.%Y", date=None, inflected=False, inflected_day=False, preposition=False): """ Russian strftime without locale @param format: strftime format, default=u'%d.%m.%Y' @type format: C{unicode} @param date: date value, default=None translates to today @type date: C{datetime.date} or C{datetime.datetime} @param inflected: is month inflected, default False @type inflected: C{bool} @param inflected_day: is day inflected, default False @type inflected: C{bool} @param preposition: is preposition used, default False preposition=True automatically implies inflected_day=True @type preposition: C{bool} @return: strftime string @rtype: unicode """ if date is None: date = datetime.datetime.today() weekday = date.weekday() prepos = preposition and DAY_NAMES[weekday][3] or u"" month_idx = inflected and 2 or 1 day_idx = (inflected_day or preposition) and 2 or 1 # for russian typography standard, # 1 April 2007, but 01.04.2007 if u'%b' in format or u'%B' in format: format = format.replace(u'%d', six.text_type(date.day)) format = format.replace(u'%a', prepos+DAY_NAMES[weekday][0]) format = format.replace(u'%A', prepos+DAY_NAMES[weekday][day_idx]) format = format.replace(u'%b', MONTH_NAMES[date.month-1][0]) format = format.replace(u'%B', MONTH_NAMES[date.month-1][month_idx]) # Python 2: strftime's argument must be str # Python 3: strftime's argument str, not a bitestring if six.PY2: # strftime must be str, so encode it to utf8: s_format = format.encode("utf-8") s_res = date.strftime(s_format) # and back to unicode u_res = s_res.decode("utf-8") else: u_res = date.strftime(format) return u_res
def function[ru_strftime, parameter[format, date, inflected, inflected_day, preposition]]: constant[ Russian strftime without locale @param format: strftime format, default=u'%d.%m.%Y' @type format: C{unicode} @param date: date value, default=None translates to today @type date: C{datetime.date} or C{datetime.datetime} @param inflected: is month inflected, default False @type inflected: C{bool} @param inflected_day: is day inflected, default False @type inflected: C{bool} @param preposition: is preposition used, default False preposition=True automatically implies inflected_day=True @type preposition: C{bool} @return: strftime string @rtype: unicode ] if compare[name[date] is constant[None]] begin[:] variable[date] assign[=] call[name[datetime].datetime.today, parameter[]] variable[weekday] assign[=] call[name[date].weekday, parameter[]] variable[prepos] assign[=] <ast.BoolOp object at 0x7da1b0f3a230> variable[month_idx] assign[=] <ast.BoolOp object at 0x7da1b0f3b8e0> variable[day_idx] assign[=] <ast.BoolOp object at 0x7da1b0f39210> if <ast.BoolOp object at 0x7da1b0f383d0> begin[:] variable[format] assign[=] call[name[format].replace, parameter[constant[%d], call[name[six].text_type, parameter[name[date].day]]]] variable[format] assign[=] call[name[format].replace, parameter[constant[%a], binary_operation[name[prepos] + call[call[name[DAY_NAMES]][name[weekday]]][constant[0]]]]] variable[format] assign[=] call[name[format].replace, parameter[constant[%A], binary_operation[name[prepos] + call[call[name[DAY_NAMES]][name[weekday]]][name[day_idx]]]]] variable[format] assign[=] call[name[format].replace, parameter[constant[%b], call[call[name[MONTH_NAMES]][binary_operation[name[date].month - constant[1]]]][constant[0]]]] variable[format] assign[=] call[name[format].replace, parameter[constant[%B], call[call[name[MONTH_NAMES]][binary_operation[name[date].month - constant[1]]]][name[month_idx]]]] if name[six].PY2 begin[:] variable[s_format] assign[=] call[name[format].encode, parameter[constant[utf-8]]] variable[s_res] assign[=] call[name[date].strftime, parameter[name[s_format]]] variable[u_res] assign[=] call[name[s_res].decode, parameter[constant[utf-8]]] return[name[u_res]]
keyword[def] identifier[ru_strftime] ( identifier[format] = literal[string] , identifier[date] = keyword[None] , identifier[inflected] = keyword[False] , identifier[inflected_day] = keyword[False] , identifier[preposition] = keyword[False] ): literal[string] keyword[if] identifier[date] keyword[is] keyword[None] : identifier[date] = identifier[datetime] . identifier[datetime] . identifier[today] () identifier[weekday] = identifier[date] . identifier[weekday] () identifier[prepos] = identifier[preposition] keyword[and] identifier[DAY_NAMES] [ identifier[weekday] ][ literal[int] ] keyword[or] literal[string] identifier[month_idx] = identifier[inflected] keyword[and] literal[int] keyword[or] literal[int] identifier[day_idx] =( identifier[inflected_day] keyword[or] identifier[preposition] ) keyword[and] literal[int] keyword[or] literal[int] keyword[if] literal[string] keyword[in] identifier[format] keyword[or] literal[string] keyword[in] identifier[format] : identifier[format] = identifier[format] . identifier[replace] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[date] . identifier[day] )) identifier[format] = identifier[format] . identifier[replace] ( literal[string] , identifier[prepos] + identifier[DAY_NAMES] [ identifier[weekday] ][ literal[int] ]) identifier[format] = identifier[format] . identifier[replace] ( literal[string] , identifier[prepos] + identifier[DAY_NAMES] [ identifier[weekday] ][ identifier[day_idx] ]) identifier[format] = identifier[format] . identifier[replace] ( literal[string] , identifier[MONTH_NAMES] [ identifier[date] . identifier[month] - literal[int] ][ literal[int] ]) identifier[format] = identifier[format] . identifier[replace] ( literal[string] , identifier[MONTH_NAMES] [ identifier[date] . identifier[month] - literal[int] ][ identifier[month_idx] ]) keyword[if] identifier[six] . identifier[PY2] : identifier[s_format] = identifier[format] . identifier[encode] ( literal[string] ) identifier[s_res] = identifier[date] . identifier[strftime] ( identifier[s_format] ) identifier[u_res] = identifier[s_res] . identifier[decode] ( literal[string] ) keyword[else] : identifier[u_res] = identifier[date] . identifier[strftime] ( identifier[format] ) keyword[return] identifier[u_res]
def ru_strftime(format=u'%d.%m.%Y', date=None, inflected=False, inflected_day=False, preposition=False):
    """
    Russian strftime without locale

    @param format: strftime format, default=u'%d.%m.%Y'
    @type format: C{unicode}

    @param date: date value, default=None translates to today
    @type date: C{datetime.date} or C{datetime.datetime}

    @param inflected: is month inflected, default False
    @type inflected: C{bool}

    @param inflected_day: is day inflected, default False
    @type inflected_day: C{bool}

    @param preposition: is preposition used, default False
        preposition=True automatically implies inflected_day=True
    @type preposition: C{bool}

    @return: strftime string
    @rtype: unicode
    """
    if date is None:
        date = datetime.datetime.today() # depends on [control=['if'], data=['date']]
    weekday = date.weekday()
    prepos = preposition and DAY_NAMES[weekday][3] or u''
    month_idx = inflected and 2 or 1
    day_idx = (inflected_day or preposition) and 2 or 1
    # for russian typography standard,
    # 1 April 2007, but 01.04.2007
    if u'%b' in format or u'%B' in format:
        format = format.replace(u'%d', six.text_type(date.day)) # depends on [control=['if'], data=[]]
    format = format.replace(u'%a', prepos + DAY_NAMES[weekday][0])
    format = format.replace(u'%A', prepos + DAY_NAMES[weekday][day_idx])
    format = format.replace(u'%b', MONTH_NAMES[date.month - 1][0])
    format = format.replace(u'%B', MONTH_NAMES[date.month - 1][month_idx])
    # Python 2: strftime's argument must be str
    # Python 3: strftime's argument must be str, not a bytestring
    if six.PY2:
        # strftime must be str, so encode it to utf8:
        s_format = format.encode('utf-8')
        s_res = date.strftime(s_format)
        # and back to unicode
        u_res = s_res.decode('utf-8') # depends on [control=['if'], data=[]]
    else:
        u_res = date.strftime(format)
    return u_res
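A usage sketch, assuming this is pytils' ru_strftime (the signature and docstring match pytils.dt) and that pytils is installed; the expected outputs in the comments follow from the MONTH_NAMES/DAY_NAMES tables it uses and are approximate.

# -*- coding: utf-8 -*-
import datetime
from pytils.dt import ru_strftime  # assumption: the function above is pytils'

d = datetime.date(2007, 4, 1)  # a Sunday
print(ru_strftime(u"%d %B %Y", date=d, inflected=True))  # roughly u'1 апреля 2007'
print(ru_strftime(u"%A", date=d, preposition=True))      # weekday with preposition, e.g. u'в воскресенье'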
def explore(layer=None):
    """Function used to discover the Scapy layers and protocols.
    It helps to see which packets exist in contrib or layer files.

    params:
     - layer: If specified, the function will explore the layer. If not,
              the GUI mode will be activated, to browse the available layers

    examples:
      >>> explore()  # Launches the GUI
      >>> explore("dns")  # Explore scapy.layers.dns
      >>> explore("http2") # Explore scapy.contrib.http2
      >>> explore(scapy.layers.bluetooth4LE)

    Note: to search a packet by name, use ls("name") rather than explore.
    """
    if layer is None:  # GUI MODE
        if not conf.interactive:
            raise Scapy_Exception("explore() GUI-mode cannot be run in "
                                  "interactive mode. Please provide a "
                                  "'layer' parameter !")
        # 0 - Imports
        try:
            import prompt_toolkit
        except ImportError:
            raise ImportError("prompt_toolkit is not installed ! "
                              "You may install IPython, which contains it, via"
                              " `pip install ipython`")
        if not _version_checker(prompt_toolkit, (2, 0)):
            raise ImportError("prompt_toolkit >= 2.0.0 is required !")
        # Only available with prompt_toolkit > 2.0, not released on PyPi yet
        from prompt_toolkit.shortcuts.dialogs import radiolist_dialog, \
            button_dialog
        from prompt_toolkit.formatted_text import HTML
        # 1 - Ask for layer or contrib
        action = button_dialog(
            title="Scapy v%s" % conf.version,
            text=HTML(
                six.text_type(
                    '<style bg="white" fg="red">Chose the type of packets'
                    ' you want to explore:</style>'
                )
            ),
            buttons=[
                (six.text_type("Layers"), "layers"),
                (six.text_type("Contribs"), "contribs"),
                (six.text_type("Cancel"), "cancel")
            ])
        # 2 - Retrieve list of Packets
        if action == "layers":
            # Get all loaded layers
            _radio_values = conf.layers.layers()
            # Restrict to layers-only (not contribs) + packet.py and asn1*.py
            _radio_values = [x for x in _radio_values
                             if ("layers" in x[0] or "packet" in x[0]
                                 or "asn1" in x[0])]
        elif action == "contribs":
            # Get all existing contribs
            from scapy.main import list_contrib
            _radio_values = list_contrib(ret=True)
            _radio_values = [(x['name'], x['description'])
                             for x in _radio_values]
            # Remove very specific modules
            _radio_values = [x for x in _radio_values
                             if not ("can" in x[0])]
        else:
            # Escape/Cancel was pressed
            return
        # Python 2 compat
        if six.PY2:
            _radio_values = [(six.text_type(x), six.text_type(y))
                             for x, y in _radio_values]
        # 3 - Ask for the layer/contrib module to explore
        result = radiolist_dialog(
            values=_radio_values,
            title="Scapy v%s" % conf.version,
            text=HTML(
                six.text_type(
                    '<style bg="white" fg="red">Please select a layer among'
                    ' the following, to see all packets contained in'
                    ' it:</style>'
                )
            ))
        if result is None:
            return  # User pressed "Cancel"
        # 4 - (Contrib only): load contrib
        if action == "contribs":
            from scapy.main import load_contrib
            load_contrib(result)
            result = "scapy.contrib." + result
    else:  # NON-GUI MODE
        # We handle layer as a short layer name, full layer name
        # or the module itself
        if isinstance(layer, types.ModuleType):
            layer = layer.__name__
        if isinstance(layer, str):
            if layer.startswith("scapy.layers."):
                result = layer
            else:
                if layer.startswith("scapy.contrib."):
                    layer = layer.replace("scapy.contrib.", "")
                from scapy.main import load_contrib
                load_contrib(layer)
                result_layer, result_contrib = (("scapy.layers.%s" % layer),
                                                ("scapy.contrib.%s" % layer))
                if result_layer in conf.layers.ldict:
                    result = result_layer
                elif result_contrib in conf.layers.ldict:
                    result = result_contrib
                else:
                    raise Scapy_Exception("Unknown scapy module '%s'" % layer)
        else:
            warning("Wrong usage ! Check out help(explore)")
            return
    # COMMON PART
    # Get the list of all Packets contained in that module
    try:
        all_layers = conf.layers.ldict[result]
    except KeyError:
        raise Scapy_Exception("Unknown scapy module '%s'" % layer)
    # Print
    print(conf.color_theme.layer_name("Packets contained in %s:" % result))
    rtlst = [(lay.__name__ or "", lay._name or "") for lay in all_layers]
    print(pretty_list(rtlst, [("Class", "Name")], borders=True))
def function[explore, parameter[layer]]: constant[Function used to discover the Scapy layers and protocols. It helps to see which packets exists in contrib or layer files. params: - layer: If specified, the function will explore the layer. If not, the GUI mode will be activated, to browse the available layers examples: >>> explore() # Launches the GUI >>> explore("dns") # Explore scapy.layers.dns >>> explore("http2") # Explore scapy.contrib.http2 >>> explore(scapy.layers.bluetooth4LE) Note: to search a packet by name, use ls("name") rather than explore. ] if compare[name[layer] is constant[None]] begin[:] if <ast.UnaryOp object at 0x7da1b2125450> begin[:] <ast.Raise object at 0x7da1b21277f0> <ast.Try object at 0x7da1b2125db0> if <ast.UnaryOp object at 0x7da1b2126200> begin[:] <ast.Raise object at 0x7da1b2125ea0> from relative_module[prompt_toolkit.shortcuts.dialogs] import module[radiolist_dialog], module[button_dialog] from relative_module[prompt_toolkit.formatted_text] import module[HTML] variable[action] assign[=] call[name[button_dialog], parameter[]] if compare[name[action] equal[==] constant[layers]] begin[:] variable[_radio_values] assign[=] call[name[conf].layers.layers, parameter[]] variable[_radio_values] assign[=] <ast.ListComp object at 0x7da1b2126470> if name[six].PY2 begin[:] variable[_radio_values] assign[=] <ast.ListComp object at 0x7da1b21253f0> variable[result] assign[=] call[name[radiolist_dialog], parameter[]] if compare[name[result] is constant[None]] begin[:] return[None] if compare[name[action] equal[==] constant[contribs]] begin[:] from relative_module[scapy.main] import module[load_contrib] call[name[load_contrib], parameter[name[result]]] variable[result] assign[=] binary_operation[constant[scapy.contrib.] + name[result]] <ast.Try object at 0x7da1b21d6a10> call[name[print], parameter[call[name[conf].color_theme.layer_name, parameter[binary_operation[constant[Packets contained in %s:] <ast.Mod object at 0x7da2590d6920> name[result]]]]]] variable[rtlst] assign[=] <ast.ListComp object at 0x7da1b21d48b0> call[name[print], parameter[call[name[pretty_list], parameter[name[rtlst], list[[<ast.Tuple object at 0x7da1b21d7970>]]]]]]
keyword[def] identifier[explore] ( identifier[layer] = keyword[None] ):
 literal[string]
 keyword[if] identifier[layer] keyword[is] keyword[None] :
 keyword[if] keyword[not] identifier[conf] . identifier[interactive] :
 keyword[raise] identifier[Scapy_Exception] ( literal[string]
 literal[string]
 literal[string] )
 keyword[try] :
 keyword[import] identifier[prompt_toolkit]
 keyword[except] identifier[ImportError] :
 keyword[raise] identifier[ImportError] ( literal[string]
 literal[string]
 literal[string] )
 keyword[if] keyword[not] identifier[_version_checker] ( identifier[prompt_toolkit] ,( literal[int] , literal[int] )):
 keyword[raise] identifier[ImportError] ( literal[string] )
 keyword[from] identifier[prompt_toolkit] . identifier[shortcuts] . identifier[dialogs] keyword[import] identifier[radiolist_dialog] , identifier[button_dialog]
 keyword[from] identifier[prompt_toolkit] . identifier[formatted_text] keyword[import] identifier[HTML]
 identifier[action] = identifier[button_dialog] (
 identifier[title] = literal[string] % identifier[conf] . identifier[version] ,
 identifier[text] = identifier[HTML] (
 identifier[six] . identifier[text_type] (
 literal[string]
 literal[string]
 )
 ),
 identifier[buttons] =[
 ( identifier[six] . identifier[text_type] ( literal[string] ), literal[string] ),
 ( identifier[six] . identifier[text_type] ( literal[string] ), literal[string] ),
 ( identifier[six] . identifier[text_type] ( literal[string] ), literal[string] )
 ])
 keyword[if] identifier[action] == literal[string] :
 identifier[_radio_values] = identifier[conf] . identifier[layers] . identifier[layers] ()
 identifier[_radio_values] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[_radio_values]
 keyword[if] ( literal[string] keyword[in] identifier[x] [ literal[int] ] keyword[or] literal[string] keyword[in] identifier[x] [ literal[int] ]
 keyword[or] literal[string] keyword[in] identifier[x] [ literal[int] ])]
 keyword[elif] identifier[action] == literal[string] :
 keyword[from] identifier[scapy] . identifier[main] keyword[import] identifier[list_contrib]
 identifier[_radio_values] = identifier[list_contrib] ( identifier[ret] = keyword[True] )
 identifier[_radio_values] =[( identifier[x] [ literal[string] ], identifier[x] [ literal[string] ])
 keyword[for] identifier[x] keyword[in] identifier[_radio_values] ]
 identifier[_radio_values] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[_radio_values] keyword[if] keyword[not] ( literal[string] keyword[in] identifier[x] [ literal[int] ])]
 keyword[else] :
 keyword[return]
 keyword[if] identifier[six] . identifier[PY2] :
 identifier[_radio_values] =[( identifier[six] . identifier[text_type] ( identifier[x] ), identifier[six] . identifier[text_type] ( identifier[y] ))
 keyword[for] identifier[x] , identifier[y] keyword[in] identifier[_radio_values] ]
 identifier[result] = identifier[radiolist_dialog] (
 identifier[values] = identifier[_radio_values] ,
 identifier[title] = literal[string] % identifier[conf] . identifier[version] ,
 identifier[text] = identifier[HTML] (
 identifier[six] . identifier[text_type] (
 literal[string]
 literal[string]
 literal[string]
 )
 ))
 keyword[if] identifier[result] keyword[is] keyword[None] :
 keyword[return]
 keyword[if] identifier[action] == literal[string] :
 keyword[from] identifier[scapy] . identifier[main] keyword[import] identifier[load_contrib]
 identifier[load_contrib] ( identifier[result] )
 identifier[result] = literal[string] + identifier[result]
 keyword[else] :
 keyword[if] identifier[isinstance] ( identifier[layer] , identifier[types] . identifier[ModuleType] ):
 identifier[layer] = identifier[layer] . identifier[__name__]
 keyword[if] identifier[isinstance] ( identifier[layer] , identifier[str] ):
 keyword[if] identifier[layer] . identifier[startswith] ( literal[string] ):
 identifier[result] = identifier[layer]
 keyword[else] :
 keyword[if] identifier[layer] . identifier[startswith] ( literal[string] ):
 identifier[layer] = identifier[layer] . identifier[replace] ( literal[string] , literal[string] )
 keyword[from] identifier[scapy] . identifier[main] keyword[import] identifier[load_contrib]
 identifier[load_contrib] ( identifier[layer] )
 identifier[result_layer] , identifier[result_contrib] =(( literal[string] % identifier[layer] ),
 ( literal[string] % identifier[layer] ))
 keyword[if] identifier[result_layer] keyword[in] identifier[conf] . identifier[layers] . identifier[ldict] :
 identifier[result] = identifier[result_layer]
 keyword[elif] identifier[result_contrib] keyword[in] identifier[conf] . identifier[layers] . identifier[ldict] :
 identifier[result] = identifier[result_contrib]
 keyword[else] :
 keyword[raise] identifier[Scapy_Exception] ( literal[string] % identifier[layer] )
 keyword[else] :
 identifier[warning] ( literal[string] )
 keyword[return]
 keyword[try] :
 identifier[all_layers] = identifier[conf] . identifier[layers] . identifier[ldict] [ identifier[result] ]
 keyword[except] identifier[KeyError] :
 keyword[raise] identifier[Scapy_Exception] ( literal[string] % identifier[layer] )
 identifier[print] ( identifier[conf] . identifier[color_theme] . identifier[layer_name] ( literal[string] % identifier[result] ))
 identifier[rtlst] =[( identifier[lay] . identifier[__name__] keyword[or] literal[string] , identifier[lay] . identifier[_name] keyword[or] literal[string] ) keyword[for] identifier[lay] keyword[in] identifier[all_layers] ]
 identifier[print] ( identifier[pretty_list] ( identifier[rtlst] ,[( literal[string] , literal[string] )], identifier[borders] = keyword[True] ))
def explore(layer=None):
    """Function used to discover the Scapy layers and protocols.
    It helps to see which packets exist in contrib or layer files.

    params:
     - layer: If specified, the function will explore the layer. If not,
              the GUI mode will be activated, to browse the available layers

    examples:
      >>> explore()  # Launches the GUI
      >>> explore("dns")  # Explore scapy.layers.dns
      >>> explore("http2") # Explore scapy.contrib.http2
      >>> explore(scapy.layers.bluetooth4LE)

    Note: to search a packet by name, use ls("name") rather than explore.
    """
    if layer is None:  # GUI MODE
        if not conf.interactive:
            raise Scapy_Exception("explore() GUI-mode cannot be run in interactive mode. Please provide a 'layer' parameter !") # depends on [control=['if'], data=[]]
        # 0 - Imports
        try:
            import prompt_toolkit # depends on [control=['try'], data=[]]
        except ImportError:
            raise ImportError('prompt_toolkit is not installed ! You may install IPython, which contains it, via `pip install ipython`') # depends on [control=['except'], data=[]]
        if not _version_checker(prompt_toolkit, (2, 0)):
            raise ImportError('prompt_toolkit >= 2.0.0 is required !') # depends on [control=['if'], data=[]]
        # Only available with prompt_toolkit > 2.0, not released on PyPi yet
        from prompt_toolkit.shortcuts.dialogs import radiolist_dialog, button_dialog
        from prompt_toolkit.formatted_text import HTML
        # 1 - Ask for layer or contrib
        action = button_dialog(title='Scapy v%s' % conf.version, text=HTML(six.text_type('<style bg="white" fg="red">Chose the type of packets you want to explore:</style>')), buttons=[(six.text_type('Layers'), 'layers'), (six.text_type('Contribs'), 'contribs'), (six.text_type('Cancel'), 'cancel')])
        # 2 - Retrieve list of Packets
        if action == 'layers':
            # Get all loaded layers
            _radio_values = conf.layers.layers()
            # Restrict to layers-only (not contribs) + packet.py and asn1*.py
            _radio_values = [x for x in _radio_values if 'layers' in x[0] or 'packet' in x[0] or 'asn1' in x[0]] # depends on [control=['if'], data=[]]
        elif action == 'contribs':
            # Get all existing contribs
            from scapy.main import list_contrib
            _radio_values = list_contrib(ret=True)
            _radio_values = [(x['name'], x['description']) for x in _radio_values]
            # Remove very specific modules
            _radio_values = [x for x in _radio_values if not 'can' in x[0]] # depends on [control=['if'], data=[]]
        else:
            # Escape/Cancel was pressed
            return
        # Python 2 compat
        if six.PY2:
            _radio_values = [(six.text_type(x), six.text_type(y)) for (x, y) in _radio_values] # depends on [control=['if'], data=[]]
        # 3 - Ask for the layer/contrib module to explore
        result = radiolist_dialog(values=_radio_values, title='Scapy v%s' % conf.version, text=HTML(six.text_type('<style bg="white" fg="red">Please select a layer among the following, to see all packets contained in it:</style>')))
        if result is None:
            return  # User pressed "Cancel" # depends on [control=['if'], data=[]]
        # 4 - (Contrib only): load contrib
        if action == 'contribs':
            from scapy.main import load_contrib
            load_contrib(result)
            result = 'scapy.contrib.' + result # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        # NON-GUI MODE
        # We handle layer as a short layer name, full layer name
        # or the module itself
        if isinstance(layer, types.ModuleType):
            layer = layer.__name__ # depends on [control=['if'], data=[]]
        if isinstance(layer, str):
            if layer.startswith('scapy.layers.'):
                result = layer # depends on [control=['if'], data=[]]
            else:
                if layer.startswith('scapy.contrib.'):
                    layer = layer.replace('scapy.contrib.', '') # depends on [control=['if'], data=[]]
                from scapy.main import load_contrib
                load_contrib(layer)
                (result_layer, result_contrib) = ('scapy.layers.%s' % layer, 'scapy.contrib.%s' % layer)
                if result_layer in conf.layers.ldict:
                    result = result_layer # depends on [control=['if'], data=['result_layer']]
                elif result_contrib in conf.layers.ldict:
                    result = result_contrib # depends on [control=['if'], data=['result_contrib']]
                else:
                    raise Scapy_Exception("Unknown scapy module '%s'" % layer) # depends on [control=['if'], data=[]]
        else:
            warning('Wrong usage ! Check out help(explore)')
            return
    # COMMON PART
    # Get the list of all Packets contained in that module
    try:
        all_layers = conf.layers.ldict[result] # depends on [control=['try'], data=[]]
    except KeyError:
        raise Scapy_Exception("Unknown scapy module '%s'" % layer) # depends on [control=['except'], data=[]]
    # Print
    print(conf.color_theme.layer_name('Packets contained in %s:' % result))
    rtlst = [(lay.__name__ or '', lay._name or '') for lay in all_layers]
    print(pretty_list(rtlst, [('Class', 'Name')], borders=True))
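Non-GUI usage, matching the examples in the docstring above; the import assumes a Scapy release recent enough to export explore() (roughly 2.4.3 and later).

from scapy.all import explore  # assumption: Scapy >= 2.4.3

explore("dns")    # prints the Packet classes in scapy.layers.dns
explore("http2")  # loads scapy.contrib.http2, then prints its Packet classes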
def phase_bin_magseries(phases, mags, binsize=0.005, minbinelems=7): '''Bins a phased magnitude/flux time-series using the bin size provided. Parameters ---------- phases,mags : np.array The phased magnitude/flux time-series to bin in phase. Non-finite elements will be removed from these arrays. At least 10 elements in each array are required for this function to operate. binsize : float The bin size to use to group together measurements closer than this amount in phase. This is in units of phase. minbinelems : int The minimum number of elements required per bin to include it in the output. Returns ------- dict A dict of the following form is returned:: {'phasebin_indices': a list of the index arrays into the nan-filtered input arrays per each bin, 'phasebins': list of bin boundaries for each bin, 'nbins': the number of bins generated, 'binnedphases': the phase values associated with each phase bin; this is the median of the phase value in each bin, 'binnedmags': the mag/flux values associated with each phase bin; this is the median of the mags/fluxes in each bin} ''' # check if the input arrays are ok if not(phases.shape and mags.shape and len(phases) > 10 and len(mags) > 10): LOGERROR("input time/mag arrays don't have enough elements") return # find all the finite values of the magnitudes and phases finiteind = np.isfinite(mags) & np.isfinite(phases) finite_phases = phases[finiteind] finite_mags = mags[finiteind] nbins = int(np.ceil((np.nanmax(finite_phases) - np.nanmin(finite_phases))/binsize) + 1) minphase = np.nanmin(finite_phases) phasebins = [(minphase + x*binsize) for x in range(nbins)] # make a KD-tree on the PHASEs so we can do fast distance calculations. we # need to add a bogus y coord to make this a problem that KD-trees can # solve. time_coords = np.array([[x,1.0] for x in finite_phases]) phasetree = cKDTree(time_coords) binned_finite_phaseseries_indices = [] collected_binned_mags = {} for phase in phasebins: # find all bin indices close to within binsize of this point using the # cKDTree query. we use the p-norm = 1 for pairwise Euclidean distance. bin_indices = phasetree.query_ball_point(np.array([phase,1.0]), binsize/2.0, p=1.0) # if the bin_indices have already been collected, then we're # done with this bin, move to the next one. if they haven't, # then this is the start of a new bin. if (bin_indices not in binned_finite_phaseseries_indices and len(bin_indices) >= minbinelems): binned_finite_phaseseries_indices.append(bin_indices) # convert to ndarrays binned_finite_phaseseries_indices = [np.array(x) for x in binned_finite_phaseseries_indices] collected_binned_mags['phasebins_indices'] = ( binned_finite_phaseseries_indices ) collected_binned_mags['phasebins'] = phasebins collected_binned_mags['nbins'] = len(binned_finite_phaseseries_indices) # collect the finite_phases binned_phase = np.array([np.median(finite_phases[x]) for x in binned_finite_phaseseries_indices]) collected_binned_mags['binnedphases'] = binned_phase collected_binned_mags['binsize'] = binsize # median bin the magnitudes according to the calculated indices collected_binned_mags['binnedmags'] = ( np.array([np.median(finite_mags[x]) for x in binned_finite_phaseseries_indices]) ) return collected_binned_mags
def function[phase_bin_magseries, parameter[phases, mags, binsize, minbinelems]]:
    constant[Bins a phased magnitude/flux time-series using the bin size provided.

    Parameters
    ----------

    phases,mags : np.array
        The phased magnitude/flux time-series to bin in phase. Non-finite
        elements will be removed from these arrays. At least 10 elements in each
        array are required for this function to operate.

    binsize : float
        The bin size to use to group together measurements closer than this
        amount in phase. This is in units of phase.

    minbinelems : int
        The minimum number of elements required per bin to include it in the
        output.

    Returns
    -------

    dict
        A dict of the following form is returned::

            {'phasebin_indices': a list of the index arrays into the
                                 nan-filtered input arrays per each bin,
             'phasebins': list of bin boundaries for each bin,
             'nbins': the number of bins generated,
             'binnedphases': the phase values associated with each phase bin;
                            this is the median of the phase value in each bin,
             'binnedmags': the mag/flux values associated with each phase bin;
                           this is the median of the mags/fluxes in each bin}

    ]
    if <ast.UnaryOp object at 0x7da1b00bc0a0> begin[:]
        call[name[LOGERROR], parameter[constant[input time/mag arrays don't have enough elements]]]
        return[None]
    variable[finiteind] assign[=] binary_operation[call[name[np].isfinite, parameter[name[mags]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[np].isfinite, parameter[name[phases]]]]
    variable[finite_phases] assign[=] call[name[phases]][name[finiteind]]
    variable[finite_mags] assign[=] call[name[mags]][name[finiteind]]
    variable[nbins] assign[=] call[name[int], parameter[binary_operation[call[name[np].ceil, parameter[binary_operation[binary_operation[call[name[np].nanmax, parameter[name[finite_phases]]] - call[name[np].nanmin, parameter[name[finite_phases]]]] / name[binsize]]]] + constant[1]]]]
    variable[minphase] assign[=] call[name[np].nanmin, parameter[name[finite_phases]]]
    variable[phasebins] assign[=] <ast.ListComp object at 0x7da1b003c5e0>
    variable[time_coords] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b003c730>]]
    variable[phasetree] assign[=] call[name[cKDTree], parameter[name[time_coords]]]
    variable[binned_finite_phaseseries_indices] assign[=] list[[]]
    variable[collected_binned_mags] assign[=] dictionary[[], []]
    for taget[name[phase]] in starred[name[phasebins]] begin[:]
        variable[bin_indices] assign[=] call[name[phasetree].query_ball_point, parameter[call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b00b93c0>, <ast.Constant object at 0x7da1b00baa70>]]]], binary_operation[name[binsize] / constant[2.0]]]]
        if <ast.BoolOp object at 0x7da1b00bb5e0> begin[:]
            call[name[binned_finite_phaseseries_indices].append, parameter[name[bin_indices]]]
    variable[binned_finite_phaseseries_indices] assign[=] <ast.ListComp object at 0x7da1b00baf50>
    call[name[collected_binned_mags]][constant[phasebins_indices]] assign[=] name[binned_finite_phaseseries_indices]
    call[name[collected_binned_mags]][constant[phasebins]] assign[=] name[phasebins]
    call[name[collected_binned_mags]][constant[nbins]] assign[=] call[name[len], parameter[name[binned_finite_phaseseries_indices]]]
    variable[binned_phase] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b00b98d0>]]
    call[name[collected_binned_mags]][constant[binnedphases]] assign[=] name[binned_phase]
    call[name[collected_binned_mags]][constant[binsize]] assign[=] name[binsize]
    call[name[collected_binned_mags]][constant[binnedmags]] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b00ba620>]]
    return[name[collected_binned_mags]]
keyword[def] identifier[phase_bin_magseries] ( identifier[phases] , identifier[mags] , identifier[binsize] = literal[int] , identifier[minbinelems] = literal[int] ): literal[string] keyword[if] keyword[not] ( identifier[phases] . identifier[shape] keyword[and] identifier[mags] . identifier[shape] keyword[and] identifier[len] ( identifier[phases] )> literal[int] keyword[and] identifier[len] ( identifier[mags] )> literal[int] ): identifier[LOGERROR] ( literal[string] ) keyword[return] identifier[finiteind] = identifier[np] . identifier[isfinite] ( identifier[mags] )& identifier[np] . identifier[isfinite] ( identifier[phases] ) identifier[finite_phases] = identifier[phases] [ identifier[finiteind] ] identifier[finite_mags] = identifier[mags] [ identifier[finiteind] ] identifier[nbins] = identifier[int] ( identifier[np] . identifier[ceil] (( identifier[np] . identifier[nanmax] ( identifier[finite_phases] )- identifier[np] . identifier[nanmin] ( identifier[finite_phases] ))/ identifier[binsize] )+ literal[int] ) identifier[minphase] = identifier[np] . identifier[nanmin] ( identifier[finite_phases] ) identifier[phasebins] =[( identifier[minphase] + identifier[x] * identifier[binsize] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[nbins] )] identifier[time_coords] = identifier[np] . identifier[array] ([[ identifier[x] , literal[int] ] keyword[for] identifier[x] keyword[in] identifier[finite_phases] ]) identifier[phasetree] = identifier[cKDTree] ( identifier[time_coords] ) identifier[binned_finite_phaseseries_indices] =[] identifier[collected_binned_mags] ={} keyword[for] identifier[phase] keyword[in] identifier[phasebins] : identifier[bin_indices] = identifier[phasetree] . identifier[query_ball_point] ( identifier[np] . identifier[array] ([ identifier[phase] , literal[int] ]), identifier[binsize] / literal[int] , identifier[p] = literal[int] ) keyword[if] ( identifier[bin_indices] keyword[not] keyword[in] identifier[binned_finite_phaseseries_indices] keyword[and] identifier[len] ( identifier[bin_indices] )>= identifier[minbinelems] ): identifier[binned_finite_phaseseries_indices] . identifier[append] ( identifier[bin_indices] ) identifier[binned_finite_phaseseries_indices] =[ identifier[np] . identifier[array] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[binned_finite_phaseseries_indices] ] identifier[collected_binned_mags] [ literal[string] ]=( identifier[binned_finite_phaseseries_indices] ) identifier[collected_binned_mags] [ literal[string] ]= identifier[phasebins] identifier[collected_binned_mags] [ literal[string] ]= identifier[len] ( identifier[binned_finite_phaseseries_indices] ) identifier[binned_phase] = identifier[np] . identifier[array] ([ identifier[np] . identifier[median] ( identifier[finite_phases] [ identifier[x] ]) keyword[for] identifier[x] keyword[in] identifier[binned_finite_phaseseries_indices] ]) identifier[collected_binned_mags] [ literal[string] ]= identifier[binned_phase] identifier[collected_binned_mags] [ literal[string] ]= identifier[binsize] identifier[collected_binned_mags] [ literal[string] ]=( identifier[np] . identifier[array] ([ identifier[np] . identifier[median] ( identifier[finite_mags] [ identifier[x] ]) keyword[for] identifier[x] keyword[in] identifier[binned_finite_phaseseries_indices] ]) ) keyword[return] identifier[collected_binned_mags]
def phase_bin_magseries(phases, mags, binsize=0.005, minbinelems=7): """Bins a phased magnitude/flux time-series using the bin size provided. Parameters ---------- phases,mags : np.array The phased magnitude/flux time-series to bin in phase. Non-finite elements will be removed from these arrays. At least 10 elements in each array are required for this function to operate. binsize : float The bin size to use to group together measurements closer than this amount in phase. This is in units of phase. minbinelems : int The minimum number of elements required per bin to include it in the output. Returns ------- dict A dict of the following form is returned:: {'phasebin_indices': a list of the index arrays into the nan-filtered input arrays per each bin, 'phasebins': list of bin boundaries for each bin, 'nbins': the number of bins generated, 'binnedphases': the phase values associated with each phase bin; this is the median of the phase value in each bin, 'binnedmags': the mag/flux values associated with each phase bin; this is the median of the mags/fluxes in each bin} """ # check if the input arrays are ok if not (phases.shape and mags.shape and (len(phases) > 10) and (len(mags) > 10)): LOGERROR("input time/mag arrays don't have enough elements") return # depends on [control=['if'], data=[]] # find all the finite values of the magnitudes and phases finiteind = np.isfinite(mags) & np.isfinite(phases) finite_phases = phases[finiteind] finite_mags = mags[finiteind] nbins = int(np.ceil((np.nanmax(finite_phases) - np.nanmin(finite_phases)) / binsize) + 1) minphase = np.nanmin(finite_phases) phasebins = [minphase + x * binsize for x in range(nbins)] # make a KD-tree on the PHASEs so we can do fast distance calculations. we # need to add a bogus y coord to make this a problem that KD-trees can # solve. time_coords = np.array([[x, 1.0] for x in finite_phases]) phasetree = cKDTree(time_coords) binned_finite_phaseseries_indices = [] collected_binned_mags = {} for phase in phasebins: # find all bin indices close to within binsize of this point using the # cKDTree query. we use the p-norm = 1 for pairwise Euclidean distance. bin_indices = phasetree.query_ball_point(np.array([phase, 1.0]), binsize / 2.0, p=1.0) # if the bin_indices have already been collected, then we're # done with this bin, move to the next one. if they haven't, # then this is the start of a new bin. if bin_indices not in binned_finite_phaseseries_indices and len(bin_indices) >= minbinelems: binned_finite_phaseseries_indices.append(bin_indices) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['phase']] # convert to ndarrays binned_finite_phaseseries_indices = [np.array(x) for x in binned_finite_phaseseries_indices] collected_binned_mags['phasebins_indices'] = binned_finite_phaseseries_indices collected_binned_mags['phasebins'] = phasebins collected_binned_mags['nbins'] = len(binned_finite_phaseseries_indices) # collect the finite_phases binned_phase = np.array([np.median(finite_phases[x]) for x in binned_finite_phaseseries_indices]) collected_binned_mags['binnedphases'] = binned_phase collected_binned_mags['binsize'] = binsize # median bin the magnitudes according to the calculated indices collected_binned_mags['binnedmags'] = np.array([np.median(finite_mags[x]) for x in binned_finite_phaseseries_indices]) return collected_binned_mags
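A synthetic-data sketch, assuming phase_bin_magseries above is in scope together with the numpy and scipy.spatial.cKDTree imports it uses internally (the function matches astrobase.lcmath.phase_bin_magseries); the light-curve parameters are made up.

import numpy as np

rng = np.random.RandomState(42)
phases = rng.uniform(0.0, 1.0, 1000)
mags = 12.0 + 0.05 * np.sin(2.0 * np.pi * phases) + rng.normal(0.0, 0.01, 1000)

binned = phase_bin_magseries(phases, mags, binsize=0.02, minbinelems=7)
print(binned['nbins'])             # number of phase bins generated
print(binned['binnedphases'][:3])  # median phase per bin
print(binned['binnedmags'][:3])    # median mag per bin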
def slow_augment(n, ii, jj, idx, count, x, y, u, v, c): '''Perform the augmentation step to assign unassigned i and j n - the # of i and j, also the marker of unassigned x and y ii - the unassigned i jj - the ragged arrays of j for each i idx - the index of the first j for each i count - the number of j for each i x - the assignments of j for each i y - the assignments of i for each j u,v - the dual variables c - the costs ''' ################################################## # # Augment procedure: from the Jonker paper. # # Note: # cred [i,j] = c [i,j] - u [i] - v[j] # # procedure AUGMENT; # begin # for all unassigned i* do # begin # for j:= 1 ... n do # begin d[j] := c[i*,j] - v[j] ; pred[j] := i* end; # READY: = { ) ; SCAN: = { } ; TODO: = { 1 ... n} ; # repeat # if SCAN = { } then # begin # u = min {d[j] for j in TODO} ; # SCAN: = {j | d[j] = u} ; # TODO: = TODO - SCAN; # for j in SCAN do if y[j]==0 then go to augment # end; # select any j* in SCAN; # i := y[j*]; SCAN := SCAN - {j*} ; READY: = READY + {j*} ; # for all j in TODO do if u + cred[i,j] < d[j] then # begin # d[j] := u + cred[i,j]; pred[j] := i; # if d[j] = u then # if y[j] is unassigned then go to augment else # begin SCAN: = SCAN + {j} ; TODO: = TODO - {j} end # end # until false; (* repeat always ends with go to augment *) #augment: # (* price updating *) # for k in READY do v[k]: = v[k] + d[k] - u; # (* augmentation *) # repeat # i: = pred[j]; y[ j ] := i ; k:=j; j:=x[i]; x[i]:= k # until i = i* # end #end inf = np.sum(c) + 1 d = np.zeros(n) cc = np.zeros((n,n)) cc[:,:] = inf for i in range(n): cc[i,jj[idx[i]:(idx[i]+count[i])]] = c[idx[i]:(idx[i]+count[i])] c = cc for i in ii: print("Processing i=%d" % i) j = jj[idx[i]:(idx[i] + count[i])] d = c[i,:] - v pred = np.ones(n, int) * i on_deck = [] ready = [] scan = [] to_do = list(range(n)) try: while True: print("Evaluating i=%d, n_scan = %d" % (i, len(scan))) if len(scan) == 0: ready += on_deck on_deck = [] umin = np.min([d[jjj] for jjj in to_do]) print("umin = %f" % umin) scan = [jjj for jjj in to_do if d[jjj] == umin] to_do = [jjj for jjj in to_do if d[jjj] != umin] for j1 in scan: if y[j1] == n: raise StopIteration() j1 = scan[0] iii = y[j1] print("Consider replacing i=%d, j=%d" % (iii, j1)) scan = scan[1:] on_deck += [j1] u1 = c[iii, j1] - v[j1] - umin for j1 in list(to_do): h = c[iii, j1] - v[j1] - u1 print("Consider j=%d as replacement, c[%d,%d]=%f,v[%d]=%f,h=%f, d[j]= %f" % (j1,iii,j1,c[iii,j1],j1,v[j1],h,d[j1])) if h < d[j1]: print("Add to chain") pred[j1] = iii if h == umin: if y[j1] == n: raise StopIteration() print("Add to scan") scan += [j1] to_do.remove(j1) d[j1] = h except StopIteration: # Augment print("Augmenting %d" % j1) for k in ready: temp = v[k] v[k] = v[k] + d[k] - umin print("v[%d] %f -> %f" % (k, temp, v[k])) while True: iii = pred[j1] print("y[%d] %d -> %d" % (j1, y[j1], iii)) y[j1] = iii j1, x[iii] = x[iii], j1 if iii == i: break # # Re-establish slackness since we didn't pay attention to u # for i in range(n): j = x[i] u[i] = c[i,j] - v[j]
def function[slow_augment, parameter[n, ii, jj, idx, count, x, y, u, v, c]]: constant[Perform the augmentation step to assign unassigned i and j n - the # of i and j, also the marker of unassigned x and y ii - the unassigned i jj - the ragged arrays of j for each i idx - the index of the first j for each i count - the number of j for each i x - the assignments of j for each i y - the assignments of i for each j u,v - the dual variables c - the costs ] variable[inf] assign[=] binary_operation[call[name[np].sum, parameter[name[c]]] + constant[1]] variable[d] assign[=] call[name[np].zeros, parameter[name[n]]] variable[cc] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da20c6e7e20>, <ast.Name object at 0x7da20c6e5690>]]]] call[name[cc]][tuple[[<ast.Slice object at 0x7da1b052ba90>, <ast.Slice object at 0x7da1b0529780>]]] assign[=] name[inf] for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:] call[name[cc]][tuple[[<ast.Name object at 0x7da1b0529ae0>, <ast.Subscript object at 0x7da1b0528220>]]] assign[=] call[name[c]][<ast.Slice object at 0x7da1b0528b80>] variable[c] assign[=] name[cc] for taget[name[i]] in starred[name[ii]] begin[:] call[name[print], parameter[binary_operation[constant[Processing i=%d] <ast.Mod object at 0x7da2590d6920> name[i]]]] variable[j] assign[=] call[name[jj]][<ast.Slice object at 0x7da18f00e290>] variable[d] assign[=] binary_operation[call[name[c]][tuple[[<ast.Name object at 0x7da18f00ceb0>, <ast.Slice object at 0x7da18f00f700>]]] - name[v]] variable[pred] assign[=] binary_operation[call[name[np].ones, parameter[name[n], name[int]]] * name[i]] variable[on_deck] assign[=] list[[]] variable[ready] assign[=] list[[]] variable[scan] assign[=] list[[]] variable[to_do] assign[=] call[name[list], parameter[call[name[range], parameter[name[n]]]]] <ast.Try object at 0x7da18f00d450> for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:] variable[j] assign[=] call[name[x]][name[i]] call[name[u]][name[i]] assign[=] binary_operation[call[name[c]][tuple[[<ast.Name object at 0x7da1b05292d0>, <ast.Name object at 0x7da1b052b5b0>]]] - call[name[v]][name[j]]]
keyword[def] identifier[slow_augment] ( identifier[n] , identifier[ii] , identifier[jj] , identifier[idx] , identifier[count] , identifier[x] , identifier[y] , identifier[u] , identifier[v] , identifier[c] ):
 literal[string]
 identifier[inf] = identifier[np] . identifier[sum] ( identifier[c] )+ literal[int]
 identifier[d] = identifier[np] . identifier[zeros] ( identifier[n] )
 identifier[cc] = identifier[np] . identifier[zeros] (( identifier[n] , identifier[n] ))
 identifier[cc] [:,:]= identifier[inf]
 keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
 identifier[cc] [ identifier[i] , identifier[jj] [ identifier[idx] [ identifier[i] ]:( identifier[idx] [ identifier[i] ]+ identifier[count] [ identifier[i] ])]]= identifier[c] [ identifier[idx] [ identifier[i] ]:( identifier[idx] [ identifier[i] ]+ identifier[count] [ identifier[i] ])]
 identifier[c] = identifier[cc]
 keyword[for] identifier[i] keyword[in] identifier[ii] :
 identifier[print] ( literal[string] % identifier[i] )
 identifier[j] = identifier[jj] [ identifier[idx] [ identifier[i] ]:( identifier[idx] [ identifier[i] ]+ identifier[count] [ identifier[i] ])]
 identifier[d] = identifier[c] [ identifier[i] ,:]- identifier[v]
 identifier[pred] = identifier[np] . identifier[ones] ( identifier[n] , identifier[int] )* identifier[i]
 identifier[on_deck] =[]
 identifier[ready] =[]
 identifier[scan] =[]
 identifier[to_do] = identifier[list] ( identifier[range] ( identifier[n] ))
 keyword[try] :
 keyword[while] keyword[True] :
 identifier[print] ( literal[string] %( identifier[i] , identifier[len] ( identifier[scan] )))
 keyword[if] identifier[len] ( identifier[scan] )== literal[int] :
 identifier[ready] += identifier[on_deck]
 identifier[on_deck] =[]
 identifier[umin] = identifier[np] . identifier[min] ([ identifier[d] [ identifier[jjj] ] keyword[for] identifier[jjj] keyword[in] identifier[to_do] ])
 identifier[print] ( literal[string] % identifier[umin] )
 identifier[scan] =[ identifier[jjj] keyword[for] identifier[jjj] keyword[in] identifier[to_do] keyword[if] identifier[d] [ identifier[jjj] ]== identifier[umin] ]
 identifier[to_do] =[ identifier[jjj] keyword[for] identifier[jjj] keyword[in] identifier[to_do] keyword[if] identifier[d] [ identifier[jjj] ]!= identifier[umin] ]
 keyword[for] identifier[j1] keyword[in] identifier[scan] :
 keyword[if] identifier[y] [ identifier[j1] ]== identifier[n] :
 keyword[raise] identifier[StopIteration] ()
 identifier[j1] = identifier[scan] [ literal[int] ]
 identifier[iii] = identifier[y] [ identifier[j1] ]
 identifier[print] ( literal[string] %( identifier[iii] , identifier[j1] ))
 identifier[scan] = identifier[scan] [ literal[int] :]
 identifier[on_deck] +=[ identifier[j1] ]
 identifier[u1] = identifier[c] [ identifier[iii] , identifier[j1] ]- identifier[v] [ identifier[j1] ]- identifier[umin]
 keyword[for] identifier[j1] keyword[in] identifier[list] ( identifier[to_do] ):
 identifier[h] = identifier[c] [ identifier[iii] , identifier[j1] ]- identifier[v] [ identifier[j1] ]- identifier[u1]
 identifier[print] ( literal[string] %( identifier[j1] , identifier[iii] , identifier[j1] , identifier[c] [ identifier[iii] , identifier[j1] ], identifier[j1] , identifier[v] [ identifier[j1] ], identifier[h] , identifier[d] [ identifier[j1] ]))
 keyword[if] identifier[h] < identifier[d] [ identifier[j1] ]:
 identifier[print] ( literal[string] )
 identifier[pred] [ identifier[j1] ]= identifier[iii]
 keyword[if] identifier[h] == identifier[umin] :
 keyword[if] identifier[y] [ identifier[j1] ]== identifier[n] :
 keyword[raise] identifier[StopIteration] ()
 identifier[print] ( literal[string] )
 identifier[scan] +=[ identifier[j1] ]
 identifier[to_do] . identifier[remove] ( identifier[j1] )
 identifier[d] [ identifier[j1] ]= identifier[h]
 keyword[except] identifier[StopIteration] :
 identifier[print] ( literal[string] % identifier[j1] )
 keyword[for] identifier[k] keyword[in] identifier[ready] :
 identifier[temp] = identifier[v] [ identifier[k] ]
 identifier[v] [ identifier[k] ]= identifier[v] [ identifier[k] ]+ identifier[d] [ identifier[k] ]- identifier[umin]
 identifier[print] ( literal[string] %( identifier[k] , identifier[temp] , identifier[v] [ identifier[k] ]))
 keyword[while] keyword[True] :
 identifier[iii] = identifier[pred] [ identifier[j1] ]
 identifier[print] ( literal[string] %( identifier[j1] , identifier[y] [ identifier[j1] ], identifier[iii] ))
 identifier[y] [ identifier[j1] ]= identifier[iii]
 identifier[j1] , identifier[x] [ identifier[iii] ]= identifier[x] [ identifier[iii] ], identifier[j1]
 keyword[if] identifier[iii] == identifier[i] :
 keyword[break]
 keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
 identifier[j] = identifier[x] [ identifier[i] ]
 identifier[u] [ identifier[i] ]= identifier[c] [ identifier[i] , identifier[j] ]- identifier[v] [ identifier[j] ]
def slow_augment(n, ii, jj, idx, count, x, y, u, v, c):
    """Perform the augmentation step to assign unassigned i and j

    n - the # of i and j, also the marker of unassigned x and y
    ii - the unassigned i
    jj - the ragged arrays of j for each i
    idx - the index of the first j for each i
    count - the number of j for each i
    x - the assignments of j for each i
    y - the assignments of i for each j
    u,v - the dual variables
    c - the costs
    """
    ##################################################
    #
    # Augment procedure: from the Jonker paper.
    #
    # Note:
    #    cred [i,j] = c [i,j] - u [i] - v[j]
    #
    # procedure AUGMENT;
    # begin
    #   for all unassigned i* do
    #     begin
    #       for j:= 1 ... n do
    #         begin d[j] := c[i*,j] - v[j] ; pred[j] := i* end;
    #       READY: = { ) ; SCAN: = { } ; TODO: = { 1 ... n} ;
    #       repeat
    #         if SCAN = { } then
    #           begin
    #             u = min {d[j] for j in TODO} ;
    #             SCAN: = {j | d[j] = u} ;
    #             TODO: = TODO - SCAN;
    #             for j in SCAN do if y[j]==0 then go to augment
    #           end;
    #         select any j* in SCAN;
    #         i := y[j*]; SCAN := SCAN - {j*} ; READY: = READY + {j*} ;
    #         for all j in TODO do if u + cred[i,j] < d[j] then
    #           begin
    #             d[j] := u + cred[i,j]; pred[j] := i;
    #             if d[j] = u then
    #                if y[j] is unassigned then go to augment else
    #                  begin SCAN: = SCAN + {j} ; TODO: = TODO - {j} end
    #           end
    #       until false; (* repeat always ends with go to augment *)
    #augment:
    #   (* price updating *)
    #   for k in READY do v[k]: = v[k] + d[k] - u;
    #   (* augmentation *)
    #   repeat
    #      i: = pred[j]; y[ j ] := i ; k:=j; j:=x[i]; x[i]:= k
    #   until i = i*
    #  end
    #end
    inf = np.sum(c) + 1
    d = np.zeros(n)
    cc = np.zeros((n, n))
    cc[:, :] = inf
    for i in range(n):
        cc[i, jj[idx[i]:idx[i] + count[i]]] = c[idx[i]:idx[i] + count[i]] # depends on [control=['for'], data=['i']]
    c = cc
    for i in ii:
        print('Processing i=%d' % i)
        j = jj[idx[i]:idx[i] + count[i]]
        d = c[i, :] - v
        pred = np.ones(n, int) * i
        on_deck = []
        ready = []
        scan = []
        to_do = list(range(n))
        try:
            while True:
                print('Evaluating i=%d, n_scan = %d' % (i, len(scan)))
                if len(scan) == 0:
                    ready += on_deck
                    on_deck = []
                    umin = np.min([d[jjj] for jjj in to_do])
                    print('umin = %f' % umin)
                    scan = [jjj for jjj in to_do if d[jjj] == umin]
                    to_do = [jjj for jjj in to_do if d[jjj] != umin]
                    for j1 in scan:
                        if y[j1] == n:
                            raise StopIteration() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j1']] # depends on [control=['if'], data=[]]
                j1 = scan[0]
                iii = y[j1]
                print('Consider replacing i=%d, j=%d' % (iii, j1))
                scan = scan[1:]
                on_deck += [j1]
                u1 = c[iii, j1] - v[j1] - umin
                for j1 in list(to_do):
                    h = c[iii, j1] - v[j1] - u1
                    print('Consider j=%d as replacement, c[%d,%d]=%f,v[%d]=%f,h=%f, d[j]= %f' % (j1, iii, j1, c[iii, j1], j1, v[j1], h, d[j1]))
                    if h < d[j1]:
                        print('Add to chain')
                        pred[j1] = iii
                        if h == umin:
                            if y[j1] == n:
                                raise StopIteration() # depends on [control=['if'], data=[]]
                            print('Add to scan')
                            scan += [j1]
                            to_do.remove(j1) # depends on [control=['if'], data=[]]
                        d[j1] = h # depends on [control=['if'], data=['h']] # depends on [control=['for'], data=['j1']] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
        except StopIteration:
            # Augment
            print('Augmenting %d' % j1)
            for k in ready:
                temp = v[k]
                v[k] = v[k] + d[k] - umin
                print('v[%d] %f -> %f' % (k, temp, v[k])) # depends on [control=['for'], data=['k']]
            while True:
                iii = pred[j1]
                print('y[%d] %d -> %d' % (j1, y[j1], iii))
                y[j1] = iii
                (j1, x[iii]) = (x[iii], j1)
                if iii == i:
                    break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
    #
    # Re-establish slackness since we didn't pay attention to u
    #
    for i in range(n):
        j = x[i]
        u[i] = c[i, j] - v[j] # depends on [control=['for'], data=['i']]
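A tiny worked example, assuming slow_augment above is in scope: a 2x2 dense cost matrix expressed in the ragged (jj, idx, count) layout the routine expects, with both rows initially unassigned (marked with n). All numbers are illustrative.

import numpy as np

n = 2
c = np.array([1.0, 2.0, 2.0, 1.0])  # row-major costs: c(0,0), c(0,1), c(1,0), c(1,1)
jj = np.array([0, 1, 0, 1])         # column index of each cost entry
idx = np.array([0, 2])              # offset of each row's first entry in c/jj
count = np.array([2, 2])            # number of entries per row
ii = np.array([0, 1])               # rows that are still unassigned
x = np.full(n, n)                   # x[i] = column assigned to row i (n means none)
y = np.full(n, n)                   # y[j] = row assigned to column j (n means none)
u = np.zeros(n)
v = np.zeros(n)

slow_augment(n, ii, jj, idx, count, x, y, u, v, c)
print(x, y)  # expected: x = [0 1], y = [0 1], i.e. the diagonal matching of total cost 2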
def make_html_tag(tag, text=None, **params): """Create an HTML tag string. tag The HTML tag to use (e.g. 'a', 'span' or 'div') text The text to enclose between opening and closing tag. If no text is specified then only the opening tag is returned. Example:: make_html_tag('a', text="Hello", href="/another/page") -> <a href="/another/page">Hello</a> To use reserved Python keywords like "class" as a parameter prepend it with an underscore. Instead of "class='green'" use "_class='green'". Warning: Quotes and apostrophes are not escaped.""" params_string = "" # Parameters are passed. Turn the dict into a string like "a=1 b=2 c=3" string. for key, value in sorted(params.items()): # Strip off a leading underscore from the attribute's key to allow attributes like '_class' # to be used as a CSS class specification instead of the reserved Python keyword 'class'. key = key.lstrip("_") params_string += u' {0}="{1}"'.format(key, value) # Create the tag string tag_string = u"<{0}{1}>".format(tag, params_string) # Add text and closing tag if required. if text: tag_string += u"{0}</{1}>".format(text, tag) return tag_string
def function[make_html_tag, parameter[tag, text]]: constant[Create an HTML tag string. tag The HTML tag to use (e.g. 'a', 'span' or 'div') text The text to enclose between opening and closing tag. If no text is specified then only the opening tag is returned. Example:: make_html_tag('a', text="Hello", href="/another/page") -> <a href="/another/page">Hello</a> To use reserved Python keywords like "class" as a parameter prepend it with an underscore. Instead of "class='green'" use "_class='green'". Warning: Quotes and apostrophes are not escaped.] variable[params_string] assign[=] constant[] for taget[tuple[[<ast.Name object at 0x7da18f58e0b0>, <ast.Name object at 0x7da18f58e050>]]] in starred[call[name[sorted], parameter[call[name[params].items, parameter[]]]]] begin[:] variable[key] assign[=] call[name[key].lstrip, parameter[constant[_]]] <ast.AugAssign object at 0x7da18f58ec20> variable[tag_string] assign[=] call[constant[<{0}{1}>].format, parameter[name[tag], name[params_string]]] if name[text] begin[:] <ast.AugAssign object at 0x7da18f58e380> return[name[tag_string]]
keyword[def] identifier[make_html_tag] ( identifier[tag] , identifier[text] = keyword[None] ,** identifier[params] ): literal[string] identifier[params_string] = literal[string] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[sorted] ( identifier[params] . identifier[items] ()): identifier[key] = identifier[key] . identifier[lstrip] ( literal[string] ) identifier[params_string] += literal[string] . identifier[format] ( identifier[key] , identifier[value] ) identifier[tag_string] = literal[string] . identifier[format] ( identifier[tag] , identifier[params_string] ) keyword[if] identifier[text] : identifier[tag_string] += literal[string] . identifier[format] ( identifier[text] , identifier[tag] ) keyword[return] identifier[tag_string]
def make_html_tag(tag, text=None, **params): """Create an HTML tag string. tag The HTML tag to use (e.g. 'a', 'span' or 'div') text The text to enclose between opening and closing tag. If no text is specified then only the opening tag is returned. Example:: make_html_tag('a', text="Hello", href="/another/page") -> <a href="/another/page">Hello</a> To use reserved Python keywords like "class" as a parameter prepend it with an underscore. Instead of "class='green'" use "_class='green'". Warning: Quotes and apostrophes are not escaped.""" params_string = '' # Parameters are passed. Turn the dict into a string like "a=1 b=2 c=3" string. for (key, value) in sorted(params.items()): # Strip off a leading underscore from the attribute's key to allow attributes like '_class' # to be used as a CSS class specification instead of the reserved Python keyword 'class'. key = key.lstrip('_') params_string += u' {0}="{1}"'.format(key, value) # depends on [control=['for'], data=[]] # Create the tag string tag_string = u'<{0}{1}>'.format(tag, params_string) # Add text and closing tag if required. if text: tag_string += u'{0}</{1}>'.format(text, tag) # depends on [control=['if'], data=[]] return tag_string
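Quick usage of make_html_tag, showing the leading-underscore escape for reserved words; the attribute order in the output follows the sorted() call inside the function.

print(make_html_tag('a', text="Hello", href="/another/page", _class="nav"))
# -> <a class="nav" href="/another/page">Hello</a>

print(make_html_tag('br'))
# -> <br>  (no text, so only the opening tag is emitted)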
def tas2eas(Vtas, H): """True Airspeed to Equivalent Airspeed""" rho = density(H) Veas = Vtas * np.sqrt(rho/rho0) return Veas
def function[tas2eas, parameter[Vtas, H]]: constant[True Airspeed to Equivalent Airspeed] variable[rho] assign[=] call[name[density], parameter[name[H]]] variable[Veas] assign[=] binary_operation[name[Vtas] * call[name[np].sqrt, parameter[binary_operation[name[rho] / name[rho0]]]]] return[name[Veas]]
keyword[def] identifier[tas2eas] ( identifier[Vtas] , identifier[H] ): literal[string] identifier[rho] = identifier[density] ( identifier[H] ) identifier[Veas] = identifier[Vtas] * identifier[np] . identifier[sqrt] ( identifier[rho] / identifier[rho0] ) keyword[return] identifier[Veas]
def tas2eas(Vtas, H): """True Airspeed to Equivalent Airspeed""" rho = density(H) Veas = Vtas * np.sqrt(rho / rho0) return Veas
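A hedged sketch of calling tas2eas above: density(H) and rho0 are module-level names not shown in the source, so they are stubbed here with ISA sea-level values and a crude troposphere approximation (both assumptions, for illustration only).

import numpy as np

rho0 = 1.225  # ISA sea-level air density, kg/m^3 (assumed value)

def density(H):
    # Barometric density approximation for H in metres, valid below ~11 km.
    return rho0 * (1.0 - 2.25577e-5 * H) ** 4.256

print(tas2eas(100.0, 10000.0))  # EAS comes out lower than TAS at altitude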
def clusterStatus(self): """ Returns a dict of cluster nodes and their status information """ servers = yield self.getClusterServers() d = { 'workers': {}, 'crons': {}, 'queues': {} } now = time.time() reverse_map = {} for sname in servers: last = yield self._get_key('/server/%s/heartbeat' % sname) status = yield self._get_key('/server/%s/status' % sname) uuid = yield self._get_key('/server/%s/uuid' % sname) reverse_map[uuid] = sname if not last: last = 0 last = float(last) if (status == 'ready') and (now - last > 5): status = 'offline' if not sname in d['workers']: d['workers'][sname] = [] d['workers'][sname].append({ 'lastseen': last, 'status': status, 'id': uuid }) # Crons crons = yield self.keys('/crons') for queue in crons: if queue not in d['crons']: d['crons'][queue] = {'methods': {}} methods = yield self.keys('/crons/%s' % queue) for method in methods: last = yield self._get_key('/crons/%s/%s' % (queue, method)) if last: d['crons'][queue]['methods'][method] = float(last) uid = yield self._get_key('/croner/%s' % queue) if uid: d['crons'][queue]['master'] = '%s:%s' % (uid, reverse_map[uid]) # Queues queue_keys = yield self.keys('/qstats') for qname in queue_keys: if qname not in d['queues']: qlen = yield self.queueSize(qname) stats = yield self.getQueueMessageStats(qname) d['queues'][qname] = { 'waiting': qlen, 'messages': stats } defer.returnValue(d)
def function[clusterStatus, parameter[self]]: constant[ Returns a dict of cluster nodes and their status information ] variable[servers] assign[=] <ast.Yield object at 0x7da1b189fac0> variable[d] assign[=] dictionary[[<ast.Constant object at 0x7da1b189f640>, <ast.Constant object at 0x7da1b189d7e0>, <ast.Constant object at 0x7da1b189c460>], [<ast.Dict object at 0x7da1b189f850>, <ast.Dict object at 0x7da1b189cbb0>, <ast.Dict object at 0x7da1b189e560>]] variable[now] assign[=] call[name[time].time, parameter[]] variable[reverse_map] assign[=] dictionary[[], []] for taget[name[sname]] in starred[name[servers]] begin[:] variable[last] assign[=] <ast.Yield object at 0x7da1b189c580> variable[status] assign[=] <ast.Yield object at 0x7da1b189c7f0> variable[uuid] assign[=] <ast.Yield object at 0x7da1b189f490> call[name[reverse_map]][name[uuid]] assign[=] name[sname] if <ast.UnaryOp object at 0x7da1b189c610> begin[:] variable[last] assign[=] constant[0] variable[last] assign[=] call[name[float], parameter[name[last]]] if <ast.BoolOp object at 0x7da1b189c160> begin[:] variable[status] assign[=] constant[offline] if <ast.UnaryOp object at 0x7da1b189e320> begin[:] call[call[name[d]][constant[workers]]][name[sname]] assign[=] list[[]] call[call[call[name[d]][constant[workers]]][name[sname]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b189cf40>, <ast.Constant object at 0x7da1b189f340>, <ast.Constant object at 0x7da1b189f130>], [<ast.Name object at 0x7da1b189fb20>, <ast.Name object at 0x7da1b189c8e0>, <ast.Name object at 0x7da1b189ca30>]]]] variable[crons] assign[=] <ast.Yield object at 0x7da1b189e860> for taget[name[queue]] in starred[name[crons]] begin[:] if compare[name[queue] <ast.NotIn object at 0x7da2590d7190> call[name[d]][constant[crons]]] begin[:] call[call[name[d]][constant[crons]]][name[queue]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1973250>], [<ast.Dict object at 0x7da1b19730a0>]] variable[methods] assign[=] <ast.Yield object at 0x7da1b1835c60> for taget[name[method]] in starred[name[methods]] begin[:] variable[last] assign[=] <ast.Yield object at 0x7da1b1835e70> if name[last] begin[:] call[call[call[call[name[d]][constant[crons]]][name[queue]]][constant[methods]]][name[method]] assign[=] call[name[float], parameter[name[last]]] variable[uid] assign[=] <ast.Yield object at 0x7da1b18d99f0> if name[uid] begin[:] call[call[call[name[d]][constant[crons]]][name[queue]]][constant[master]] assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b18d8fa0>, <ast.Subscript object at 0x7da1b18d8be0>]]] variable[queue_keys] assign[=] <ast.Yield object at 0x7da1b18d9420> for taget[name[qname]] in starred[name[queue_keys]] begin[:] if compare[name[qname] <ast.NotIn object at 0x7da2590d7190> call[name[d]][constant[queues]]] begin[:] variable[qlen] assign[=] <ast.Yield object at 0x7da1b18d9f90> variable[stats] assign[=] <ast.Yield object at 0x7da1b18d97e0> call[call[name[d]][constant[queues]]][name[qname]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18d9720>, <ast.Constant object at 0x7da1b18d99c0>], [<ast.Name object at 0x7da1b18d9780>, <ast.Name object at 0x7da1b18d96f0>]] call[name[defer].returnValue, parameter[name[d]]]
keyword[def] identifier[clusterStatus] ( identifier[self] ): literal[string] identifier[servers] = keyword[yield] identifier[self] . identifier[getClusterServers] () identifier[d] ={ literal[string] :{}, literal[string] :{}, literal[string] :{} } identifier[now] = identifier[time] . identifier[time] () identifier[reverse_map] ={} keyword[for] identifier[sname] keyword[in] identifier[servers] : identifier[last] = keyword[yield] identifier[self] . identifier[_get_key] ( literal[string] % identifier[sname] ) identifier[status] = keyword[yield] identifier[self] . identifier[_get_key] ( literal[string] % identifier[sname] ) identifier[uuid] = keyword[yield] identifier[self] . identifier[_get_key] ( literal[string] % identifier[sname] ) identifier[reverse_map] [ identifier[uuid] ]= identifier[sname] keyword[if] keyword[not] identifier[last] : identifier[last] = literal[int] identifier[last] = identifier[float] ( identifier[last] ) keyword[if] ( identifier[status] == literal[string] ) keyword[and] ( identifier[now] - identifier[last] > literal[int] ): identifier[status] = literal[string] keyword[if] keyword[not] identifier[sname] keyword[in] identifier[d] [ literal[string] ]: identifier[d] [ literal[string] ][ identifier[sname] ]=[] identifier[d] [ literal[string] ][ identifier[sname] ]. identifier[append] ({ literal[string] : identifier[last] , literal[string] : identifier[status] , literal[string] : identifier[uuid] }) identifier[crons] = keyword[yield] identifier[self] . identifier[keys] ( literal[string] ) keyword[for] identifier[queue] keyword[in] identifier[crons] : keyword[if] identifier[queue] keyword[not] keyword[in] identifier[d] [ literal[string] ]: identifier[d] [ literal[string] ][ identifier[queue] ]={ literal[string] :{}} identifier[methods] = keyword[yield] identifier[self] . identifier[keys] ( literal[string] % identifier[queue] ) keyword[for] identifier[method] keyword[in] identifier[methods] : identifier[last] = keyword[yield] identifier[self] . identifier[_get_key] ( literal[string] %( identifier[queue] , identifier[method] )) keyword[if] identifier[last] : identifier[d] [ literal[string] ][ identifier[queue] ][ literal[string] ][ identifier[method] ]= identifier[float] ( identifier[last] ) identifier[uid] = keyword[yield] identifier[self] . identifier[_get_key] ( literal[string] % identifier[queue] ) keyword[if] identifier[uid] : identifier[d] [ literal[string] ][ identifier[queue] ][ literal[string] ]= literal[string] %( identifier[uid] , identifier[reverse_map] [ identifier[uid] ]) identifier[queue_keys] = keyword[yield] identifier[self] . identifier[keys] ( literal[string] ) keyword[for] identifier[qname] keyword[in] identifier[queue_keys] : keyword[if] identifier[qname] keyword[not] keyword[in] identifier[d] [ literal[string] ]: identifier[qlen] = keyword[yield] identifier[self] . identifier[queueSize] ( identifier[qname] ) identifier[stats] = keyword[yield] identifier[self] . identifier[getQueueMessageStats] ( identifier[qname] ) identifier[d] [ literal[string] ][ identifier[qname] ]={ literal[string] : identifier[qlen] , literal[string] : identifier[stats] } identifier[defer] . identifier[returnValue] ( identifier[d] )
def clusterStatus(self): """ Returns a dict of cluster nodes and their status information """ servers = (yield self.getClusterServers()) d = {'workers': {}, 'crons': {}, 'queues': {}} now = time.time() reverse_map = {} for sname in servers: last = (yield self._get_key('/server/%s/heartbeat' % sname)) status = (yield self._get_key('/server/%s/status' % sname)) uuid = (yield self._get_key('/server/%s/uuid' % sname)) reverse_map[uuid] = sname if not last: last = 0 # depends on [control=['if'], data=[]] last = float(last) if status == 'ready' and now - last > 5: status = 'offline' # depends on [control=['if'], data=[]] if not sname in d['workers']: d['workers'][sname] = [] # depends on [control=['if'], data=[]] d['workers'][sname].append({'lastseen': last, 'status': status, 'id': uuid}) # depends on [control=['for'], data=['sname']] # Crons crons = (yield self.keys('/crons')) for queue in crons: if queue not in d['crons']: d['crons'][queue] = {'methods': {}} # depends on [control=['if'], data=['queue']] methods = (yield self.keys('/crons/%s' % queue)) for method in methods: last = (yield self._get_key('/crons/%s/%s' % (queue, method))) if last: d['crons'][queue]['methods'][method] = float(last) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['method']] uid = (yield self._get_key('/croner/%s' % queue)) if uid: d['crons'][queue]['master'] = '%s:%s' % (uid, reverse_map[uid]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['queue']] # Queues queue_keys = (yield self.keys('/qstats')) for qname in queue_keys: if qname not in d['queues']: qlen = (yield self.queueSize(qname)) stats = (yield self.getQueueMessageStats(qname)) d['queues'][qname] = {'waiting': qlen, 'messages': stats} # depends on [control=['if'], data=['qname']] # depends on [control=['for'], data=['qname']] defer.returnValue(d)
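The heartbeat staleness rule buried in clusterStatus above, distilled into a standalone helper (the 5-second window mirrors the source; the helper name is illustrative).

import time

def effective_status(status, last_heartbeat, now=None, window=5):
    # A worker that reports 'ready' but has not sent a heartbeat within the
    # window is treated as offline, exactly as clusterStatus does.
    now = time.time() if now is None else now
    if status == 'ready' and now - float(last_heartbeat) > window:
        return 'offline'
    return status

print(effective_status('ready', time.time() - 10))  # offline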
def create_config(config_path="scriptworker.yaml"): """Create a config from DEFAULT_CONFIG, arguments, and config file. Then validate it and freeze it. Args: config_path (str, optional): the path to the config file. Defaults to "scriptworker.yaml" Returns: tuple: (config frozendict, credentials dict) Raises: SystemExit: on failure """ if not os.path.exists(config_path): print("{} doesn't exist! Exiting...".format(config_path), file=sys.stderr) sys.exit(1) with open(config_path, "r", encoding="utf-8") as fh: secrets = safe_load(fh) config = dict(deepcopy(DEFAULT_CONFIG)) if not secrets.get("credentials"): secrets['credentials'] = read_worker_creds() config.update(secrets) apply_product_config(config) messages = check_config(config, config_path) if messages: print('\n'.join(messages), file=sys.stderr) print("Exiting...", file=sys.stderr) sys.exit(1) credentials = get_frozen_copy(secrets['credentials']) del(config['credentials']) config = get_frozen_copy(config) return config, credentials
def function[create_config, parameter[config_path]]: constant[Create a config from DEFAULT_CONFIG, arguments, and config file. Then validate it and freeze it. Args: config_path (str, optional): the path to the config file. Defaults to "scriptworker.yaml" Returns: tuple: (config frozendict, credentials dict) Raises: SystemExit: on failure ] if <ast.UnaryOp object at 0x7da18dc98f10> begin[:] call[name[print], parameter[call[constant[{} doesn't exist! Exiting...].format, parameter[name[config_path]]]]] call[name[sys].exit, parameter[constant[1]]] with call[name[open], parameter[name[config_path], constant[r]]] begin[:] variable[secrets] assign[=] call[name[safe_load], parameter[name[fh]]] variable[config] assign[=] call[name[dict], parameter[call[name[deepcopy], parameter[name[DEFAULT_CONFIG]]]]] if <ast.UnaryOp object at 0x7da18dc98d30> begin[:] call[name[secrets]][constant[credentials]] assign[=] call[name[read_worker_creds], parameter[]] call[name[config].update, parameter[name[secrets]]] call[name[apply_product_config], parameter[name[config]]] variable[messages] assign[=] call[name[check_config], parameter[name[config], name[config_path]]] if name[messages] begin[:] call[name[print], parameter[call[constant[ ].join, parameter[name[messages]]]]] call[name[print], parameter[constant[Exiting...]]] call[name[sys].exit, parameter[constant[1]]] variable[credentials] assign[=] call[name[get_frozen_copy], parameter[call[name[secrets]][constant[credentials]]]] <ast.Delete object at 0x7da1b0e9c4f0> variable[config] assign[=] call[name[get_frozen_copy], parameter[name[config]]] return[tuple[[<ast.Name object at 0x7da1b0e9c6a0>, <ast.Name object at 0x7da1b0e9f970>]]]
keyword[def] identifier[create_config] ( identifier[config_path] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[config_path] ): identifier[print] ( literal[string] . identifier[format] ( identifier[config_path] ), identifier[file] = identifier[sys] . identifier[stderr] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[with] identifier[open] ( identifier[config_path] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[fh] : identifier[secrets] = identifier[safe_load] ( identifier[fh] ) identifier[config] = identifier[dict] ( identifier[deepcopy] ( identifier[DEFAULT_CONFIG] )) keyword[if] keyword[not] identifier[secrets] . identifier[get] ( literal[string] ): identifier[secrets] [ literal[string] ]= identifier[read_worker_creds] () identifier[config] . identifier[update] ( identifier[secrets] ) identifier[apply_product_config] ( identifier[config] ) identifier[messages] = identifier[check_config] ( identifier[config] , identifier[config_path] ) keyword[if] identifier[messages] : identifier[print] ( literal[string] . identifier[join] ( identifier[messages] ), identifier[file] = identifier[sys] . identifier[stderr] ) identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) identifier[sys] . identifier[exit] ( literal[int] ) identifier[credentials] = identifier[get_frozen_copy] ( identifier[secrets] [ literal[string] ]) keyword[del] ( identifier[config] [ literal[string] ]) identifier[config] = identifier[get_frozen_copy] ( identifier[config] ) keyword[return] identifier[config] , identifier[credentials]
def create_config(config_path='scriptworker.yaml'): """Create a config from DEFAULT_CONFIG, arguments, and config file. Then validate it and freeze it. Args: config_path (str, optional): the path to the config file. Defaults to "scriptworker.yaml" Returns: tuple: (config frozendict, credentials dict) Raises: SystemExit: on failure """ if not os.path.exists(config_path): print("{} doesn't exist! Exiting...".format(config_path), file=sys.stderr) sys.exit(1) # depends on [control=['if'], data=[]] with open(config_path, 'r', encoding='utf-8') as fh: secrets = safe_load(fh) # depends on [control=['with'], data=['fh']] config = dict(deepcopy(DEFAULT_CONFIG)) if not secrets.get('credentials'): secrets['credentials'] = read_worker_creds() # depends on [control=['if'], data=[]] config.update(secrets) apply_product_config(config) messages = check_config(config, config_path) if messages: print('\n'.join(messages), file=sys.stderr) print('Exiting...', file=sys.stderr) sys.exit(1) # depends on [control=['if'], data=[]] credentials = get_frozen_copy(secrets['credentials']) del config['credentials'] config = get_frozen_copy(config) return (config, credentials)
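A minimal sketch of the defaults-overlay-then-freeze flow in create_config; types.MappingProxyType stands in for scriptworker's frozen copy, and the default keys are invented for the demo (both assumptions).

from copy import deepcopy
from types import MappingProxyType

DEFAULTS = {"log_dir": "/tmp/work", "credentials": None}

def build_config(overrides):
    # Overlay provided values on the defaults, split out credentials,
    # then return a read-only view of the remaining config.
    config = dict(deepcopy(DEFAULTS))
    config.update(overrides)
    credentials = config.pop("credentials")
    return MappingProxyType(config), credentials

cfg, creds = build_config({"credentials": {"clientId": "abc"}})
print(cfg["log_dir"], creds)  # /tmp/work {'clientId': 'abc'}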
def get_region(): """Use the environment to get the current region""" global _REGION if _REGION is None: region_name = os.getenv("AWS_DEFAULT_REGION") or "us-east-1" region_dict = {r.name: r for r in boto.regioninfo.get_regions("ec2")} if region_name not in region_dict: raise ValueError("No such EC2 region: {}. Check AWS_DEFAULT_REGION " "environment variable".format(region_name)) _REGION = region_dict[region_name] return _REGION
def function[get_region, parameter[]]: constant[Use the environment to get the current region] <ast.Global object at 0x7da20cabf430> if compare[name[_REGION] is constant[None]] begin[:] variable[region_name] assign[=] <ast.BoolOp object at 0x7da20cabd6c0> variable[region_dict] assign[=] <ast.DictComp object at 0x7da20cabe890> if compare[name[region_name] <ast.NotIn object at 0x7da2590d7190> name[region_dict]] begin[:] <ast.Raise object at 0x7da18f00db10> variable[_REGION] assign[=] call[name[region_dict]][name[region_name]] return[name[_REGION]]
keyword[def] identifier[get_region] (): literal[string] keyword[global] identifier[_REGION] keyword[if] identifier[_REGION] keyword[is] keyword[None] : identifier[region_name] = identifier[os] . identifier[getenv] ( literal[string] ) keyword[or] literal[string] identifier[region_dict] ={ identifier[r] . identifier[name] : identifier[r] keyword[for] identifier[r] keyword[in] identifier[boto] . identifier[regioninfo] . identifier[get_regions] ( literal[string] )} keyword[if] identifier[region_name] keyword[not] keyword[in] identifier[region_dict] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[region_name] )) identifier[_REGION] = identifier[region_dict] [ identifier[region_name] ] keyword[return] identifier[_REGION]
def get_region(): """Use the environment to get the current region""" global _REGION if _REGION is None: region_name = os.getenv('AWS_DEFAULT_REGION') or 'us-east-1' region_dict = {r.name: r for r in boto.regioninfo.get_regions('ec2')} if region_name not in region_dict: raise ValueError('No such EC2 region: {}. Check AWS_DEFAULT_REGION environment variable'.format(region_name)) # depends on [control=['if'], data=['region_name']] _REGION = region_dict[region_name] # depends on [control=['if'], data=['_REGION']] return _REGION
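The memoized module-global pattern behind get_region, shown without the boto dependency (the names here are illustrative, not boto's API).

import os

_CACHED_REGION = None

def cached_region():
    # Resolve once from the environment, then reuse the cached value.
    global _CACHED_REGION
    if _CACHED_REGION is None:
        _CACHED_REGION = os.getenv("AWS_DEFAULT_REGION") or "us-east-1"
    return _CACHED_REGION

print(cached_region())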
def update(self, use_template=False, **metadata_defaults): """ OVERRIDDEN: Prevents writing multiple CharacterStrings per XPATH property """ self.validate() tree_to_update = self._xml_tree if not use_template else self._get_template(**metadata_defaults) supported_props = self._metadata_props # Iterate over keys, and extract non-primitive root for all XPATHs # xroot = identificationInfo/MD_DataIdentification/abstract/ # xpath = identificationInfo/MD_DataIdentification/abstract/CharacterString # # This prevents multiple primitive tags from being inserted under an element for prop, xpath in iteritems(self._data_map): if not prop.startswith('_') or prop.strip('_') in supported_props: # Send only public or alternate properties xroot = self._trim_xpath(xpath, prop) values = getattr(self, prop, u'') update_property(tree_to_update, xroot, xpath, prop, values, supported_props) return tree_to_update
def function[update, parameter[self, use_template]]: constant[ OVERRIDDEN: Prevents writing multiple CharacterStrings per XPATH property ] call[name[self].validate, parameter[]] variable[tree_to_update] assign[=] <ast.IfExp object at 0x7da1b25d45e0> variable[supported_props] assign[=] name[self]._metadata_props for taget[tuple[[<ast.Name object at 0x7da1b25d78b0>, <ast.Name object at 0x7da1b25d7580>]]] in starred[call[name[iteritems], parameter[name[self]._data_map]]] begin[:] if <ast.BoolOp object at 0x7da1b25d6e60> begin[:] variable[xroot] assign[=] call[name[self]._trim_xpath, parameter[name[xpath], name[prop]]] variable[values] assign[=] call[name[getattr], parameter[name[self], name[prop], constant[]]] call[name[update_property], parameter[name[tree_to_update], name[xroot], name[xpath], name[prop], name[values], name[supported_props]]] return[name[tree_to_update]]
keyword[def] identifier[update] ( identifier[self] , identifier[use_template] = keyword[False] ,** identifier[metadata_defaults] ): literal[string] identifier[self] . identifier[validate] () identifier[tree_to_update] = identifier[self] . identifier[_xml_tree] keyword[if] keyword[not] identifier[use_template] keyword[else] identifier[self] . identifier[_get_template] (** identifier[metadata_defaults] ) identifier[supported_props] = identifier[self] . identifier[_metadata_props] keyword[for] identifier[prop] , identifier[xpath] keyword[in] identifier[iteritems] ( identifier[self] . identifier[_data_map] ): keyword[if] keyword[not] identifier[prop] . identifier[startswith] ( literal[string] ) keyword[or] identifier[prop] . identifier[strip] ( literal[string] ) keyword[in] identifier[supported_props] : identifier[xroot] = identifier[self] . identifier[_trim_xpath] ( identifier[xpath] , identifier[prop] ) identifier[values] = identifier[getattr] ( identifier[self] , identifier[prop] , literal[string] ) identifier[update_property] ( identifier[tree_to_update] , identifier[xroot] , identifier[xpath] , identifier[prop] , identifier[values] , identifier[supported_props] ) keyword[return] identifier[tree_to_update]
def update(self, use_template=False, **metadata_defaults): """ OVERRIDDEN: Prevents writing multiple CharacterStrings per XPATH property """ self.validate() tree_to_update = self._xml_tree if not use_template else self._get_template(**metadata_defaults) supported_props = self._metadata_props # Iterate over keys, and extract non-primitive root for all XPATHs # xroot = identificationInfo/MD_DataIdentification/abstract/ # xpath = identificationInfo/MD_DataIdentification/abstract/CharacterString # # This prevents multiple primitive tags from being inserted under an element for (prop, xpath) in iteritems(self._data_map): if not prop.startswith('_') or prop.strip('_') in supported_props: # Send only public or alternate properties xroot = self._trim_xpath(xpath, prop) values = getattr(self, prop, u'') update_property(tree_to_update, xroot, xpath, prop, values, supported_props) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return tree_to_update
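The public-or-whitelisted property filter used by update(), in isolation; the sample mapping and whitelist are invented for the demo.

supported_props = {"abstract", "title"}
data_map = {"title": "/md/title", "_abstract": "/md/abstract", "_cache": "/tmp"}

# Keep public properties, plus underscored ones whose stripped name is supported.
selected = {prop: xpath for prop, xpath in data_map.items()
            if not prop.startswith('_') or prop.strip('_') in supported_props}
print(sorted(selected))  # ['_abstract', 'title']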
def fields2jsonschema(self, fields, ordered=False, partial=None):
    """Return the JSON Schema Object given a mapping between field names and
    :class:`Field <marshmallow.Field>` objects.

    :param dict fields: A dictionary of field name, field object pairs
    :param bool ordered: Whether to preserve the order in which fields were declared
    :param bool|tuple partial: Whether to override a field's required flag.
        If `True`, no fields will be set as required. If an iterable, fields
        in the iterable will not be marked as required.
    :rtype: dict, a JSON Schema Object
    """
    jsonschema = {"type": "object", "properties": OrderedDict() if ordered else {}}

    for field_name, field_obj in iteritems(fields):
        observed_field_name = self._observed_name(field_obj, field_name)
        property = self.field2property(field_obj)
        jsonschema["properties"][observed_field_name] = property

        if field_obj.required:
            if not partial or (
                is_collection(partial) and field_name not in partial
            ):
                jsonschema.setdefault("required", []).append(observed_field_name)

    if "required" in jsonschema:
        jsonschema["required"].sort()

    return jsonschema
def function[fields2jsonschema, parameter[self, fields, ordered, partial]]:
    constant[Return the JSON Schema Object given a mapping between field names and
    :class:`Field <marshmallow.Field>` objects.

    :param dict fields: A dictionary of field name, field object pairs
    :param bool ordered: Whether to preserve the order in which fields were declared
    :param bool|tuple partial: Whether to override a field's required flag.
        If `True`, no fields will be set as required. If an iterable, fields
        in the iterable will not be marked as required.
    :rtype: dict, a JSON Schema Object
    ]
    variable[jsonschema] assign[=] dictionary[[<ast.Constant object at 0x7da1b18e6890>, <ast.Constant object at 0x7da1b18e4100>], [<ast.Constant object at 0x7da1b18e4dc0>, <ast.IfExp object at 0x7da1b18e4f70>]]
    for taget[tuple[[<ast.Name object at 0x7da1b18e7c40>, <ast.Name object at 0x7da1b18e6560>]]] in starred[call[name[iteritems], parameter[name[fields]]]] begin[:]
        variable[observed_field_name] assign[=] call[name[self]._observed_name, parameter[name[field_obj], name[field_name]]]
        variable[property] assign[=] call[name[self].field2property, parameter[name[field_obj]]]
        call[call[name[jsonschema]][constant[properties]]][name[observed_field_name]] assign[=] name[property]
        if name[field_obj].required begin[:]
            if <ast.BoolOp object at 0x7da1b18e7100> begin[:]
                call[call[name[jsonschema].setdefault, parameter[constant[required], list[[]]]].append, parameter[name[observed_field_name]]]
    if compare[constant[required] in name[jsonschema]] begin[:]
        call[call[name[jsonschema]][constant[required]].sort, parameter[]]
    return[name[jsonschema]]
keyword[def] identifier[fields2jsonschema] ( identifier[self] , identifier[fields] , identifier[ordered] = keyword[False] , identifier[partial] = keyword[None] ): literal[string] identifier[jsonschema] ={ literal[string] : literal[string] , literal[string] : identifier[OrderedDict] () keyword[if] identifier[ordered] keyword[else] {}} keyword[for] identifier[field_name] , identifier[field_obj] keyword[in] identifier[iteritems] ( identifier[fields] ): identifier[observed_field_name] = identifier[self] . identifier[_observed_name] ( identifier[field_obj] , identifier[field_name] ) identifier[property] = identifier[self] . identifier[field2property] ( identifier[field_obj] ) identifier[jsonschema] [ literal[string] ][ identifier[observed_field_name] ]= identifier[property] keyword[if] identifier[field_obj] . identifier[required] : keyword[if] keyword[not] identifier[partial] keyword[or] ( identifier[is_collection] ( identifier[partial] ) keyword[and] identifier[field_name] keyword[not] keyword[in] identifier[partial] ): identifier[jsonschema] . identifier[setdefault] ( literal[string] ,[]). identifier[append] ( identifier[observed_field_name] ) keyword[if] literal[string] keyword[in] identifier[jsonschema] : identifier[jsonschema] [ literal[string] ]. identifier[sort] () keyword[return] identifier[jsonschema]
def fields2jsonschema(self, fields, ordered=False, partial=None):
    """Return the JSON Schema Object given a mapping between field names and
    :class:`Field <marshmallow.Field>` objects.

    :param dict fields: A dictionary of field name, field object pairs
    :param bool ordered: Whether to preserve the order in which fields were declared
    :param bool|tuple partial: Whether to override a field's required flag.
        If `True`, no fields will be set as required. If an iterable, fields
        in the iterable will not be marked as required.
    :rtype: dict, a JSON Schema Object
    """
    jsonschema = {'type': 'object', 'properties': OrderedDict() if ordered else {}}
    for (field_name, field_obj) in iteritems(fields):
        observed_field_name = self._observed_name(field_obj, field_name)
        property = self.field2property(field_obj)
        jsonschema['properties'][observed_field_name] = property
        if field_obj.required:
            if not partial or (is_collection(partial) and field_name not in partial):
                jsonschema.setdefault('required', []).append(observed_field_name) # depends on [control=['if'], data=[]]
            # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=[]]
    if 'required' in jsonschema:
        jsonschema['required'].sort() # depends on [control=['if'], data=['jsonschema']]
    return jsonschema
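The `partial` handling from fields2jsonschema, reduced to a predicate; is_collection is approximated with an isinstance check (an assumption, since the real helper is not shown).

def is_marked_required(field_required, field_name, partial):
    if not field_required:
        return False
    is_coll = isinstance(partial, (list, tuple, set))
    # Required survives unless partial is truthy and either not a collection
    # (i.e. True) or a collection that names this field.
    return not partial or (is_coll and field_name not in partial)

print(is_marked_required(True, "id", None))       # True
print(is_marked_required(True, "id", True))       # False
print(is_marked_required(True, "id", ("name",)))  # True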
def _stop_instance(self): """ Stop the instance. """ try: vm_stop = self.compute.virtual_machines.power_off( self.running_instance_id, self.running_instance_id ) except Exception as error: raise AzureCloudException( 'Unable to stop instance: {0}.'.format(error) ) vm_stop.wait()
def function[_stop_instance, parameter[self]]: constant[ Stop the instance. ] <ast.Try object at 0x7da1b1a229b0> call[name[vm_stop].wait, parameter[]]
keyword[def] identifier[_stop_instance] ( identifier[self] ): literal[string] keyword[try] : identifier[vm_stop] = identifier[self] . identifier[compute] . identifier[virtual_machines] . identifier[power_off] ( identifier[self] . identifier[running_instance_id] , identifier[self] . identifier[running_instance_id] ) keyword[except] identifier[Exception] keyword[as] identifier[error] : keyword[raise] identifier[AzureCloudException] ( literal[string] . identifier[format] ( identifier[error] ) ) identifier[vm_stop] . identifier[wait] ()
def _stop_instance(self): """ Stop the instance. """ try: vm_stop = self.compute.virtual_machines.power_off(self.running_instance_id, self.running_instance_id) # depends on [control=['try'], data=[]] except Exception as error: raise AzureCloudException('Unable to stop instance: {0}.'.format(error)) # depends on [control=['except'], data=['error']] vm_stop.wait()
def formdata_post(url, fields): """Send an HTTP request with a multipart/form-data body for the given URL and return the data returned by the server. """ content_type, data = formdata_encode(fields) req = urllib2.Request(url, data) req.add_header('Content-Type', content_type) return urllib2.urlopen(req).read()
def function[formdata_post, parameter[url, fields]]: constant[Send an HTTP request with a multipart/form-data body for the given URL and return the data returned by the server. ] <ast.Tuple object at 0x7da1b0b0de10> assign[=] call[name[formdata_encode], parameter[name[fields]]] variable[req] assign[=] call[name[urllib2].Request, parameter[name[url], name[data]]] call[name[req].add_header, parameter[constant[Content-Type], name[content_type]]] return[call[call[name[urllib2].urlopen, parameter[name[req]]].read, parameter[]]]
keyword[def] identifier[formdata_post] ( identifier[url] , identifier[fields] ): literal[string] identifier[content_type] , identifier[data] = identifier[formdata_encode] ( identifier[fields] ) identifier[req] = identifier[urllib2] . identifier[Request] ( identifier[url] , identifier[data] ) identifier[req] . identifier[add_header] ( literal[string] , identifier[content_type] ) keyword[return] identifier[urllib2] . identifier[urlopen] ( identifier[req] ). identifier[read] ()
def formdata_post(url, fields): """Send an HTTP request with a multipart/form-data body for the given URL and return the data returned by the server. """ (content_type, data) = formdata_encode(fields) req = urllib2.Request(url, data) req.add_header('Content-Type', content_type) return urllib2.urlopen(req).read()
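A Python 3 analogue of formdata_post, since urllib2 is Python 2 only; formdata_encode is assumed to be the sibling helper returning (content_type, body_bytes).

import urllib.request

def formdata_post_py3(url, fields):
    # Same flow as above: encode the multipart body, set the header, POST.
    content_type, data = formdata_encode(fields)  # assumed helper
    req = urllib.request.Request(url, data)
    req.add_header('Content-Type', content_type)
    return urllib.request.urlopen(req).read()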
def _filehandler(configurable): """Default logging file handler.""" filename = configurable.log_name.replace('.', sep) path = join(configurable.log_path, '{0}.log'.format(filename)) return FileHandler(path, mode='a+')
def function[_filehandler, parameter[configurable]]: constant[Default logging file handler.] variable[filename] assign[=] call[name[configurable].log_name.replace, parameter[constant[.], name[sep]]] variable[path] assign[=] call[name[join], parameter[name[configurable].log_path, call[constant[{0}.log].format, parameter[name[filename]]]]] return[call[name[FileHandler], parameter[name[path]]]]
keyword[def] identifier[_filehandler] ( identifier[configurable] ): literal[string] identifier[filename] = identifier[configurable] . identifier[log_name] . identifier[replace] ( literal[string] , identifier[sep] ) identifier[path] = identifier[join] ( identifier[configurable] . identifier[log_path] , literal[string] . identifier[format] ( identifier[filename] )) keyword[return] identifier[FileHandler] ( identifier[path] , identifier[mode] = literal[string] )
def _filehandler(configurable): """Default logging file handler.""" filename = configurable.log_name.replace('.', sep) path = join(configurable.log_path, '{0}.log'.format(filename)) return FileHandler(path, mode='a+')
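A usage sketch for the log-path construction in _filehandler; the configurable object is replaced with a stand-in class, and the paths are illustrative.

from os import sep
from os.path import join

class Cfg:
    log_name = "app.worker"
    log_path = "/var/log"

# Dots in the log name become path separators, nesting the log file.
filename = Cfg.log_name.replace('.', sep)
print(join(Cfg.log_path, '{0}.log'.format(filename)))  # /var/log/app/worker.log on POSIX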
def hide(self): """Hides all annotation artists associated with the DataCursor. Returns self to allow "chaining". (e.g. ``datacursor.hide().disable()``)""" self._hidden = True for artist in self.annotations.values(): artist.set_visible(False) for fig in self.figures: fig.canvas.draw() return self
def function[hide, parameter[self]]: constant[Hides all annotation artists associated with the DataCursor. Returns self to allow "chaining". (e.g. ``datacursor.hide().disable()``)] name[self]._hidden assign[=] constant[True] for taget[name[artist]] in starred[call[name[self].annotations.values, parameter[]]] begin[:] call[name[artist].set_visible, parameter[constant[False]]] for taget[name[fig]] in starred[name[self].figures] begin[:] call[name[fig].canvas.draw, parameter[]] return[name[self]]
keyword[def] identifier[hide] ( identifier[self] ): literal[string] identifier[self] . identifier[_hidden] = keyword[True] keyword[for] identifier[artist] keyword[in] identifier[self] . identifier[annotations] . identifier[values] (): identifier[artist] . identifier[set_visible] ( keyword[False] ) keyword[for] identifier[fig] keyword[in] identifier[self] . identifier[figures] : identifier[fig] . identifier[canvas] . identifier[draw] () keyword[return] identifier[self]
def hide(self): """Hides all annotation artists associated with the DataCursor. Returns self to allow "chaining". (e.g. ``datacursor.hide().disable()``)""" self._hidden = True for artist in self.annotations.values(): artist.set_visible(False) # depends on [control=['for'], data=['artist']] for fig in self.figures: fig.canvas.draw() # depends on [control=['for'], data=['fig']] return self
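The hide-then-redraw idiom from hide(), shown on a bare matplotlib annotation; the Agg backend is selected so the sketch runs headless.

import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ann = ax.annotate("peak", xy=(0.5, 0.5))
ann.set_visible(False)    # hide the artist, as hide() does for each annotation
fig.canvas.draw()         # then force a redraw per figure
print(ann.get_visible())  # False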
def set_options(self, options): """ Configure all the many options we'll need to make this happen. """ self.verbosity = int(options.get('verbosity')) # Will we be gzipping? self.gzip = getattr(settings, 'BAKERY_GZIP', False) # And if so what content types will we be gzipping? self.gzip_content_types = getattr( settings, 'GZIP_CONTENT_TYPES', DEFAULT_GZIP_CONTENT_TYPES ) # What ACL (i.e. security permissions) will be giving the files on S3? self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL) # Should we set cache-control headers? self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {}) # If the user specifies a build directory... if options.get('build_dir'): # ... validate that it is good. if not os.path.exists(options.get('build_dir')): raise CommandError(self.build_missing_msg) # Go ahead and use it self.build_dir = options.get("build_dir") # If the user does not specify a build dir... else: # Check if it is set in settings.py if not hasattr(settings, 'BUILD_DIR'): raise CommandError(self.build_unconfig_msg) # Then make sure it actually exists if not os.path.exists(settings.BUILD_DIR): raise CommandError(self.build_missing_msg) # Go ahead and use it self.build_dir = settings.BUILD_DIR # If the user provides a bucket name, use that. if options.get("aws_bucket_name"): self.aws_bucket_name = options.get("aws_bucket_name") else: # Otherwise try to find it the settings if not hasattr(settings, 'AWS_BUCKET_NAME'): raise CommandError(self.bucket_unconfig_msg) self.aws_bucket_name = settings.AWS_BUCKET_NAME # The bucket prefix, if it exists self.aws_bucket_prefix = options.get("aws_bucket_prefix") # If the user sets the --force option if options.get('force'): self.force_publish = True else: self.force_publish = False # set the --dry-run option if options.get('dry_run'): self.dry_run = True if self.verbosity > 0: logger.info("Executing with the --dry-run option set.") else: self.dry_run = False self.no_delete = options.get('no_delete') self.no_pooling = options.get('no_pooling')
def function[set_options, parameter[self, options]]: constant[ Configure all the many options we'll need to make this happen. ] name[self].verbosity assign[=] call[name[int], parameter[call[name[options].get, parameter[constant[verbosity]]]]] name[self].gzip assign[=] call[name[getattr], parameter[name[settings], constant[BAKERY_GZIP], constant[False]]] name[self].gzip_content_types assign[=] call[name[getattr], parameter[name[settings], constant[GZIP_CONTENT_TYPES], name[DEFAULT_GZIP_CONTENT_TYPES]]] name[self].acl assign[=] call[name[getattr], parameter[name[settings], constant[DEFAULT_ACL], name[self].DEFAULT_ACL]] name[self].cache_control assign[=] call[name[getattr], parameter[name[settings], constant[BAKERY_CACHE_CONTROL], dictionary[[], []]]] if call[name[options].get, parameter[constant[build_dir]]] begin[:] if <ast.UnaryOp object at 0x7da2041d8700> begin[:] <ast.Raise object at 0x7da2041db5b0> name[self].build_dir assign[=] call[name[options].get, parameter[constant[build_dir]]] if call[name[options].get, parameter[constant[aws_bucket_name]]] begin[:] name[self].aws_bucket_name assign[=] call[name[options].get, parameter[constant[aws_bucket_name]]] name[self].aws_bucket_prefix assign[=] call[name[options].get, parameter[constant[aws_bucket_prefix]]] if call[name[options].get, parameter[constant[force]]] begin[:] name[self].force_publish assign[=] constant[True] if call[name[options].get, parameter[constant[dry_run]]] begin[:] name[self].dry_run assign[=] constant[True] if compare[name[self].verbosity greater[>] constant[0]] begin[:] call[name[logger].info, parameter[constant[Executing with the --dry-run option set.]]] name[self].no_delete assign[=] call[name[options].get, parameter[constant[no_delete]]] name[self].no_pooling assign[=] call[name[options].get, parameter[constant[no_pooling]]]
keyword[def] identifier[set_options] ( identifier[self] , identifier[options] ): literal[string] identifier[self] . identifier[verbosity] = identifier[int] ( identifier[options] . identifier[get] ( literal[string] )) identifier[self] . identifier[gzip] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ) identifier[self] . identifier[gzip_content_types] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[DEFAULT_GZIP_CONTENT_TYPES] ) identifier[self] . identifier[acl] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[self] . identifier[DEFAULT_ACL] ) identifier[self] . identifier[cache_control] = identifier[getattr] ( identifier[settings] , literal[string] ,{}) keyword[if] identifier[options] . identifier[get] ( literal[string] ): keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[options] . identifier[get] ( literal[string] )): keyword[raise] identifier[CommandError] ( identifier[self] . identifier[build_missing_msg] ) identifier[self] . identifier[build_dir] = identifier[options] . identifier[get] ( literal[string] ) keyword[else] : keyword[if] keyword[not] identifier[hasattr] ( identifier[settings] , literal[string] ): keyword[raise] identifier[CommandError] ( identifier[self] . identifier[build_unconfig_msg] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[settings] . identifier[BUILD_DIR] ): keyword[raise] identifier[CommandError] ( identifier[self] . identifier[build_missing_msg] ) identifier[self] . identifier[build_dir] = identifier[settings] . identifier[BUILD_DIR] keyword[if] identifier[options] . identifier[get] ( literal[string] ): identifier[self] . identifier[aws_bucket_name] = identifier[options] . identifier[get] ( literal[string] ) keyword[else] : keyword[if] keyword[not] identifier[hasattr] ( identifier[settings] , literal[string] ): keyword[raise] identifier[CommandError] ( identifier[self] . identifier[bucket_unconfig_msg] ) identifier[self] . identifier[aws_bucket_name] = identifier[settings] . identifier[AWS_BUCKET_NAME] identifier[self] . identifier[aws_bucket_prefix] = identifier[options] . identifier[get] ( literal[string] ) keyword[if] identifier[options] . identifier[get] ( literal[string] ): identifier[self] . identifier[force_publish] = keyword[True] keyword[else] : identifier[self] . identifier[force_publish] = keyword[False] keyword[if] identifier[options] . identifier[get] ( literal[string] ): identifier[self] . identifier[dry_run] = keyword[True] keyword[if] identifier[self] . identifier[verbosity] > literal[int] : identifier[logger] . identifier[info] ( literal[string] ) keyword[else] : identifier[self] . identifier[dry_run] = keyword[False] identifier[self] . identifier[no_delete] = identifier[options] . identifier[get] ( literal[string] ) identifier[self] . identifier[no_pooling] = identifier[options] . identifier[get] ( literal[string] )
def set_options(self, options): """ Configure all the many options we'll need to make this happen. """ self.verbosity = int(options.get('verbosity')) # Will we be gzipping? self.gzip = getattr(settings, 'BAKERY_GZIP', False) # And if so what content types will we be gzipping? self.gzip_content_types = getattr(settings, 'GZIP_CONTENT_TYPES', DEFAULT_GZIP_CONTENT_TYPES) # What ACL (i.e. security permissions) will be giving the files on S3? self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL) # Should we set cache-control headers? self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {}) # If the user specifies a build directory... if options.get('build_dir'): # ... validate that it is good. if not os.path.exists(options.get('build_dir')): raise CommandError(self.build_missing_msg) # depends on [control=['if'], data=[]] # Go ahead and use it self.build_dir = options.get('build_dir') # depends on [control=['if'], data=[]] else: # If the user does not specify a build dir... # Check if it is set in settings.py if not hasattr(settings, 'BUILD_DIR'): raise CommandError(self.build_unconfig_msg) # depends on [control=['if'], data=[]] # Then make sure it actually exists if not os.path.exists(settings.BUILD_DIR): raise CommandError(self.build_missing_msg) # depends on [control=['if'], data=[]] # Go ahead and use it self.build_dir = settings.BUILD_DIR # If the user provides a bucket name, use that. if options.get('aws_bucket_name'): self.aws_bucket_name = options.get('aws_bucket_name') # depends on [control=['if'], data=[]] else: # Otherwise try to find it the settings if not hasattr(settings, 'AWS_BUCKET_NAME'): raise CommandError(self.bucket_unconfig_msg) # depends on [control=['if'], data=[]] self.aws_bucket_name = settings.AWS_BUCKET_NAME # The bucket prefix, if it exists self.aws_bucket_prefix = options.get('aws_bucket_prefix') # If the user sets the --force option if options.get('force'): self.force_publish = True # depends on [control=['if'], data=[]] else: self.force_publish = False # set the --dry-run option if options.get('dry_run'): self.dry_run = True if self.verbosity > 0: logger.info('Executing with the --dry-run option set.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self.dry_run = False self.no_delete = options.get('no_delete') self.no_pooling = options.get('no_pooling')
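The option-then-settings precedence rule from set_options, standalone; plain dicts stand in for the Django options and settings objects.

def pick_bucket(options, settings):
    # An explicit command-line option wins; otherwise fall back to settings,
    # failing loudly if neither is configured.
    if options.get("aws_bucket_name"):
        return options["aws_bucket_name"]
    if "AWS_BUCKET_NAME" not in settings:
        raise ValueError("A bucket name is required.")
    return settings["AWS_BUCKET_NAME"]

print(pick_bucket({}, {"AWS_BUCKET_NAME": "site-prod"}))  # site-prod
print(pick_bucket({"aws_bucket_name": "override"}, {}))   # override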
def load_from_dict(self, conf_dict=None): """ Load the configuration from a dictionary. Args: conf_dict (dict): Dictionary with the configuration. """ self.set_to_default() self._update_dict(self._config, conf_dict) self._update_python_paths()
def function[load_from_dict, parameter[self, conf_dict]]: constant[ Load the configuration from a dictionary. Args: conf_dict (dict): Dictionary with the configuration. ] call[name[self].set_to_default, parameter[]] call[name[self]._update_dict, parameter[name[self]._config, name[conf_dict]]] call[name[self]._update_python_paths, parameter[]]
keyword[def] identifier[load_from_dict] ( identifier[self] , identifier[conf_dict] = keyword[None] ): literal[string] identifier[self] . identifier[set_to_default] () identifier[self] . identifier[_update_dict] ( identifier[self] . identifier[_config] , identifier[conf_dict] ) identifier[self] . identifier[_update_python_paths] ()
def load_from_dict(self, conf_dict=None): """ Load the configuration from a dictionary. Args: conf_dict (dict): Dictionary with the configuration. """ self.set_to_default() self._update_dict(self._config, conf_dict) self._update_python_paths()
def create_parser(self, prog_name, subcommand):
    """
    Customize the parser to include option groups.
    """
    parser = optparse.OptionParser(
        prog=prog_name,
        usage=self.usage(subcommand),
        version=self.get_version(),
        option_list=self.get_option_list())
    for name, description, option_list in self.get_option_groups():
        group = optparse.OptionGroup(parser, name, description)
        list(map(group.add_option, option_list))
        parser.add_option_group(group)
    return parser
def function[create_parser, parameter[self, prog_name, subcommand]]: constant[ Customize the parser to include option groups. ] variable[parser] assign[=] call[name[optparse].OptionParser, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b14d02e0>, <ast.Name object at 0x7da1b14d2a10>, <ast.Name object at 0x7da1b14d18a0>]]] in starred[call[name[self].get_option_groups, parameter[]]] begin[:] variable[group] assign[=] call[name[optparse].OptionGroup, parameter[name[parser], name[name], name[description]]] call[name[list], parameter[call[name[map], parameter[name[group].add_option, name[option_list]]]]] call[name[parser].add_option_group, parameter[name[group]]] return[name[parser]]
keyword[def] identifier[create_parser] ( identifier[self] , identifier[prog_name] , identifier[subcommand] ):
    literal[string]
    identifier[parser] = identifier[optparse] . identifier[OptionParser] (
        identifier[prog] = identifier[prog_name] ,
        identifier[usage] = identifier[self] . identifier[usage] ( identifier[subcommand] ),
        identifier[version] = identifier[self] . identifier[get_version] (),
        identifier[option_list] = identifier[self] . identifier[get_option_list] ())
    keyword[for] identifier[name] , identifier[description] , identifier[option_list] keyword[in] identifier[self] . identifier[get_option_groups] ():
        identifier[group] = identifier[optparse] . identifier[OptionGroup] ( identifier[parser] , identifier[name] , identifier[description] )
        identifier[list] ( identifier[map] ( identifier[group] . identifier[add_option] , identifier[option_list] ))
        identifier[parser] . identifier[add_option_group] ( identifier[group] )
    keyword[return] identifier[parser]
def create_parser(self, prog_name, subcommand): """ Customize the parser to include option groups. """ parser = optparse.OptionParser(prog=prog_name, usage=self.usage(subcommand), version=self.get_version(), option_list=self.get_option_list()) for (name, description, option_list) in self.get_option_groups(): group = optparse.OptionGroup(parser, name, description) list(map(group.add_option, option_list)) parser.add_option_group(group) # depends on [control=['for'], data=[]] return parser
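A minimal optparse grouping demo mirroring create_parser; optparse is legacy but still in the standard library, and the option names are illustrative.

import optparse

parser = optparse.OptionParser(prog="tool")
group = optparse.OptionGroup(parser, "Output", "Control output detail")
group.add_option("-v", "--verbose", action="store_true", default=False)
parser.add_option_group(group)

opts, args = parser.parse_args(["--verbose"])
print(opts.verbose)  # True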
def _check_transition_validity(self, check_transition):
    """ Transitions of PreemptiveConcurrencyStates must at least fulfill the condition of a ContainerState.
    Start transitions are forbidden in the ConcurrencyState.

    :param check_transition: the transition to check for validity
    :return:
    """
    valid, message = super(PreemptiveConcurrencyState, self)._check_transition_validity(check_transition)
    if not valid:
        return False, message

    # Only transitions to the parent state are allowed
    if check_transition.to_state != self.state_id:
        return False, "Only transitions to the parent state are allowed"

    return True, message
def function[_check_transition_validity, parameter[self, check_transition]]:
    constant[ Transitions of PreemptiveConcurrencyStates must at least fulfill the condition of a ContainerState.
    Start transitions are forbidden in the ConcurrencyState.

    :param check_transition: the transition to check for validity
    :return:
    ]
    <ast.Tuple object at 0x7da1b192c8e0> assign[=] call[call[name[super], parameter[name[PreemptiveConcurrencyState], name[self]]]._check_transition_validity, parameter[name[check_transition]]]
    if <ast.UnaryOp object at 0x7da1b192e080> begin[:]
        return[tuple[[<ast.Constant object at 0x7da1b192c1f0>, <ast.Name object at 0x7da18bc72dd0>]]]
    if compare[name[check_transition].to_state not_equal[!=] name[self].state_id] begin[:]
        return[tuple[[<ast.Constant object at 0x7da18bc71960>, <ast.Constant object at 0x7da18bc71d50>]]]
    return[tuple[[<ast.Constant object at 0x7da18bc70ac0>, <ast.Name object at 0x7da18bc70250>]]]
keyword[def] identifier[_check_transition_validity] ( identifier[self] , identifier[check_transition] ): literal[string] identifier[valid] , identifier[message] = identifier[super] ( identifier[PreemptiveConcurrencyState] , identifier[self] ). identifier[_check_transition_validity] ( identifier[check_transition] ) keyword[if] keyword[not] identifier[valid] : keyword[return] keyword[False] , identifier[message] keyword[if] identifier[check_transition] . identifier[to_state] != identifier[self] . identifier[state_id] : keyword[return] keyword[False] , literal[string] keyword[return] keyword[True] , identifier[message]
def _check_transition_validity(self, check_transition):
    """ Transitions of PreemptiveConcurrencyStates must at least fulfill the condition of a ContainerState.
    Start transitions are forbidden in the ConcurrencyState.

    :param check_transition: the transition to check for validity
    :return:
    """
    (valid, message) = super(PreemptiveConcurrencyState, self)._check_transition_validity(check_transition)
    if not valid:
        return (False, message) # depends on [control=['if'], data=[]]
    # Only transitions to the parent state are allowed
    if check_transition.to_state != self.state_id:
        return (False, 'Only transitions to the parent state are allowed') # depends on [control=['if'], data=[]]
    return (True, message)
def resolve(self, current_file, rel_path): """Search the filesystem.""" search_path = [path.dirname(current_file)] + self.search_path target_path = None for search in search_path: if self.exists(path.join(search, rel_path)): target_path = path.normpath(path.join(search, rel_path)) break if not target_path: raise exceptions.EvaluationError('No such file: %r, searched %s' % (rel_path, ':'.join(search_path))) return target_path, path.abspath(target_path)
def function[resolve, parameter[self, current_file, rel_path]]: constant[Search the filesystem.] variable[search_path] assign[=] binary_operation[list[[<ast.Call object at 0x7da20c76e890>]] + name[self].search_path] variable[target_path] assign[=] constant[None] for taget[name[search]] in starred[name[search_path]] begin[:] if call[name[self].exists, parameter[call[name[path].join, parameter[name[search], name[rel_path]]]]] begin[:] variable[target_path] assign[=] call[name[path].normpath, parameter[call[name[path].join, parameter[name[search], name[rel_path]]]]] break if <ast.UnaryOp object at 0x7da20c76e590> begin[:] <ast.Raise object at 0x7da20c76d060> return[tuple[[<ast.Name object at 0x7da20c76eb60>, <ast.Call object at 0x7da20c76d420>]]]
keyword[def] identifier[resolve] ( identifier[self] , identifier[current_file] , identifier[rel_path] ): literal[string] identifier[search_path] =[ identifier[path] . identifier[dirname] ( identifier[current_file] )]+ identifier[self] . identifier[search_path] identifier[target_path] = keyword[None] keyword[for] identifier[search] keyword[in] identifier[search_path] : keyword[if] identifier[self] . identifier[exists] ( identifier[path] . identifier[join] ( identifier[search] , identifier[rel_path] )): identifier[target_path] = identifier[path] . identifier[normpath] ( identifier[path] . identifier[join] ( identifier[search] , identifier[rel_path] )) keyword[break] keyword[if] keyword[not] identifier[target_path] : keyword[raise] identifier[exceptions] . identifier[EvaluationError] ( literal[string] % ( identifier[rel_path] , literal[string] . identifier[join] ( identifier[search_path] ))) keyword[return] identifier[target_path] , identifier[path] . identifier[abspath] ( identifier[target_path] )
def resolve(self, current_file, rel_path): """Search the filesystem.""" search_path = [path.dirname(current_file)] + self.search_path target_path = None for search in search_path: if self.exists(path.join(search, rel_path)): target_path = path.normpath(path.join(search, rel_path)) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['search']] if not target_path: raise exceptions.EvaluationError('No such file: %r, searched %s' % (rel_path, ':'.join(search_path))) # depends on [control=['if'], data=[]] return (target_path, path.abspath(target_path))
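A filesystem sketch of the search order in resolve(): the current file's directory is tried first, then the configured search path (the temp layout is invented for the demo).

import os
import tempfile
from os import path

root = tempfile.mkdtemp()
os.makedirs(path.join(root, "inc"))
open(path.join(root, "inc", "lib.cfg"), "w").close()

current_file = path.join(root, "main.cfg")
search_path = [path.join(root, "inc")]

for base in [path.dirname(current_file)] + search_path:
    candidate = path.join(base, "lib.cfg")
    if path.exists(candidate):
        print(path.normpath(candidate))
        break
else:
    raise FileNotFoundError("No such file: lib.cfg")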
def parse(src):
    """Note: src should be an ascii string"""
    rt = libparser.parse(byref(post), src)
    return (
        rt,
        string_at(post.title, post.tsz),
        string_at(post.tpic, post.tpsz),
        post.body
    )
def function[parse, parameter[src]]:
    constant[Note: src should be an ascii string]
    variable[rt] assign[=] call[name[libparser].parse, parameter[call[name[byref], parameter[name[post]]], name[src]]]
    return[tuple[[<ast.Name object at 0x7da18bcc9330>, <ast.Call object at 0x7da18bcc9db0>, <ast.Call object at 0x7da18bccbe50>, <ast.Attribute object at 0x7da18bccba60>]]]
keyword[def] identifier[parse] ( identifier[src] ): literal[string] identifier[rt] = identifier[libparser] . identifier[parse] ( identifier[byref] ( identifier[post] ), identifier[src] ) keyword[return] ( identifier[rt] , identifier[string_at] ( identifier[post] . identifier[title] , identifier[post] . identifier[tsz] ), identifier[string_at] ( identifier[post] . identifier[tpic] , identifier[post] . identifier[tpsz] ), identifier[post] . identifier[body] )
def parse(src): """Note: src should be an ascii string""" rt = libparser.parse(byref(post), src) return (rt, string_at(post.title, post.tsz), string_at(post.tpic, post.tpsz), post.body)
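The ctypes string_at call used by parse, in isolation (stdlib ctypes; the buffer contents are illustrative).

from ctypes import create_string_buffer, string_at

buf = create_string_buffer(b"hello world")
print(string_at(buf, 5))  # b'hello'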
def Match(self, file_entry): """Determines if a file entry matches the filter. Args: file_entry (dfvfs.FileEntry): a file entry. Returns: bool: True if the file entry matches the filter. """ if not file_entry: return False filename = file_entry.name.lower() return filename == self._filename
def function[Match, parameter[self, file_entry]]: constant[Determines if a file entry matches the filter. Args: file_entry (dfvfs.FileEntry): a file entry. Returns: bool: True if the file entry matches the filter. ] if <ast.UnaryOp object at 0x7da207f9be80> begin[:] return[constant[False]] variable[filename] assign[=] call[name[file_entry].name.lower, parameter[]] return[compare[name[filename] equal[==] name[self]._filename]]
keyword[def] identifier[Match] ( identifier[self] , identifier[file_entry] ): literal[string] keyword[if] keyword[not] identifier[file_entry] : keyword[return] keyword[False] identifier[filename] = identifier[file_entry] . identifier[name] . identifier[lower] () keyword[return] identifier[filename] == identifier[self] . identifier[_filename]
def Match(self, file_entry): """Determines if a file entry matches the filter. Args: file_entry (dfvfs.FileEntry): a file entry. Returns: bool: True if the file entry matches the filter. """ if not file_entry: return False # depends on [control=['if'], data=[]] filename = file_entry.name.lower() return filename == self._filename
def FromTimedelta(self, td):
    """Converts timedelta to Duration."""
    self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
                            td.microseconds * _NANOS_PER_MICROSECOND)
def function[FromTimedelta, parameter[self, td]]:
    constant[Converts timedelta to Duration.]
    call[name[self]._NormalizeDuration, parameter[binary_operation[name[td].seconds + binary_operation[name[td].days * name[_SECONDS_PER_DAY]]], binary_operation[name[td].microseconds * name[_NANOS_PER_MICROSECOND]]]]
keyword[def] identifier[FromTimedelta] ( identifier[self] , identifier[td] ): literal[string] identifier[self] . identifier[_NormalizeDuration] ( identifier[td] . identifier[seconds] + identifier[td] . identifier[days] * identifier[_SECONDS_PER_DAY] , identifier[td] . identifier[microseconds] * identifier[_NANOS_PER_MICROSECOND] )
def FromTimedelta(self, td): """Converts timedelta to Duration.""" self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, td.microseconds * _NANOS_PER_MICROSECOND)
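The arithmetic behind FromTimedelta, evaluated directly; the constants match their usual protobuf values, which is an assumption here.

from datetime import timedelta

_SECONDS_PER_DAY = 24 * 3600        # assumed protobuf constant
_NANOS_PER_MICROSECOND = 1000       # assumed protobuf constant

td = timedelta(days=1, seconds=2, microseconds=3)
seconds = td.seconds + td.days * _SECONDS_PER_DAY
nanos = td.microseconds * _NANOS_PER_MICROSECOND
print(seconds, nanos)  # 86402 3000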
def _check_inputs(self, operators, weights):
    """ Check Inputs

    This method checks that the input operators and weights are correctly
    formatted.

    Parameters
    ----------
    operators : list, tuple or np.ndarray
        List of linear operator class instances
    weights : list, tuple or np.ndarray
        List of weights for combining the linear adjoint operator results

    Returns
    -------
    tuple
        operators and weights

    Raises
    ------
    ValueError
        If the number of weights does not match the number of operators
    TypeError
        If the individual weight values are not floats

    """
    operators = self._check_type(operators)

    for operator in operators:
        if not hasattr(operator, 'op'):
            raise ValueError('Operators must contain "op" method.')
        if not hasattr(operator, 'adj_op'):
            raise ValueError('Operators must contain "adj_op" method.')
        operator.op = check_callable(operator.op)
        operator.cost = check_callable(operator.adj_op)

    if not isinstance(weights, type(None)):
        weights = self._check_type(weights)
        if weights.size != operators.size:
            raise ValueError('The number of weights must match the '
                             'number of operators.')
        if not np.issubdtype(weights.dtype, np.floating):
            raise TypeError('The weights must be a list of float values.')

    return operators, weights
def function[_check_inputs, parameter[self, operators, weights]]: constant[ Check Inputs This method checks that the input operators and weights are correctly formatted Parameters ---------- operators : list, tuple or np.ndarray List of linear operator class instances weights : list, tuple or np.ndarray List of weights for combining the linear adjoint operator results Returns ------- tuple operators and weights Raises ------ ValueError If the number of weights does not match the number of operators TypeError If the individual weight values are not floats ] variable[operators] assign[=] call[name[self]._check_type, parameter[name[operators]]] for taget[name[operator]] in starred[name[operators]] begin[:] if <ast.UnaryOp object at 0x7da1b0ebdfc0> begin[:] <ast.Raise object at 0x7da1b0ebc970> if <ast.UnaryOp object at 0x7da1b0ebdab0> begin[:] <ast.Raise object at 0x7da1b0ebe5c0> name[operator].op assign[=] call[name[check_callable], parameter[name[operator].op]] name[operator].adj_op assign[=] call[name[check_callable], parameter[name[operator].adj_op]] if <ast.UnaryOp object at 0x7da1b0ebc9d0> begin[:] variable[weights] assign[=] call[name[self]._check_type, parameter[name[weights]]] if compare[name[weights].size not_equal[!=] name[operators].size] begin[:] <ast.Raise object at 0x7da1b0ebc8e0> if <ast.UnaryOp object at 0x7da1b0ebd9c0> begin[:] <ast.Raise object at 0x7da20e957340> return[tuple[[<ast.Name object at 0x7da20e955cf0>, <ast.Name object at 0x7da20e954c70>]]]
keyword[def] identifier[_check_inputs] ( identifier[self] , identifier[operators] , identifier[weights] ): literal[string] identifier[operators] = identifier[self] . identifier[_check_type] ( identifier[operators] ) keyword[for] identifier[operator] keyword[in] identifier[operators] : keyword[if] keyword[not] identifier[hasattr] ( identifier[operator] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[operator] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[operator] . identifier[op] = identifier[check_callable] ( identifier[operator] . identifier[op] ) identifier[operator] . identifier[adj_op] = identifier[check_callable] ( identifier[operator] . identifier[adj_op] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[weights] , identifier[type] ( keyword[None] )): identifier[weights] = identifier[self] . identifier[_check_type] ( identifier[weights] ) keyword[if] identifier[weights] . identifier[size] != identifier[operators] . identifier[size] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] keyword[not] identifier[np] . identifier[issubdtype] ( identifier[weights] . identifier[dtype] , identifier[np] . identifier[floating] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[operators] , identifier[weights]
def _check_inputs(self, operators, weights):
    """ Check Inputs

    This method checks that the input operators and weights are correctly
    formatted

    Parameters
    ----------
    operators : list, tuple or np.ndarray
        List of linear operator class instances
    weights : list, tuple or np.ndarray
        List of weights for combining the linear adjoint operator results

    Returns
    -------
    tuple
        operators and weights

    Raises
    ------
    ValueError
        If the number of weights does not match the number of operators
    TypeError
        If the individual weight values are not floats

    """
    operators = self._check_type(operators)
    for operator in operators:
        if not hasattr(operator, 'op'):
            raise ValueError('Operators must contain "op" method.') # depends on [control=['if'], data=[]]
        if not hasattr(operator, 'adj_op'):
            raise ValueError('Operators must contain "adj_op" method.') # depends on [control=['if'], data=[]]
        operator.op = check_callable(operator.op)
        operator.adj_op = check_callable(operator.adj_op) # depends on [control=['for'], data=['operator']]
    if not isinstance(weights, type(None)):
        weights = self._check_type(weights)
        if weights.size != operators.size:
            raise ValueError('The number of weights must match the number of operators.') # depends on [control=['if'], data=[]]
        if not np.issubdtype(weights.dtype, np.floating):
            raise TypeError('The weights must be a list of float values.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    return (operators, weights)
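# The two weight validations above, shown in isolation with hypothetical
# values (the real operators are linear operator class instances).
import numpy as np

operators = np.array([object(), object()])
weights = np.array([0.3, 0.7])

assert weights.size == operators.size             # counts must match
assert np.issubdtype(weights.dtype, np.floating)  # weights must be floats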
def check_jobs(jobs): """Validate number of jobs.""" if jobs == 0: raise click.UsageError("Jobs must be >= 1 or == -1") elif jobs < 0: import multiprocessing jobs = multiprocessing.cpu_count() return jobs
def function[check_jobs, parameter[jobs]]: constant[Validate number of jobs.] if compare[name[jobs] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18eb56da0> return[name[jobs]]
keyword[def] identifier[check_jobs] ( identifier[jobs] ): literal[string] keyword[if] identifier[jobs] == literal[int] : keyword[raise] identifier[click] . identifier[UsageError] ( literal[string] ) keyword[elif] identifier[jobs] < literal[int] : keyword[import] identifier[multiprocessing] identifier[jobs] = identifier[multiprocessing] . identifier[cpu_count] () keyword[return] identifier[jobs]
def check_jobs(jobs): """Validate number of jobs.""" if jobs == 0: raise click.UsageError('Jobs must be >= 1 or == -1') # depends on [control=['if'], data=[]] elif jobs < 0: import multiprocessing jobs = multiprocessing.cpu_count() # depends on [control=['if'], data=['jobs']] return jobs
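# Usage sketch, assuming check_jobs (defined above) is in scope and the
# click package is installed.
print(check_jobs(4))   # -> 4
print(check_jobs(-1))  # -> multiprocessing.cpu_count() on this machine
# check_jobs(0) raises click.UsageError("Jobs must be >= 1 or == -1")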
def get_distance_function(distance):
    """
    Returns the distance function from the string name provided
    :param distance: The string name of the distance function, or a callable
    :return: The corresponding distance function
    """
    # If we provided distance function ourselves, use it
    if callable(distance):
        return distance

    try:
        return _supported_distances_lookup()[distance]
    except KeyError:
        raise KeyError('Unsupported distance function {0!r}'.format(distance.lower()))
def function[get_distance_function, parameter[distance]]: constant[ Returns the distance function from the string name provided :param distance: The string name of the distance function, or a callable :return: The corresponding distance function ] if call[name[callable], parameter[name[distance]]] begin[:] return[name[distance]] <ast.Try object at 0x7da18ede75e0>
keyword[def] identifier[get_distance_function] ( identifier[distance] ): literal[string] keyword[if] identifier[callable] ( identifier[distance] ): keyword[return] identifier[distance] keyword[try] : keyword[return] identifier[_supported_distances_lookup] ()[ identifier[distance] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[distance] . identifier[lower] ()))
def get_distance_function(distance):
    """
    Returns the distance function from the string name provided
    :param distance: The string name of the distance function, or a callable
    :return: The corresponding distance function
    """
    # If we provided distance function ourselves, use it
    if callable(distance):
        return distance # depends on [control=['if'], data=[]]
    try:
        return _supported_distances_lookup()[distance] # depends on [control=['try'], data=[]]
    except KeyError:
        raise KeyError('Unsupported distance function {0!r}'.format(distance.lower())) # depends on [control=['except'], data=[]]
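# The callable branch needs nothing beyond the function above: a callable
# passes straight through. (The string branch depends on the module's
# private _supported_distances_lookup table, so it is not exercised here.)
def euclidean(a, b):
    return sum((x - y) ** 2 for x, y in zip(a, b)) ** 0.5

dist = get_distance_function(euclidean)
print(dist([0, 0], [3, 4]))  # -> 5.0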
def check_privatenet(self):
        """
        Check if privatenet is running, and if the container is the same as the current Chains/privnet database.

        Raises:
            PrivnetConnectionError: if the private net couldn't be reached or the nonce does not match
        """
        rpc_settings.setup(self.RPC_LIST)
        client = RPCClient()

        try:
            version = client.get_version()
        except NEORPCException:
            raise PrivnetConnectionError("Error: private network container doesn't seem to be running, or RPC is not enabled.")

        print("Privatenet useragent '%s', nonce: %s" % (version["useragent"], version["nonce"]))

        # Now check if nonce is the same as in the chain path
        nonce_container = str(version["nonce"])
        neopy_chain_meta_filename = os.path.join(self.chain_leveldb_path, ".privnet-nonce")
        if os.path.isfile(neopy_chain_meta_filename):
            nonce_chain = open(neopy_chain_meta_filename, "r").read()
            if nonce_chain != nonce_container:
                raise PrivnetConnectionError(
                    "Chain database in Chains/privnet is for a different private network than the current container. "
                    "Consider deleting the Chain directory with 'rm -rf %s*'." % self.chain_leveldb_path
                )
        else:
            # When the Chains/privnet folder is removed, we need to create the directory
            if not os.path.isdir(self.chain_leveldb_path):
                os.mkdir(self.chain_leveldb_path)

            # Write the nonce to the meta file
            with open(neopy_chain_meta_filename, "w") as f:
                f.write(nonce_container)
def function[check_privatenet, parameter[self]]: constant[ Check if privatenet is running, and if the container is the same as the current Chains/privnet database. Raises: PrivnetConnectionError: if the private net couldn't be reached or the nonce does not match ] call[name[rpc_settings].setup, parameter[name[self].RPC_LIST]] variable[client] assign[=] call[name[RPCClient], parameter[]] <ast.Try object at 0x7da20e9b0d00> call[name[print], parameter[binary_operation[constant[Privatenet useragent '%s', nonce: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20e9b3a30>, <ast.Subscript object at 0x7da20e9b28f0>]]]]] variable[nonce_container] assign[=] call[name[str], parameter[call[name[version]][constant[nonce]]]] variable[neopy_chain_meta_filename] assign[=] call[name[os].path.join, parameter[name[self].chain_leveldb_path, constant[.privnet-nonce]]] if call[name[os].path.isfile, parameter[name[neopy_chain_meta_filename]]] begin[:] variable[nonce_chain] assign[=] call[call[name[open], parameter[name[neopy_chain_meta_filename], constant[r]]].read, parameter[]] if compare[name[nonce_chain] not_equal[!=] name[nonce_container]] begin[:] <ast.Raise object at 0x7da2041db6a0>
keyword[def] identifier[check_privatenet] ( identifier[self] ): literal[string] identifier[rpc_settings] . identifier[setup] ( identifier[self] . identifier[RPC_LIST] ) identifier[client] = identifier[RPCClient] () keyword[try] : identifier[version] = identifier[client] . identifier[get_version] () keyword[except] identifier[NEORPCException] : keyword[raise] identifier[PrivnetConnectionError] ( literal[string] ) identifier[print] ( literal[string] %( identifier[version] [ literal[string] ], identifier[version] [ literal[string] ])) identifier[nonce_container] = identifier[str] ( identifier[version] [ literal[string] ]) identifier[neopy_chain_meta_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[chain_leveldb_path] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[neopy_chain_meta_filename] ): identifier[nonce_chain] = identifier[open] ( identifier[neopy_chain_meta_filename] , literal[string] ). identifier[read] () keyword[if] identifier[nonce_chain] != identifier[nonce_container] : keyword[raise] identifier[PrivnetConnectionError] ( literal[string] literal[string] % identifier[self] . identifier[chain_leveldb_path] ) keyword[else] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[chain_leveldb_path] ): identifier[os] . identifier[mkdir] ( identifier[self] . identifier[chain_leveldb_path] ) keyword[with] identifier[open] ( identifier[neopy_chain_meta_filename] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[nonce_container] )
def check_privatenet(self):
    """
    Check if privatenet is running, and if the container is the same as the current Chains/privnet database.

    Raises:
        PrivnetConnectionError: if the private net couldn't be reached or the nonce does not match
    """
    rpc_settings.setup(self.RPC_LIST)
    client = RPCClient()
    try:
        version = client.get_version() # depends on [control=['try'], data=[]]
    except NEORPCException:
        raise PrivnetConnectionError("Error: private network container doesn't seem to be running, or RPC is not enabled.") # depends on [control=['except'], data=[]]
    print("Privatenet useragent '%s', nonce: %s" % (version['useragent'], version['nonce']))
    # Now check if nonce is the same as in the chain path
    nonce_container = str(version['nonce'])
    neopy_chain_meta_filename = os.path.join(self.chain_leveldb_path, '.privnet-nonce')
    if os.path.isfile(neopy_chain_meta_filename):
        nonce_chain = open(neopy_chain_meta_filename, 'r').read()
        if nonce_chain != nonce_container:
            raise PrivnetConnectionError("Chain database in Chains/privnet is for a different private network than the current container. Consider deleting the Chain directory with 'rm -rf %s*'." % self.chain_leveldb_path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        # When the Chains/privnet folder is removed, we need to create the directory
        if not os.path.isdir(self.chain_leveldb_path):
            os.mkdir(self.chain_leveldb_path) # depends on [control=['if'], data=[]]
        # Write the nonce to the meta file
        with open(neopy_chain_meta_filename, 'w') as f:
            f.write(nonce_container) # depends on [control=['with'], data=['f']]
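# Sketch of the nonce bookkeeping only, without the RPC call. Path and
# nonce value are hypothetical stand-ins.
import os

chain_leveldb_path = '/tmp/privnet-demo'
meta = os.path.join(chain_leveldb_path, '.privnet-nonce')
nonce_container = '1234567890'

if os.path.isfile(meta):
    nonce_chain = open(meta, 'r').read()
    print(nonce_chain == nonce_container)  # False would mean stale chain data
else:
    if not os.path.isdir(chain_leveldb_path):
        os.mkdir(chain_leveldb_path)
    with open(meta, 'w') as f:
        f.write(nonce_container)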
def receive(self): """I receive data+hash, check for a match, confirm or not confirm to the sender, and return the data payload. """ def _receive(input_message): self.data = input_message[:-64] _hash = input_message[-64:] if h.sha256(self.data).hexdigest() == _hash: self._w.send_message('Confirmed!') else: self._w.send_message('Not Confirmed!') yield self.start_tor() self._w = wormhole.create(u'axotor', RENDEZVOUS_RELAY, self._reactor, tor=self._tor, timing=self._timing) self._w.set_code(self._code) yield self._w.get_message().addCallback(_receive) yield self._w.close() self._reactor.stop() return
def function[receive, parameter[self]]: constant[I receive data+hash, check for a match, confirm or not confirm to the sender, and return the data payload. ] def function[_receive, parameter[input_message]]: name[self].data assign[=] call[name[input_message]][<ast.Slice object at 0x7da1b26d4610>] variable[_hash] assign[=] call[name[input_message]][<ast.Slice object at 0x7da1b26d45b0>] if compare[call[call[name[h].sha256, parameter[name[self].data]].hexdigest, parameter[]] equal[==] name[_hash]] begin[:] call[name[self]._w.send_message, parameter[constant[Confirmed!]]] <ast.Yield object at 0x7da1b26d7730> name[self]._w assign[=] call[name[wormhole].create, parameter[constant[axotor], name[RENDEZVOUS_RELAY], name[self]._reactor]] call[name[self]._w.set_code, parameter[name[self]._code]] <ast.Yield object at 0x7da1b26d70d0> <ast.Yield object at 0x7da1b26d6800> call[name[self]._reactor.stop, parameter[]] return[None]
keyword[def] identifier[receive] ( identifier[self] ): literal[string] keyword[def] identifier[_receive] ( identifier[input_message] ): identifier[self] . identifier[data] = identifier[input_message] [:- literal[int] ] identifier[_hash] = identifier[input_message] [- literal[int] :] keyword[if] identifier[h] . identifier[sha256] ( identifier[self] . identifier[data] ). identifier[hexdigest] ()== identifier[_hash] : identifier[self] . identifier[_w] . identifier[send_message] ( literal[string] ) keyword[else] : identifier[self] . identifier[_w] . identifier[send_message] ( literal[string] ) keyword[yield] identifier[self] . identifier[start_tor] () identifier[self] . identifier[_w] = identifier[wormhole] . identifier[create] ( literal[string] , identifier[RENDEZVOUS_RELAY] , identifier[self] . identifier[_reactor] , identifier[tor] = identifier[self] . identifier[_tor] , identifier[timing] = identifier[self] . identifier[_timing] ) identifier[self] . identifier[_w] . identifier[set_code] ( identifier[self] . identifier[_code] ) keyword[yield] identifier[self] . identifier[_w] . identifier[get_message] (). identifier[addCallback] ( identifier[_receive] ) keyword[yield] identifier[self] . identifier[_w] . identifier[close] () identifier[self] . identifier[_reactor] . identifier[stop] () keyword[return]
def receive(self): """I receive data+hash, check for a match, confirm or not confirm to the sender, and return the data payload. """ def _receive(input_message): self.data = input_message[:-64] _hash = input_message[-64:] if h.sha256(self.data).hexdigest() == _hash: self._w.send_message('Confirmed!') # depends on [control=['if'], data=[]] else: self._w.send_message('Not Confirmed!') yield self.start_tor() self._w = wormhole.create(u'axotor', RENDEZVOUS_RELAY, self._reactor, tor=self._tor, timing=self._timing) self._w.set_code(self._code) yield self._w.get_message().addCallback(_receive) yield self._w.close() self._reactor.stop() return
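# The payload/hash framing that _receive checks, shown standalone: the
# last 64 characters are the hex SHA-256 digest of the payload.
import hashlib

data = 'secret payload'
message = data + hashlib.sha256(data.encode()).hexdigest()
payload, digest = message[:-64], message[-64:]
print(hashlib.sha256(payload.encode()).hexdigest() == digest)  # True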
def fullvars(obj):
    '''
    like `vars()` but supports `__slots__`.
    '''
    try:
        return vars(obj)
    except TypeError:
        pass

    # __slots__
    slotsnames = set()
    for cls in type(obj).__mro__:
        __slots__ = getattr(cls, '__slots__', None)
        if __slots__:
            if isinstance(__slots__, str):
                slotsnames.add(__slots__)
            else:
                slotsnames.update(__slots__)
    return _SlotsProxy(obj, slotsnames)
def function[fullvars, parameter[obj]]: constant[ like `vars()` but supports `__slots__`. ] <ast.Try object at 0x7da1b002caf0> variable[slotsnames] assign[=] call[name[set], parameter[]] for taget[name[cls]] in starred[call[name[type], parameter[name[obj]]].__mro__] begin[:] variable[__slots__] assign[=] call[name[getattr], parameter[name[cls], constant[__slots__], constant[None]]] if name[__slots__] begin[:] if call[name[isinstance], parameter[name[__slots__], name[str]]] begin[:] call[name[slotsnames].add, parameter[name[__slots__]]] return[call[name[_SlotsProxy], parameter[name[obj], name[slotsnames]]]]
keyword[def] identifier[fullvars] ( identifier[obj] ): literal[string] keyword[try] : keyword[return] identifier[vars] ( identifier[obj] ) keyword[except] identifier[TypeError] : keyword[pass] identifier[slotsnames] = identifier[set] () keyword[for] identifier[cls] keyword[in] identifier[type] ( identifier[obj] ). identifier[__mro__] : identifier[__slots__] = identifier[getattr] ( identifier[cls] , literal[string] , keyword[None] ) keyword[if] identifier[__slots__] : keyword[if] identifier[isinstance] ( identifier[__slots__] , identifier[str] ): identifier[slotsnames] . identifier[add] ( identifier[__slots__] ) keyword[else] : identifier[slotsnames] . identifier[update] ( identifier[__slots__] ) keyword[return] identifier[_SlotsProxy] ( identifier[obj] , identifier[slotsnames] )
def fullvars(obj):
    """
    like `vars()` but supports `__slots__`.
    """
    try:
        return vars(obj) # depends on [control=['try'], data=[]]
    except TypeError:
        pass # depends on [control=['except'], data=[]]
    # __slots__
    slotsnames = set()
    for cls in type(obj).__mro__:
        __slots__ = getattr(cls, '__slots__', None)
        if __slots__:
            if isinstance(__slots__, str):
                slotsnames.add(__slots__) # depends on [control=['if'], data=[]]
            else:
                slotsnames.update(__slots__) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls']]
    return _SlotsProxy(obj, slotsnames)
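# The __slots__-gathering walk fullvars performs, shown standalone (the
# _SlotsProxy wrapper it returns lives elsewhere in the module).
class Base(object):
    __slots__ = ('x',)

class Point(Base):
    __slots__ = ('y', 'z')

slotsnames = set()
for cls in Point.__mro__:
    s = getattr(cls, '__slots__', None)
    if s:
        slotsnames.update([s] if isinstance(s, str) else s)
print(sorted(slotsnames))  # ['x', 'y', 'z']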
def activateCells(self, activeColumns, basalReinforceCandidates, apicalReinforceCandidates, basalGrowthCandidates, apicalGrowthCandidates, learn=True): """ Activate cells in the specified columns, using the result of the previous 'depolarizeCells' as predictions. Then learn. @param activeColumns (numpy array) List of active columns @param basalReinforceCandidates (numpy array) List of bits that the active cells may reinforce basal synapses to. @param apicalReinforceCandidates (numpy array) List of bits that the active cells may reinforce apical synapses to. @param basalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new basal synapses to. @param apicalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new apical synapses to @param learn (bool) Whether to grow / reinforce / punish synapses """ # Calculate active cells (correctPredictedCells, burstingColumns) = np2.setCompare(self.predictedCells, activeColumns, self.predictedCells / self.cellsPerColumn, rightMinusLeft=True) newActiveCells = np.concatenate((correctPredictedCells, np2.getAllCellsInColumns( burstingColumns, self.cellsPerColumn))) # Calculate learning (learningActiveBasalSegments, learningActiveApicalSegments, learningMatchingBasalSegments, learningMatchingApicalSegments, basalSegmentsToPunish, apicalSegmentsToPunish, newSegmentCells, learningCells) = self._calculateLearning(activeColumns, burstingColumns, correctPredictedCells, self.activeBasalSegments, self.activeApicalSegments, self.matchingBasalSegments, self.matchingApicalSegments, self.basalPotentialOverlaps, self.apicalPotentialOverlaps) if learn: # Learn on existing segments for learningSegments in (learningActiveBasalSegments, learningMatchingBasalSegments): self._learn(self.basalConnections, self.rng, learningSegments, basalReinforceCandidates, basalGrowthCandidates, self.basalPotentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) for learningSegments in (learningActiveApicalSegments, learningMatchingApicalSegments): self._learn(self.apicalConnections, self.rng, learningSegments, apicalReinforceCandidates, apicalGrowthCandidates, self.apicalPotentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) # Punish incorrect predictions if self.basalPredictedSegmentDecrement != 0.0: self.basalConnections.adjustActiveSynapses( basalSegmentsToPunish, basalReinforceCandidates, -self.basalPredictedSegmentDecrement) if self.apicalPredictedSegmentDecrement != 0.0: self.apicalConnections.adjustActiveSynapses( apicalSegmentsToPunish, apicalReinforceCandidates, -self.apicalPredictedSegmentDecrement) # Only grow segments if there is basal *and* apical input. if len(basalGrowthCandidates) > 0 and len(apicalGrowthCandidates) > 0: self._learnOnNewSegments(self.basalConnections, self.rng, newSegmentCells, basalGrowthCandidates, self.initialPermanence, self.sampleSize, self.maxSynapsesPerSegment) self._learnOnNewSegments(self.apicalConnections, self.rng, newSegmentCells, apicalGrowthCandidates, self.initialPermanence, self.sampleSize, self.maxSynapsesPerSegment) # Save the results newActiveCells.sort() learningCells.sort() self.activeCells = newActiveCells self.winnerCells = learningCells self.predictedActiveCells = correctPredictedCells
def function[activateCells, parameter[self, activeColumns, basalReinforceCandidates, apicalReinforceCandidates, basalGrowthCandidates, apicalGrowthCandidates, learn]]: constant[ Activate cells in the specified columns, using the result of the previous 'depolarizeCells' as predictions. Then learn. @param activeColumns (numpy array) List of active columns @param basalReinforceCandidates (numpy array) List of bits that the active cells may reinforce basal synapses to. @param apicalReinforceCandidates (numpy array) List of bits that the active cells may reinforce apical synapses to. @param basalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new basal synapses to. @param apicalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new apical synapses to @param learn (bool) Whether to grow / reinforce / punish synapses ] <ast.Tuple object at 0x7da1b08627d0> assign[=] call[name[np2].setCompare, parameter[name[self].predictedCells, name[activeColumns], binary_operation[name[self].predictedCells / name[self].cellsPerColumn]]] variable[newActiveCells] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da1b0862b30>, <ast.Call object at 0x7da1b08605e0>]]]] <ast.Tuple object at 0x7da1b08628f0> assign[=] call[name[self]._calculateLearning, parameter[name[activeColumns], name[burstingColumns], name[correctPredictedCells], name[self].activeBasalSegments, name[self].activeApicalSegments, name[self].matchingBasalSegments, name[self].matchingApicalSegments, name[self].basalPotentialOverlaps, name[self].apicalPotentialOverlaps]] if name[learn] begin[:] for taget[name[learningSegments]] in starred[tuple[[<ast.Name object at 0x7da1b088a830>, <ast.Name object at 0x7da1b088bfd0>]]] begin[:] call[name[self]._learn, parameter[name[self].basalConnections, name[self].rng, name[learningSegments], name[basalReinforceCandidates], name[basalGrowthCandidates], name[self].basalPotentialOverlaps, name[self].initialPermanence, name[self].sampleSize, name[self].permanenceIncrement, name[self].permanenceDecrement, name[self].maxSynapsesPerSegment]] for taget[name[learningSegments]] in starred[tuple[[<ast.Name object at 0x7da1b0833e50>, <ast.Name object at 0x7da1b0833eb0>]]] begin[:] call[name[self]._learn, parameter[name[self].apicalConnections, name[self].rng, name[learningSegments], name[apicalReinforceCandidates], name[apicalGrowthCandidates], name[self].apicalPotentialOverlaps, name[self].initialPermanence, name[self].sampleSize, name[self].permanenceIncrement, name[self].permanenceDecrement, name[self].maxSynapsesPerSegment]] if compare[name[self].basalPredictedSegmentDecrement not_equal[!=] constant[0.0]] begin[:] call[name[self].basalConnections.adjustActiveSynapses, parameter[name[basalSegmentsToPunish], name[basalReinforceCandidates], <ast.UnaryOp object at 0x7da1b08316f0>]] if compare[name[self].apicalPredictedSegmentDecrement not_equal[!=] constant[0.0]] begin[:] call[name[self].apicalConnections.adjustActiveSynapses, parameter[name[apicalSegmentsToPunish], name[apicalReinforceCandidates], <ast.UnaryOp object at 0x7da1b0831510>]] if <ast.BoolOp object at 0x7da1b0831f30> begin[:] call[name[self]._learnOnNewSegments, parameter[name[self].basalConnections, name[self].rng, name[newSegmentCells], name[basalGrowthCandidates], name[self].initialPermanence, name[self].sampleSize, name[self].maxSynapsesPerSegment]] call[name[self]._learnOnNewSegments, parameter[name[self].apicalConnections, name[self].rng, name[newSegmentCells], 
name[apicalGrowthCandidates], name[self].initialPermanence, name[self].sampleSize, name[self].maxSynapsesPerSegment]] call[name[newActiveCells].sort, parameter[]] call[name[learningCells].sort, parameter[]] name[self].activeCells assign[=] name[newActiveCells] name[self].winnerCells assign[=] name[learningCells] name[self].predictedActiveCells assign[=] name[correctPredictedCells]
keyword[def] identifier[activateCells] ( identifier[self] , identifier[activeColumns] , identifier[basalReinforceCandidates] , identifier[apicalReinforceCandidates] , identifier[basalGrowthCandidates] , identifier[apicalGrowthCandidates] , identifier[learn] = keyword[True] ): literal[string] ( identifier[correctPredictedCells] , identifier[burstingColumns] )= identifier[np2] . identifier[setCompare] ( identifier[self] . identifier[predictedCells] , identifier[activeColumns] , identifier[self] . identifier[predictedCells] / identifier[self] . identifier[cellsPerColumn] , identifier[rightMinusLeft] = keyword[True] ) identifier[newActiveCells] = identifier[np] . identifier[concatenate] (( identifier[correctPredictedCells] , identifier[np2] . identifier[getAllCellsInColumns] ( identifier[burstingColumns] , identifier[self] . identifier[cellsPerColumn] ))) ( identifier[learningActiveBasalSegments] , identifier[learningActiveApicalSegments] , identifier[learningMatchingBasalSegments] , identifier[learningMatchingApicalSegments] , identifier[basalSegmentsToPunish] , identifier[apicalSegmentsToPunish] , identifier[newSegmentCells] , identifier[learningCells] )= identifier[self] . identifier[_calculateLearning] ( identifier[activeColumns] , identifier[burstingColumns] , identifier[correctPredictedCells] , identifier[self] . identifier[activeBasalSegments] , identifier[self] . identifier[activeApicalSegments] , identifier[self] . identifier[matchingBasalSegments] , identifier[self] . identifier[matchingApicalSegments] , identifier[self] . identifier[basalPotentialOverlaps] , identifier[self] . identifier[apicalPotentialOverlaps] ) keyword[if] identifier[learn] : keyword[for] identifier[learningSegments] keyword[in] ( identifier[learningActiveBasalSegments] , identifier[learningMatchingBasalSegments] ): identifier[self] . identifier[_learn] ( identifier[self] . identifier[basalConnections] , identifier[self] . identifier[rng] , identifier[learningSegments] , identifier[basalReinforceCandidates] , identifier[basalGrowthCandidates] , identifier[self] . identifier[basalPotentialOverlaps] , identifier[self] . identifier[initialPermanence] , identifier[self] . identifier[sampleSize] , identifier[self] . identifier[permanenceIncrement] , identifier[self] . identifier[permanenceDecrement] , identifier[self] . identifier[maxSynapsesPerSegment] ) keyword[for] identifier[learningSegments] keyword[in] ( identifier[learningActiveApicalSegments] , identifier[learningMatchingApicalSegments] ): identifier[self] . identifier[_learn] ( identifier[self] . identifier[apicalConnections] , identifier[self] . identifier[rng] , identifier[learningSegments] , identifier[apicalReinforceCandidates] , identifier[apicalGrowthCandidates] , identifier[self] . identifier[apicalPotentialOverlaps] , identifier[self] . identifier[initialPermanence] , identifier[self] . identifier[sampleSize] , identifier[self] . identifier[permanenceIncrement] , identifier[self] . identifier[permanenceDecrement] , identifier[self] . identifier[maxSynapsesPerSegment] ) keyword[if] identifier[self] . identifier[basalPredictedSegmentDecrement] != literal[int] : identifier[self] . identifier[basalConnections] . identifier[adjustActiveSynapses] ( identifier[basalSegmentsToPunish] , identifier[basalReinforceCandidates] , - identifier[self] . identifier[basalPredictedSegmentDecrement] ) keyword[if] identifier[self] . identifier[apicalPredictedSegmentDecrement] != literal[int] : identifier[self] . identifier[apicalConnections] . 
identifier[adjustActiveSynapses] ( identifier[apicalSegmentsToPunish] , identifier[apicalReinforceCandidates] , - identifier[self] . identifier[apicalPredictedSegmentDecrement] ) keyword[if] identifier[len] ( identifier[basalGrowthCandidates] )> literal[int] keyword[and] identifier[len] ( identifier[apicalGrowthCandidates] )> literal[int] : identifier[self] . identifier[_learnOnNewSegments] ( identifier[self] . identifier[basalConnections] , identifier[self] . identifier[rng] , identifier[newSegmentCells] , identifier[basalGrowthCandidates] , identifier[self] . identifier[initialPermanence] , identifier[self] . identifier[sampleSize] , identifier[self] . identifier[maxSynapsesPerSegment] ) identifier[self] . identifier[_learnOnNewSegments] ( identifier[self] . identifier[apicalConnections] , identifier[self] . identifier[rng] , identifier[newSegmentCells] , identifier[apicalGrowthCandidates] , identifier[self] . identifier[initialPermanence] , identifier[self] . identifier[sampleSize] , identifier[self] . identifier[maxSynapsesPerSegment] ) identifier[newActiveCells] . identifier[sort] () identifier[learningCells] . identifier[sort] () identifier[self] . identifier[activeCells] = identifier[newActiveCells] identifier[self] . identifier[winnerCells] = identifier[learningCells] identifier[self] . identifier[predictedActiveCells] = identifier[correctPredictedCells]
def activateCells(self, activeColumns, basalReinforceCandidates, apicalReinforceCandidates, basalGrowthCandidates, apicalGrowthCandidates, learn=True): """ Activate cells in the specified columns, using the result of the previous 'depolarizeCells' as predictions. Then learn. @param activeColumns (numpy array) List of active columns @param basalReinforceCandidates (numpy array) List of bits that the active cells may reinforce basal synapses to. @param apicalReinforceCandidates (numpy array) List of bits that the active cells may reinforce apical synapses to. @param basalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new basal synapses to. @param apicalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new apical synapses to @param learn (bool) Whether to grow / reinforce / punish synapses """ # Calculate active cells (correctPredictedCells, burstingColumns) = np2.setCompare(self.predictedCells, activeColumns, self.predictedCells / self.cellsPerColumn, rightMinusLeft=True) newActiveCells = np.concatenate((correctPredictedCells, np2.getAllCellsInColumns(burstingColumns, self.cellsPerColumn))) # Calculate learning (learningActiveBasalSegments, learningActiveApicalSegments, learningMatchingBasalSegments, learningMatchingApicalSegments, basalSegmentsToPunish, apicalSegmentsToPunish, newSegmentCells, learningCells) = self._calculateLearning(activeColumns, burstingColumns, correctPredictedCells, self.activeBasalSegments, self.activeApicalSegments, self.matchingBasalSegments, self.matchingApicalSegments, self.basalPotentialOverlaps, self.apicalPotentialOverlaps) if learn: # Learn on existing segments for learningSegments in (learningActiveBasalSegments, learningMatchingBasalSegments): self._learn(self.basalConnections, self.rng, learningSegments, basalReinforceCandidates, basalGrowthCandidates, self.basalPotentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) # depends on [control=['for'], data=['learningSegments']] for learningSegments in (learningActiveApicalSegments, learningMatchingApicalSegments): self._learn(self.apicalConnections, self.rng, learningSegments, apicalReinforceCandidates, apicalGrowthCandidates, self.apicalPotentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) # depends on [control=['for'], data=['learningSegments']] # Punish incorrect predictions if self.basalPredictedSegmentDecrement != 0.0: self.basalConnections.adjustActiveSynapses(basalSegmentsToPunish, basalReinforceCandidates, -self.basalPredictedSegmentDecrement) # depends on [control=['if'], data=[]] if self.apicalPredictedSegmentDecrement != 0.0: self.apicalConnections.adjustActiveSynapses(apicalSegmentsToPunish, apicalReinforceCandidates, -self.apicalPredictedSegmentDecrement) # depends on [control=['if'], data=[]] # Only grow segments if there is basal *and* apical input. 
if len(basalGrowthCandidates) > 0 and len(apicalGrowthCandidates) > 0: self._learnOnNewSegments(self.basalConnections, self.rng, newSegmentCells, basalGrowthCandidates, self.initialPermanence, self.sampleSize, self.maxSynapsesPerSegment) self._learnOnNewSegments(self.apicalConnections, self.rng, newSegmentCells, apicalGrowthCandidates, self.initialPermanence, self.sampleSize, self.maxSynapsesPerSegment) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Save the results newActiveCells.sort() learningCells.sort() self.activeCells = newActiveCells self.winnerCells = learningCells self.predictedActiveCells = correctPredictedCells
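# Approximate numpy illustration of the active-cell step above: an active
# column with no correctly predicted cell "bursts", activating every cell
# it contains. np2.setCompare / np2.getAllCellsInColumns are stood in for
# with plain numpy; all values are hypothetical.
import numpy as np

cellsPerColumn = 4
predictedCells = np.array([5, 9])      # cells in columns 1 and 2
activeColumns = np.array([1, 2, 3])

predictedColumns = predictedCells // cellsPerColumn              # [1 2]
burstingColumns = np.setdiff1d(activeColumns, predictedColumns)  # [3]
burstCells = (burstingColumns[:, None] * cellsPerColumn
              + np.arange(cellsPerColumn)).ravel()
print(burstCells)  # [12 13 14 15]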
def finish_response(self): """ Completes the response and performs the following tasks: - Remove the `'ws4py.socket'` and `'ws4py.websocket'` environ keys. - Attach the returned websocket, if any, to the WSGI server using its ``link_websocket_to_server`` method. """ # force execution of the result iterator until first actual content rest = iter(self.result) first = list(itertools.islice(rest, 1)) self.result = itertools.chain(first, rest) # now it's safe to look if environ was modified ws = None if self.environ: self.environ.pop('ws4py.socket', None) ws = self.environ.pop('ws4py.websocket', None) try: SimpleHandler.finish_response(self) except: if ws: ws.close(1011, reason='Something broke') raise else: if ws: self.request_handler.server.link_websocket_to_server(ws)
def function[finish_response, parameter[self]]: constant[ Completes the response and performs the following tasks: - Remove the `'ws4py.socket'` and `'ws4py.websocket'` environ keys. - Attach the returned websocket, if any, to the WSGI server using its ``link_websocket_to_server`` method. ] variable[rest] assign[=] call[name[iter], parameter[name[self].result]] variable[first] assign[=] call[name[list], parameter[call[name[itertools].islice, parameter[name[rest], constant[1]]]]] name[self].result assign[=] call[name[itertools].chain, parameter[name[first], name[rest]]] variable[ws] assign[=] constant[None] if name[self].environ begin[:] call[name[self].environ.pop, parameter[constant[ws4py.socket], constant[None]]] variable[ws] assign[=] call[name[self].environ.pop, parameter[constant[ws4py.websocket], constant[None]]] <ast.Try object at 0x7da1b0d494e0>
keyword[def] identifier[finish_response] ( identifier[self] ): literal[string] identifier[rest] = identifier[iter] ( identifier[self] . identifier[result] ) identifier[first] = identifier[list] ( identifier[itertools] . identifier[islice] ( identifier[rest] , literal[int] )) identifier[self] . identifier[result] = identifier[itertools] . identifier[chain] ( identifier[first] , identifier[rest] ) identifier[ws] = keyword[None] keyword[if] identifier[self] . identifier[environ] : identifier[self] . identifier[environ] . identifier[pop] ( literal[string] , keyword[None] ) identifier[ws] = identifier[self] . identifier[environ] . identifier[pop] ( literal[string] , keyword[None] ) keyword[try] : identifier[SimpleHandler] . identifier[finish_response] ( identifier[self] ) keyword[except] : keyword[if] identifier[ws] : identifier[ws] . identifier[close] ( literal[int] , identifier[reason] = literal[string] ) keyword[raise] keyword[else] : keyword[if] identifier[ws] : identifier[self] . identifier[request_handler] . identifier[server] . identifier[link_websocket_to_server] ( identifier[ws] )
def finish_response(self): """ Completes the response and performs the following tasks: - Remove the `'ws4py.socket'` and `'ws4py.websocket'` environ keys. - Attach the returned websocket, if any, to the WSGI server using its ``link_websocket_to_server`` method. """ # force execution of the result iterator until first actual content rest = iter(self.result) first = list(itertools.islice(rest, 1)) self.result = itertools.chain(first, rest) # now it's safe to look if environ was modified ws = None if self.environ: self.environ.pop('ws4py.socket', None) ws = self.environ.pop('ws4py.websocket', None) # depends on [control=['if'], data=[]] try: SimpleHandler.finish_response(self) # depends on [control=['try'], data=[]] except: if ws: ws.close(1011, reason='Something broke') # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=[]] else: if ws: self.request_handler.server.link_websocket_to_server(ws) # depends on [control=['if'], data=[]]
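# The itertools trick used above, standalone: force the first chunk of a
# lazy result, then re-chain it so nothing is lost downstream.
import itertools

result = iter(['chunk1', 'chunk2'])
first = list(itertools.islice(result, 1))  # forces the first chunk
result = itertools.chain(first, result)    # re-attach it to the stream
print(list(result))  # ['chunk1', 'chunk2']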
def voronoi(script, region_num=10, overlap=False):
    """Voronoi Atlas parameterization

    """
    filter_xml = ''.join([
        '  <filter name="Parametrization: Voronoi Atlas">\n',
        '    <Param name="regionNum"',
        'value="%d"' % region_num,
        'description="Approx. Region Num"',
        'type="RichInt"',
        'tooltip="An estimation of the number of regions that must be generated. Smaller regions could lead to parametrizations with smaller distortion."',
        '/>\n',
        '    <Param name="overlapFlag"',
        'value="%s"' % str(overlap).lower(),
        'description="Overlap"',
        'type="RichBool"',
        'tooltip="If checked the resulting parametrization will be composed by overlapping regions, e.g. the resulting mesh will have duplicated faces: each region will have a ring of overlapping duplicate faces that will ensure that border regions will be parametrized in the atlas twice. This is quite useful for building mipmap robust atlases"',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None
def function[voronoi, parameter[script, region_num, overlap]]: constant[Voronoi Atlas parameterization ] variable[filter_xml] assign[=] call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b0295270>, <ast.Constant object at 0x7da1b0297790>, <ast.BinOp object at 0x7da1b0297a00>, <ast.Constant object at 0x7da1b0294610>, <ast.Constant object at 0x7da1b0295030>, <ast.Constant object at 0x7da1b0295360>, <ast.Constant object at 0x7da1b02952d0>, <ast.Constant object at 0x7da1b0294d00>, <ast.BinOp object at 0x7da1b02942b0>, <ast.Constant object at 0x7da1b02979a0>, <ast.Constant object at 0x7da1b0294850>, <ast.Constant object at 0x7da1b0295a50>, <ast.Constant object at 0x7da1b02978b0>, <ast.Constant object at 0x7da1b0294400>]]]] call[name[util].write_filter, parameter[name[script], name[filter_xml]]] return[constant[None]]
keyword[def] identifier[voronoi] ( identifier[script] , identifier[region_num] = literal[int] , identifier[overlap] = keyword[False] ): literal[string] identifier[filter_xml] = literal[string] . identifier[join] ([ literal[string] , literal[string] , literal[string] % identifier[region_num] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] % identifier[str] ( identifier[overlap] ). identifier[lower] (), literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[util] . identifier[write_filter] ( identifier[script] , identifier[filter_xml] ) keyword[return] keyword[None]
def voronoi(script, region_num=10, overlap=False):
    """Voronoi Atlas parameterization

    """
    filter_xml = ''.join(['  <filter name="Parametrization: Voronoi Atlas">\n', '    <Param name="regionNum"', 'value="%d"' % region_num, 'description="Approx. Region Num"', 'type="RichInt"', 'tooltip="An estimation of the number of regions that must be generated. Smaller regions could lead to parametrizations with smaller distortion."', '/>\n', '    <Param name="overlapFlag"', 'value="%s"' % str(overlap).lower(), 'description="Overlap"', 'type="RichBool"', 'tooltip="If checked the resulting parametrization will be composed by overlapping regions, e.g. the resulting mesh will have duplicated faces: each region will have a ring of overlapping duplicate faces that will ensure that border regions will be parametrized in the atlas twice. This is quite useful for building mipmap robust atlases"', '/>\n', '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None
def dump_nodes(self):
        """Dump current screen UI to list

        Returns:
            List of UINode objects, for example:

            [UINode(
                bounds=Bounds(left=0, top=0, right=480, bottom=168),
                checkable=False,
                class_name='android.view.View',
                text='',
                resource_id='',
                package='com.sonyericsson.advancedwidget.clock')]
        """
        xmldata = self._uiauto.dump()
        dom = xml.dom.minidom.parseString(xmldata.encode('utf-8'))
        root = dom.documentElement
        nodes = root.getElementsByTagName('node')
        ui_nodes = []
        for node in nodes:
            ui_nodes.append(self._parse_xml_node(node))
        return ui_nodes
def function[dump_nodes, parameter[self]]: constant[Dump current screen UI to list Returns: List of UINode objects, for example: [UINode( bounds=Bounds(left=0, top=0, right=480, bottom=168), checkable=False, class_name='android.view.View', text='', resource_id='', package='com.sonyericsson.advancedwidget.clock')] ] variable[xmldata] assign[=] call[name[self]._uiauto.dump, parameter[]] variable[dom] assign[=] call[name[xml].dom.minidom.parseString, parameter[call[name[xmldata].encode, parameter[constant[utf-8]]]]] variable[root] assign[=] name[dom].documentElement variable[nodes] assign[=] call[name[root].getElementsByTagName, parameter[constant[node]]] variable[ui_nodes] assign[=] list[[]] for taget[name[node]] in starred[name[nodes]] begin[:] call[name[ui_nodes].append, parameter[call[name[self]._parse_xml_node, parameter[name[node]]]]] return[name[ui_nodes]]
keyword[def] identifier[dump_nodes] ( identifier[self] ): literal[string] identifier[xmldata] = identifier[self] . identifier[_uiauto] . identifier[dump] () identifier[dom] = identifier[xml] . identifier[dom] . identifier[minidom] . identifier[parseString] ( identifier[xmldata] . identifier[encode] ( literal[string] )) identifier[root] = identifier[dom] . identifier[documentElement] identifier[nodes] = identifier[root] . identifier[getElementsByTagName] ( literal[string] ) identifier[ui_nodes] =[] keyword[for] identifier[node] keyword[in] identifier[nodes] : identifier[ui_nodes] . identifier[append] ( identifier[self] . identifier[_parse_xml_node] ( identifier[node] )) keyword[return] identifier[ui_nodes]
def dump_nodes(self):
    """Dump current screen UI to list

    Returns:
        List of UINode objects, for example:

        [UINode(
            bounds=Bounds(left=0, top=0, right=480, bottom=168),
            checkable=False,
            class_name='android.view.View',
            text='',
            resource_id='',
            package='com.sonyericsson.advancedwidget.clock')]
    """
    xmldata = self._uiauto.dump()
    dom = xml.dom.minidom.parseString(xmldata.encode('utf-8'))
    root = dom.documentElement
    nodes = root.getElementsByTagName('node')
    ui_nodes = []
    for node in nodes:
        ui_nodes.append(self._parse_xml_node(node)) # depends on [control=['for'], data=['node']]
    return ui_nodes
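# Minimal parse of the kind of UIAutomator XML dump_nodes consumes. The
# dump string here is a hypothetical stand-in for self._uiauto.dump().
import xml.dom.minidom

xmldata = ('<hierarchy>'
           '<node class="android.view.View" text="" package="com.example"/>'
           '</hierarchy>')
dom = xml.dom.minidom.parseString(xmldata.encode('utf-8'))
for node in dom.documentElement.getElementsByTagName('node'):
    print(node.getAttribute('class'), repr(node.getAttribute('text')))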
def delete_server_cert(cert_name, region=None, key=None, keyid=None, profile=None): ''' Deletes a certificate from Amazon. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion boto_iam.delete_server_cert mycert_name ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: return conn.delete_server_cert(cert_name) except boto.exception.BotoServerError as e: log.debug(e) log.error('Failed to delete certificate %s.', cert_name) return False
def function[delete_server_cert, parameter[cert_name, region, key, keyid, profile]]: constant[ Deletes a certificate from Amazon. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion boto_iam.delete_server_cert mycert_name ] variable[conn] assign[=] call[name[_get_conn], parameter[]] <ast.Try object at 0x7da1b21e9510>
keyword[def] identifier[delete_server_cert] ( identifier[cert_name] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[try] : keyword[return] identifier[conn] . identifier[delete_server_cert] ( identifier[cert_name] ) keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] : identifier[log] . identifier[debug] ( identifier[e] ) identifier[log] . identifier[error] ( literal[string] , identifier[cert_name] ) keyword[return] keyword[False]
def delete_server_cert(cert_name, region=None, key=None, keyid=None, profile=None): """ Deletes a certificate from Amazon. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion boto_iam.delete_server_cert mycert_name """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: return conn.delete_server_cert(cert_name) # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as e: log.debug(e) log.error('Failed to delete certificate %s.', cert_name) return False # depends on [control=['except'], data=['e']]
def show_lbaas_l7rule(self, l7rule, l7policy, **_params): """Fetches information of a certain L7 policy's rule.""" return self.get(self.lbaas_l7rule_path % (l7policy, l7rule), params=_params)
def function[show_lbaas_l7rule, parameter[self, l7rule, l7policy]]: constant[Fetches information of a certain L7 policy's rule.] return[call[name[self].get, parameter[binary_operation[name[self].lbaas_l7rule_path <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00e710>, <ast.Name object at 0x7da18f00f130>]]]]]]
keyword[def] identifier[show_lbaas_l7rule] ( identifier[self] , identifier[l7rule] , identifier[l7policy] ,** identifier[_params] ): literal[string] keyword[return] identifier[self] . identifier[get] ( identifier[self] . identifier[lbaas_l7rule_path] %( identifier[l7policy] , identifier[l7rule] ), identifier[params] = identifier[_params] )
def show_lbaas_l7rule(self, l7rule, l7policy, **_params): """Fetches information of a certain L7 policy's rule.""" return self.get(self.lbaas_l7rule_path % (l7policy, l7rule), params=_params)
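# The path interpolation above, with a hypothetical template and IDs (the
# real lbaas_l7rule_path attribute is defined on the client class).
lbaas_l7rule_path = '/lbaas/l7policies/%s/rules/%s'
print(lbaas_l7rule_path % ('policy-uuid', 'rule-uuid'))
# -> /lbaas/l7policies/policy-uuid/rules/rule-uuid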
def shift(txt, indent = ' ', prepend = ''): """Return a list corresponding to the lines of text in the `txt` list indented by `indent`. Prepend instead the string given in `prepend` to the beginning of the first line. Note that if len(prepend) > len(indent), then `prepend` will be truncated (doing better is tricky!). This preserves a special '' entry at the end of `txt` (see `do_para` for the meaning). """ if type(indent) is int: indent = indent * ' ' special_end = txt[-1:] == [''] lines = ''.join(txt).splitlines(True) for i in range(1,len(lines)): if lines[i].strip() or indent.strip(): lines[i] = indent + lines[i] if not lines: return prepend prepend = prepend[:len(indent)] indent = indent[len(prepend):] lines[0] = prepend + indent + lines[0] ret = [''.join(lines)] if special_end: ret.append('') return ret
def function[shift, parameter[txt, indent, prepend]]: constant[Return a list corresponding to the lines of text in the `txt` list indented by `indent`. Prepend instead the string given in `prepend` to the beginning of the first line. Note that if len(prepend) > len(indent), then `prepend` will be truncated (doing better is tricky!). This preserves a special '' entry at the end of `txt` (see `do_para` for the meaning). ] if compare[call[name[type], parameter[name[indent]]] is name[int]] begin[:] variable[indent] assign[=] binary_operation[name[indent] * constant[ ]] variable[special_end] assign[=] compare[call[name[txt]][<ast.Slice object at 0x7da1b1686ec0>] equal[==] list[[<ast.Constant object at 0x7da1b1686050>]]] variable[lines] assign[=] call[call[constant[].join, parameter[name[txt]]].splitlines, parameter[constant[True]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[lines]]]]]] begin[:] if <ast.BoolOp object at 0x7da1b16841c0> begin[:] call[name[lines]][name[i]] assign[=] binary_operation[name[indent] + call[name[lines]][name[i]]] if <ast.UnaryOp object at 0x7da1b16b02e0> begin[:] return[name[prepend]] variable[prepend] assign[=] call[name[prepend]][<ast.Slice object at 0x7da1b16b0d30>] variable[indent] assign[=] call[name[indent]][<ast.Slice object at 0x7da1b16b0760>] call[name[lines]][constant[0]] assign[=] binary_operation[binary_operation[name[prepend] + name[indent]] + call[name[lines]][constant[0]]] variable[ret] assign[=] list[[<ast.Call object at 0x7da1b17279a0>]] if name[special_end] begin[:] call[name[ret].append, parameter[constant[]]] return[name[ret]]
keyword[def] identifier[shift] ( identifier[txt] , identifier[indent] = literal[string] , identifier[prepend] = literal[string] ): literal[string] keyword[if] identifier[type] ( identifier[indent] ) keyword[is] identifier[int] : identifier[indent] = identifier[indent] * literal[string] identifier[special_end] = identifier[txt] [- literal[int] :]==[ literal[string] ] identifier[lines] = literal[string] . identifier[join] ( identifier[txt] ). identifier[splitlines] ( keyword[True] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[lines] )): keyword[if] identifier[lines] [ identifier[i] ]. identifier[strip] () keyword[or] identifier[indent] . identifier[strip] (): identifier[lines] [ identifier[i] ]= identifier[indent] + identifier[lines] [ identifier[i] ] keyword[if] keyword[not] identifier[lines] : keyword[return] identifier[prepend] identifier[prepend] = identifier[prepend] [: identifier[len] ( identifier[indent] )] identifier[indent] = identifier[indent] [ identifier[len] ( identifier[prepend] ):] identifier[lines] [ literal[int] ]= identifier[prepend] + identifier[indent] + identifier[lines] [ literal[int] ] identifier[ret] =[ literal[string] . identifier[join] ( identifier[lines] )] keyword[if] identifier[special_end] : identifier[ret] . identifier[append] ( literal[string] ) keyword[return] identifier[ret]
def shift(txt, indent=' ', prepend=''): """Return a list corresponding to the lines of text in the `txt` list indented by `indent`. Prepend instead the string given in `prepend` to the beginning of the first line. Note that if len(prepend) > len(indent), then `prepend` will be truncated (doing better is tricky!). This preserves a special '' entry at the end of `txt` (see `do_para` for the meaning). """ if type(indent) is int: indent = indent * ' ' # depends on [control=['if'], data=[]] special_end = txt[-1:] == [''] lines = ''.join(txt).splitlines(True) for i in range(1, len(lines)): if lines[i].strip() or indent.strip(): lines[i] = indent + lines[i] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] if not lines: return prepend # depends on [control=['if'], data=[]] prepend = prepend[:len(indent)] indent = indent[len(prepend):] lines[0] = prepend + indent + lines[0] ret = [''.join(lines)] if special_end: ret.append('') # depends on [control=['if'], data=[]] return ret
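# Worked example, assuming shift (defined above) is in scope: indent the
# continuation line and prepend a list marker to the first.
out = shift(['alpha\nbeta\n'], indent='  ', prepend='- ')
print(out)  # ['- alpha\n  beta\n']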
def get_idxs(exprs): """ Finds sympy.tensor.indexed.Idx instances and returns them. """ idxs = set() for expr in (exprs): for i in expr.find(sympy.Idx): idxs.add(i) return sorted(idxs, key=str)
def function[get_idxs, parameter[exprs]]: constant[ Finds sympy.tensor.indexed.Idx instances and returns them. ] variable[idxs] assign[=] call[name[set], parameter[]] for taget[name[expr]] in starred[name[exprs]] begin[:] for taget[name[i]] in starred[call[name[expr].find, parameter[name[sympy].Idx]]] begin[:] call[name[idxs].add, parameter[name[i]]] return[call[name[sorted], parameter[name[idxs]]]]
keyword[def] identifier[get_idxs] ( identifier[exprs] ): literal[string] identifier[idxs] = identifier[set] () keyword[for] identifier[expr] keyword[in] ( identifier[exprs] ): keyword[for] identifier[i] keyword[in] identifier[expr] . identifier[find] ( identifier[sympy] . identifier[Idx] ): identifier[idxs] . identifier[add] ( identifier[i] ) keyword[return] identifier[sorted] ( identifier[idxs] , identifier[key] = identifier[str] )
def get_idxs(exprs): """ Finds sympy.tensor.indexed.Idx instances and returns them. """ idxs = set() for expr in exprs: for i in expr.find(sympy.Idx): idxs.add(i) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['expr']] return sorted(idxs, key=str)
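# Usage sketch with sympy's indexed objects, assuming get_idxs (above)
# is in scope.
import sympy

i, j = sympy.Idx('i'), sympy.Idx('j')
x = sympy.IndexedBase('x')
print(get_idxs([x[i] + x[j], x[i] ** 2]))  # [i, j], sorted by name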
def uniquify_list(L):
    """Same order unique list using only a list comprehension."""
    return [e for i, e in enumerate(L) if L.index(e) == i]
def function[uniquify_list, parameter[L]]: constant[Same order unique list using only a list comprehension.] return[<ast.ListComp object at 0x7da20c6aba00>]
keyword[def] identifier[uniquify_list] ( identifier[L] ): literal[string] keyword[return] [ identifier[e] keyword[for] identifier[i] , identifier[e] keyword[in] identifier[enumerate] ( identifier[L] ) keyword[if] identifier[L] . identifier[index] ( identifier[e] )== identifier[i] ]
def uniquify_list(L):
    """Same order unique list using only a list comprehension."""
    return [e for (i, e) in enumerate(L) if L.index(e) == i]
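# Usage sketch: first occurrences win and input order is preserved.
print(uniquify_list([3, 1, 3, 2, 1]))  # -> [3, 1, 2]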
def redistribute_threads(blockdimx, blockdimy, blockdimz, dimx, dimy, dimz): """ Redistribute threads from the Z dimension towards the X dimension. Also clamp number of threads to the problem dimension size, if necessary """ # Shift threads from the z dimension # into the y dimension while blockdimz > dimz: tmp = blockdimz // 2 if tmp < dimz: break blockdimy *= 2 blockdimz = tmp # Shift threads from the y dimension # into the x dimension while blockdimy > dimy: tmp = blockdimy // 2 if tmp < dimy: break blockdimx *= 2 blockdimy = tmp # Clamp the block dimensions # if necessary if dimx < blockdimx: blockdimx = dimx if dimy < blockdimy: blockdimy = dimy if dimz < blockdimz: blockdimz = dimz return blockdimx, blockdimy, blockdimz
def function[redistribute_threads, parameter[blockdimx, blockdimy, blockdimz, dimx, dimy, dimz]]: constant[ Redistribute threads from the Z dimension towards the X dimension. Also clamp number of threads to the problem dimension size, if necessary ] while compare[name[blockdimz] greater[>] name[dimz]] begin[:] variable[tmp] assign[=] binary_operation[name[blockdimz] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] if compare[name[tmp] less[<] name[dimz]] begin[:] break <ast.AugAssign object at 0x7da1b0f2c850> variable[blockdimz] assign[=] name[tmp] while compare[name[blockdimy] greater[>] name[dimy]] begin[:] variable[tmp] assign[=] binary_operation[name[blockdimy] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] if compare[name[tmp] less[<] name[dimy]] begin[:] break <ast.AugAssign object at 0x7da1b0f2e650> variable[blockdimy] assign[=] name[tmp] if compare[name[dimx] less[<] name[blockdimx]] begin[:] variable[blockdimx] assign[=] name[dimx] if compare[name[dimy] less[<] name[blockdimy]] begin[:] variable[blockdimy] assign[=] name[dimy] if compare[name[dimz] less[<] name[blockdimz]] begin[:] variable[blockdimz] assign[=] name[dimz] return[tuple[[<ast.Name object at 0x7da1b0f2c0a0>, <ast.Name object at 0x7da1b0f2c280>, <ast.Name object at 0x7da1b0f2ca60>]]]
keyword[def] identifier[redistribute_threads] ( identifier[blockdimx] , identifier[blockdimy] , identifier[blockdimz] , identifier[dimx] , identifier[dimy] , identifier[dimz] ): literal[string] keyword[while] identifier[blockdimz] > identifier[dimz] : identifier[tmp] = identifier[blockdimz] // literal[int] keyword[if] identifier[tmp] < identifier[dimz] : keyword[break] identifier[blockdimy] *= literal[int] identifier[blockdimz] = identifier[tmp] keyword[while] identifier[blockdimy] > identifier[dimy] : identifier[tmp] = identifier[blockdimy] // literal[int] keyword[if] identifier[tmp] < identifier[dimy] : keyword[break] identifier[blockdimx] *= literal[int] identifier[blockdimy] = identifier[tmp] keyword[if] identifier[dimx] < identifier[blockdimx] : identifier[blockdimx] = identifier[dimx] keyword[if] identifier[dimy] < identifier[blockdimy] : identifier[blockdimy] = identifier[dimy] keyword[if] identifier[dimz] < identifier[blockdimz] : identifier[blockdimz] = identifier[dimz] keyword[return] identifier[blockdimx] , identifier[blockdimy] , identifier[blockdimz]
def redistribute_threads(blockdimx, blockdimy, blockdimz, dimx, dimy, dimz): """ Redistribute threads from the Z dimension towards the X dimension. Also clamp number of threads to the problem dimension size, if necessary """ # Shift threads from the z dimension # into the y dimension while blockdimz > dimz: tmp = blockdimz // 2 if tmp < dimz: break # depends on [control=['if'], data=[]] blockdimy *= 2 blockdimz = tmp # depends on [control=['while'], data=['blockdimz', 'dimz']] # Shift threads from the y dimension # into the x dimension while blockdimy > dimy: tmp = blockdimy // 2 if tmp < dimy: break # depends on [control=['if'], data=[]] blockdimx *= 2 blockdimy = tmp # depends on [control=['while'], data=['blockdimy', 'dimy']] # Clamp the block dimensions # if necessary if dimx < blockdimx: blockdimx = dimx # depends on [control=['if'], data=['dimx', 'blockdimx']] if dimy < blockdimy: blockdimy = dimy # depends on [control=['if'], data=['dimy', 'blockdimy']] if dimz < blockdimz: blockdimz = dimz # depends on [control=['if'], data=['dimz', 'blockdimz']] return (blockdimx, blockdimy, blockdimz)
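Two worked calls make the folding concrete (problem sizes are illustrative). An 8 x 8 x 8 block carries 8 * 8 * 8 = 512 threads; when a problem dimension is flat, those threads migrate z -> y -> x, and the count is preserved whenever no clamp fires:

# Flat z dimension: the z-threads fold into y; 8 * 64 * 1 is still 512.
print(redistribute_threads(8, 8, 8, 256, 256, 1))  # -> (8, 64, 1)

# Skinny y and z: threads migrate through y onward into x.
print(redistribute_threads(8, 8, 8, 1000, 4, 2))   # -> (64, 4, 2)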
def GetConfiguredUsers(self):
    """Retrieve the list of configured Google user accounts.

    Returns:
      list, the username strings of users configured by Google.
    """
    if os.path.exists(self.google_users_file):
        users = open(self.google_users_file).readlines()
    else:
        users = []
    return [user.strip() for user in users]
def function[GetConfiguredUsers, parameter[self]]: constant[Retrieve the list of configured Google user accounts.

    Returns:
      list, the username strings of users configured by Google.
    ] if call[name[os].path.exists, parameter[name[self].google_users_file]] begin[:] variable[users] assign[=] call[call[name[open], parameter[name[self].google_users_file]].readlines, parameter[]] return[<ast.ListComp object at 0x7da204960760>]
keyword[def] identifier[GetConfiguredUsers] ( identifier[self] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[google_users_file] ): identifier[users] = identifier[open] ( identifier[self] . identifier[google_users_file] ). identifier[readlines] () keyword[else] : identifier[users] =[] keyword[return] [ identifier[user] . identifier[strip] () keyword[for] identifier[user] keyword[in] identifier[users] ]
def GetConfiguredUsers(self):
    """Retrieve the list of configured Google user accounts.

    Returns:
      list, the username strings of users configured by Google.
    """
    if os.path.exists(self.google_users_file):
        users = open(self.google_users_file).readlines() # depends on [control=['if'], data=[]]
    else:
        users = []
    return [user.strip() for user in users]
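One caveat on the row above: the bare `open(...).readlines()` leaves closing the file handle to garbage collection. A context-manager variant (a hypothetical free-function form, not from the source module) that closes the handle deterministically:

import os

def get_configured_users(path):
    """Like GetConfiguredUsers above, but releases the file handle promptly."""
    if not os.path.exists(path):
        return []
    with open(path) as handle:
        return [user.strip() for user in handle]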
def to_python(obj, in_dict, str_keys=None, date_keys=None, int_keys=None, object_map=None, bool_keys=None, dict_keys=None, **kwargs):
    """Extends a given object for API Consumption.

    :param obj: Object to extend.
    :param in_dict: Dict to extract data from.
    :param str_keys: List of in_dict keys that will be extracted as strings.
    :param date_keys: List of in_dict keys that will be extracted as datetimes.
    :param int_keys: List of in_dict keys that will be extracted as ints.
    :param bool_keys: List of in_dict keys that will be extracted as bools.
    :param dict_keys: List of in_dict keys that will be extracted as dicts.
    :param object_map: Dict of {key, obj} map, for nested object results.
    """
    d = dict()

    if str_keys:
        for in_key in str_keys:
            d[in_key] = in_dict.get(in_key)

    if date_keys:
        for in_key in date_keys:
            in_date = in_dict.get(in_key)
            try:
                out_date = parse_datetime(in_date)
            except TypeError:
                # parse_datetime chokes on missing (None) values;
                # fall back to None rather than failing the whole extraction.
                out_date = None

            d[in_key] = out_date

    if int_keys:
        for in_key in int_keys:
            if (in_dict is not None) and (in_dict.get(in_key) is not None):
                d[in_key] = int(in_dict.get(in_key))

    if bool_keys:
        for in_key in bool_keys:
            if in_dict.get(in_key) is not None:
                d[in_key] = bool(in_dict.get(in_key))

    if dict_keys:
        for in_key in dict_keys:
            if in_dict.get(in_key) is not None:
                d[in_key] = dict(in_dict.get(in_key))

    if object_map:
        for (k, v) in object_map.items():
            if in_dict.get(k):
                d[k] = v.new_from_dict(in_dict.get(k))

    obj.__dict__.update(d)
    obj.__dict__.update(kwargs)

    # Save the dictionary, for write comparisons.
    # obj._cache = d
    # obj.__cache = in_dict

    return obj
def function[to_python, parameter[obj, in_dict, str_keys, date_keys, int_keys, object_map, bool_keys, dict_keys]]: constant[Extends a given object for API Consumption. :param obj: Object to extend. :param in_dict: Dict to extract data from. :param str_keys: List of in_dict keys that will be extracted as strings. :param date_keys: List of in_dict keys that will be extracted as datetimes. :param int_keys: List of in_dict keys that will be extracted as ints. :param bool_keys: List of in_dict keys that will be extracted as bools. :param dict_keys: List of in_dict keys that will be extracted as dicts. :param object_map: Dict of {key, obj} map, for nested object results. ] variable[d] assign[=] call[name[dict], parameter[]] if name[str_keys] begin[:] for taget[name[in_key]] in starred[name[str_keys]] begin[:] call[name[d]][name[in_key]] assign[=] call[name[in_dict].get, parameter[name[in_key]]] if name[date_keys] begin[:] for taget[name[in_key]] in starred[name[date_keys]] begin[:] variable[in_date] assign[=] call[name[in_dict].get, parameter[name[in_key]]] <ast.Try object at 0x7da18fe91480> call[name[d]][name[in_key]] assign[=] name[out_date] if name[int_keys] begin[:] for taget[name[in_key]] in starred[name[int_keys]] begin[:] if <ast.BoolOp object at 0x7da18fe93280> begin[:] call[name[d]][name[in_key]] assign[=] call[name[int], parameter[call[name[in_dict].get, parameter[name[in_key]]]]] if name[bool_keys] begin[:] for taget[name[in_key]] in starred[name[bool_keys]] begin[:] if compare[call[name[in_dict].get, parameter[name[in_key]]] is_not constant[None]] begin[:] call[name[d]][name[in_key]] assign[=] call[name[bool], parameter[call[name[in_dict].get, parameter[name[in_key]]]]] if name[dict_keys] begin[:] for taget[name[in_key]] in starred[name[dict_keys]] begin[:] if compare[call[name[in_dict].get, parameter[name[in_key]]] is_not constant[None]] begin[:] call[name[d]][name[in_key]] assign[=] call[name[dict], parameter[call[name[in_dict].get, parameter[name[in_key]]]]] if name[object_map] begin[:] for taget[tuple[[<ast.Name object at 0x7da18fe91cf0>, <ast.Name object at 0x7da18fe92f50>]]] in starred[call[name[object_map].items, parameter[]]] begin[:] if call[name[in_dict].get, parameter[name[k]]] begin[:] call[name[d]][name[k]] assign[=] call[name[v].new_from_dict, parameter[call[name[in_dict].get, parameter[name[k]]]]] call[name[obj].__dict__.update, parameter[name[d]]] call[name[obj].__dict__.update, parameter[name[kwargs]]] return[name[obj]]
keyword[def] identifier[to_python] ( identifier[obj] , identifier[in_dict] , identifier[str_keys] = keyword[None] , identifier[date_keys] = keyword[None] , identifier[int_keys] = keyword[None] , identifier[object_map] = keyword[None] , identifier[bool_keys] = keyword[None] , identifier[dict_keys] = keyword[None] , ** identifier[kwargs] ): literal[string] identifier[d] = identifier[dict] () keyword[if] identifier[str_keys] : keyword[for] identifier[in_key] keyword[in] identifier[str_keys] : identifier[d] [ identifier[in_key] ]= identifier[in_dict] . identifier[get] ( identifier[in_key] ) keyword[if] identifier[date_keys] : keyword[for] identifier[in_key] keyword[in] identifier[date_keys] : identifier[in_date] = identifier[in_dict] . identifier[get] ( identifier[in_key] ) keyword[try] : identifier[out_date] = identifier[parse_datetime] ( identifier[in_date] ) keyword[except] identifier[TypeError] keyword[as] identifier[e] : keyword[raise] identifier[e] identifier[out_date] = keyword[None] identifier[d] [ identifier[in_key] ]= identifier[out_date] keyword[if] identifier[int_keys] : keyword[for] identifier[in_key] keyword[in] identifier[int_keys] : keyword[if] ( identifier[in_dict] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[in_dict] . identifier[get] ( identifier[in_key] ) keyword[is] keyword[not] keyword[None] ): identifier[d] [ identifier[in_key] ]= identifier[int] ( identifier[in_dict] . identifier[get] ( identifier[in_key] )) keyword[if] identifier[bool_keys] : keyword[for] identifier[in_key] keyword[in] identifier[bool_keys] : keyword[if] identifier[in_dict] . identifier[get] ( identifier[in_key] ) keyword[is] keyword[not] keyword[None] : identifier[d] [ identifier[in_key] ]= identifier[bool] ( identifier[in_dict] . identifier[get] ( identifier[in_key] )) keyword[if] identifier[dict_keys] : keyword[for] identifier[in_key] keyword[in] identifier[dict_keys] : keyword[if] identifier[in_dict] . identifier[get] ( identifier[in_key] ) keyword[is] keyword[not] keyword[None] : identifier[d] [ identifier[in_key] ]= identifier[dict] ( identifier[in_dict] . identifier[get] ( identifier[in_key] )) keyword[if] identifier[object_map] : keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[object_map] . identifier[items] (): keyword[if] identifier[in_dict] . identifier[get] ( identifier[k] ): identifier[d] [ identifier[k] ]= identifier[v] . identifier[new_from_dict] ( identifier[in_dict] . identifier[get] ( identifier[k] )) identifier[obj] . identifier[__dict__] . identifier[update] ( identifier[d] ) identifier[obj] . identifier[__dict__] . identifier[update] ( identifier[kwargs] ) keyword[return] identifier[obj]
def to_python(obj, in_dict, str_keys=None, date_keys=None, int_keys=None, object_map=None, bool_keys=None, dict_keys=None, **kwargs):
    """Extends a given object for API Consumption.

    :param obj: Object to extend.
    :param in_dict: Dict to extract data from.
    :param str_keys: List of in_dict keys that will be extracted as strings.
    :param date_keys: List of in_dict keys that will be extracted as datetimes.
    :param int_keys: List of in_dict keys that will be extracted as ints.
    :param bool_keys: List of in_dict keys that will be extracted as bools.
    :param dict_keys: List of in_dict keys that will be extracted as dicts.
    :param object_map: Dict of {key, obj} map, for nested object results.
    """
    d = dict()
    if str_keys:
        for in_key in str_keys:
            d[in_key] = in_dict.get(in_key) # depends on [control=['for'], data=['in_key']] # depends on [control=['if'], data=[]]
    if date_keys:
        for in_key in date_keys:
            in_date = in_dict.get(in_key)
            try:
                out_date = parse_datetime(in_date) # depends on [control=['try'], data=[]]
            except TypeError:
                # parse_datetime chokes on missing (None) values;
                # fall back to None rather than failing the whole extraction.
                out_date = None # depends on [control=['except'], data=[]]
            d[in_key] = out_date # depends on [control=['for'], data=['in_key']] # depends on [control=['if'], data=[]]
    if int_keys:
        for in_key in int_keys:
            if in_dict is not None and in_dict.get(in_key) is not None:
                d[in_key] = int(in_dict.get(in_key)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['in_key']] # depends on [control=['if'], data=[]]
    if bool_keys:
        for in_key in bool_keys:
            if in_dict.get(in_key) is not None:
                d[in_key] = bool(in_dict.get(in_key)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['in_key']] # depends on [control=['if'], data=[]]
    if dict_keys:
        for in_key in dict_keys:
            if in_dict.get(in_key) is not None:
                d[in_key] = dict(in_dict.get(in_key)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['in_key']] # depends on [control=['if'], data=[]]
    if object_map:
        for (k, v) in object_map.items():
            if in_dict.get(k):
                d[k] = v.new_from_dict(in_dict.get(k)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
    obj.__dict__.update(d)
    obj.__dict__.update(kwargs)
    # Save the dictionary, for write comparisons.
    # obj._cache = d
    # obj.__cache = in_dict
    return obj
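A usage sketch for the extractor above (the `Issue` class and the payload are invented for illustration): the typed key lists coerce values from the source dict, and any extra keyword arguments land on the object as well.

class Issue(object):
    pass

payload = {'title': 'crash on start', 'comments': '7', 'open': 1}
issue = to_python(Issue(), payload,
                  str_keys=['title'],
                  int_keys=['comments'],   # '7' is coerced with int()
                  bool_keys=['open'],      # 1 is coerced with bool()
                  fetched=True)            # stray kwargs are attached too

print(issue.title, issue.comments, issue.open, issue.fetched)
# -> crash on start 7 True True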
def close(self): """ closes render window """ # must close out axes marker if hasattr(self, 'axes_widget'): del self.axes_widget # reset scalar bar stuff self._scalar_bar_slots = set(range(MAX_N_COLOR_BARS)) self._scalar_bar_slot_lookup = {} self._scalar_bar_ranges = {} self._scalar_bar_mappers = {} if hasattr(self, 'ren_win'): self.ren_win.Finalize() del self.ren_win if hasattr(self, '_style'): del self._style if hasattr(self, 'iren'): self.iren.RemoveAllObservers() del self.iren if hasattr(self, 'textActor'): del self.textActor # end movie if hasattr(self, 'mwriter'): try: self.mwriter.close() except BaseException: pass
def function[close, parameter[self]]: constant[ closes render window ] if call[name[hasattr], parameter[name[self], constant[axes_widget]]] begin[:] <ast.Delete object at 0x7da20e961f60> name[self]._scalar_bar_slots assign[=] call[name[set], parameter[call[name[range], parameter[name[MAX_N_COLOR_BARS]]]]] name[self]._scalar_bar_slot_lookup assign[=] dictionary[[], []] name[self]._scalar_bar_ranges assign[=] dictionary[[], []] name[self]._scalar_bar_mappers assign[=] dictionary[[], []] if call[name[hasattr], parameter[name[self], constant[ren_win]]] begin[:] call[name[self].ren_win.Finalize, parameter[]] <ast.Delete object at 0x7da18f00c040> if call[name[hasattr], parameter[name[self], constant[_style]]] begin[:] <ast.Delete object at 0x7da18f00c730> if call[name[hasattr], parameter[name[self], constant[iren]]] begin[:] call[name[self].iren.RemoveAllObservers, parameter[]] <ast.Delete object at 0x7da18f00c5e0> if call[name[hasattr], parameter[name[self], constant[textActor]]] begin[:] <ast.Delete object at 0x7da18f00e530> if call[name[hasattr], parameter[name[self], constant[mwriter]]] begin[:] <ast.Try object at 0x7da18f00ee00>
keyword[def] identifier[close] ( identifier[self] ): literal[string] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[del] identifier[self] . identifier[axes_widget] identifier[self] . identifier[_scalar_bar_slots] = identifier[set] ( identifier[range] ( identifier[MAX_N_COLOR_BARS] )) identifier[self] . identifier[_scalar_bar_slot_lookup] ={} identifier[self] . identifier[_scalar_bar_ranges] ={} identifier[self] . identifier[_scalar_bar_mappers] ={} keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[ren_win] . identifier[Finalize] () keyword[del] identifier[self] . identifier[ren_win] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[del] identifier[self] . identifier[_style] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[iren] . identifier[RemoveAllObservers] () keyword[del] identifier[self] . identifier[iren] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[del] identifier[self] . identifier[textActor] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[try] : identifier[self] . identifier[mwriter] . identifier[close] () keyword[except] identifier[BaseException] : keyword[pass]
def close(self): """ closes render window """ # must close out axes marker if hasattr(self, 'axes_widget'): del self.axes_widget # depends on [control=['if'], data=[]] # reset scalar bar stuff self._scalar_bar_slots = set(range(MAX_N_COLOR_BARS)) self._scalar_bar_slot_lookup = {} self._scalar_bar_ranges = {} self._scalar_bar_mappers = {} if hasattr(self, 'ren_win'): self.ren_win.Finalize() del self.ren_win # depends on [control=['if'], data=[]] if hasattr(self, '_style'): del self._style # depends on [control=['if'], data=[]] if hasattr(self, 'iren'): self.iren.RemoveAllObservers() del self.iren # depends on [control=['if'], data=[]] if hasattr(self, 'textActor'): del self.textActor # depends on [control=['if'], data=[]] # end movie if hasattr(self, 'mwriter'): try: self.mwriter.close() # depends on [control=['try'], data=[]] except BaseException: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def trace(self, s, active=None, verbose=False): """ Rewrite string *s* like `apply()`, but yield each rewrite step. Args: s (str): the input string to process active (optional): a collection of external module names that may be applied if called verbose (bool, optional): if `False`, only output rules or groups that matched the input Yields: a :class:`REPPStep` object for each intermediate rewrite step, and finally a :class:`REPPResult` object after the last rewrite """ if active is None: active = self.active return self.group.trace(s, active=active, verbose=verbose)
def function[trace, parameter[self, s, active, verbose]]: constant[ Rewrite string *s* like `apply()`, but yield each rewrite step. Args: s (str): the input string to process active (optional): a collection of external module names that may be applied if called verbose (bool, optional): if `False`, only output rules or groups that matched the input Yields: a :class:`REPPStep` object for each intermediate rewrite step, and finally a :class:`REPPResult` object after the last rewrite ] if compare[name[active] is constant[None]] begin[:] variable[active] assign[=] name[self].active return[call[name[self].group.trace, parameter[name[s]]]]
keyword[def] identifier[trace] ( identifier[self] , identifier[s] , identifier[active] = keyword[None] , identifier[verbose] = keyword[False] ): literal[string] keyword[if] identifier[active] keyword[is] keyword[None] : identifier[active] = identifier[self] . identifier[active] keyword[return] identifier[self] . identifier[group] . identifier[trace] ( identifier[s] , identifier[active] = identifier[active] , identifier[verbose] = identifier[verbose] )
def trace(self, s, active=None, verbose=False): """ Rewrite string *s* like `apply()`, but yield each rewrite step. Args: s (str): the input string to process active (optional): a collection of external module names that may be applied if called verbose (bool, optional): if `False`, only output rules or groups that matched the input Yields: a :class:`REPPStep` object for each intermediate rewrite step, and finally a :class:`REPPResult` object after the last rewrite """ if active is None: active = self.active # depends on [control=['if'], data=['active']] return self.group.trace(s, active=active, verbose=verbose)
def visit_augassign(self, node, parent):
    """visit an AugAssign node by returning a fresh instance of it"""
    newnode = nodes.AugAssign(
        self._bin_op_classes[type(node.op)] + "=",
        node.lineno,
        node.col_offset,
        parent,
    )
    newnode.postinit(
        self.visit(node.target, newnode), self.visit(node.value, newnode)
    )
    return newnode
def function[visit_augassign, parameter[self, node, parent]]: constant[visit an AugAssign node by returning a fresh instance of it] variable[newnode] assign[=] call[name[nodes].AugAssign, parameter[binary_operation[call[name[self]._bin_op_classes][call[name[type], parameter[name[node].op]]] + constant[=]], name[node].lineno, name[node].col_offset, name[parent]]] call[name[newnode].postinit, parameter[call[name[self].visit, parameter[name[node].target, name[newnode]]], call[name[self].visit, parameter[name[node].value, name[newnode]]]]] return[name[newnode]]
keyword[def] identifier[visit_augassign] ( identifier[self] , identifier[node] , identifier[parent] ): literal[string] identifier[newnode] = identifier[nodes] . identifier[AugAssign] ( identifier[self] . identifier[_bin_op_classes] [ identifier[type] ( identifier[node] . identifier[op] )]+ literal[string] , identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] , ) identifier[newnode] . identifier[postinit] ( identifier[self] . identifier[visit] ( identifier[node] . identifier[target] , identifier[newnode] ), identifier[self] . identifier[visit] ( identifier[node] . identifier[value] , identifier[newnode] ) ) keyword[return] identifier[newnode]
def visit_augassign(self, node, parent):
    """visit an AugAssign node by returning a fresh instance of it"""
    newnode = nodes.AugAssign(self._bin_op_classes[type(node.op)] + '=', node.lineno, node.col_offset, parent)
    newnode.postinit(self.visit(node.target, newnode), self.visit(node.value, newnode))
    return newnode
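The `_bin_op_classes[type(node.op)] + "="` lookup is the whole trick: map the AST operator class to its symbol, then append `=` to get the augmented form. A standalone sketch using the stdlib `ast` module (the mapping shown is a small illustrative subset, not astroid's full table):

import ast

_bin_op_classes = {ast.Add: '+', ast.Sub: '-', ast.Mult: '*'}  # subset

node = ast.parse('x += 1').body[0]           # an ast.AugAssign node
print(_bin_op_classes[type(node.op)] + '=')  # -> '+='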
def buttons(self, master):
    """Adds 'OK' and 'Cancel' buttons to the standard button frame.

    Override if a different configuration is needed.
    """
    subframe = tk.Frame(master)
    subframe.pack(side=tk.RIGHT)
    ttk.Button(
        subframe, text="OK", width=10, command=self.ok, default=tk.ACTIVE
    ).pack(side=tk.LEFT, padx=5, pady=5)
    ttk.Button(
        subframe, text="Cancel", width=10, command=self.cancel, default=tk.ACTIVE
    ).pack(side=tk.LEFT, padx=5, pady=5)
    self.bind("<Return>", self.ok)
    self.bind("<Escape>", self.cancel)
def function[buttons, parameter[self, master]]: constant[Adds 'OK' and 'Cancel' buttons to the standard button frame. Override if a different configuration is needed. ] variable[subframe] assign[=] call[name[tk].Frame, parameter[name[master]]] call[name[subframe].pack, parameter[]] call[call[name[ttk].Button, parameter[name[subframe]]].pack, parameter[]] call[call[name[ttk].Button, parameter[name[subframe]]].pack, parameter[]] call[name[self].bind, parameter[constant[<Return>], name[self].ok]] call[name[self].bind, parameter[constant[<Escape>], name[self].cancel]]
keyword[def] identifier[buttons] ( identifier[self] , identifier[master] ): literal[string] identifier[subframe] = identifier[tk] . identifier[Frame] ( identifier[master] ) identifier[subframe] . identifier[pack] ( identifier[side] = identifier[tk] . identifier[RIGHT] ) identifier[ttk] . identifier[Button] ( identifier[subframe] , identifier[text] = literal[string] , identifier[width] = literal[int] , identifier[command] = identifier[self] . identifier[ok] , identifier[default] = identifier[tk] . identifier[ACTIVE] ). identifier[pack] ( identifier[side] = identifier[tk] . identifier[LEFT] , identifier[padx] = literal[int] , identifier[pady] = literal[int] ) identifier[ttk] . identifier[Button] ( identifier[subframe] , identifier[text] = literal[string] , identifier[width] = literal[int] , identifier[command] = identifier[self] . identifier[cancel] , identifier[default] = identifier[tk] . identifier[ACTIVE] ). identifier[pack] ( identifier[side] = identifier[tk] . identifier[LEFT] , identifier[padx] = literal[int] , identifier[pady] = literal[int] ) identifier[self] . identifier[bind] ( literal[string] , identifier[self] . identifier[ok] ) identifier[self] . identifier[bind] ( literal[string] , identifier[self] . identifier[cancel] )
def buttons(self, master):
    """Adds 'OK' and 'Cancel' buttons to the standard button frame.

    Override if a different configuration is needed.
    """
    subframe = tk.Frame(master)
    subframe.pack(side=tk.RIGHT)
    ttk.Button(subframe, text='OK', width=10, command=self.ok, default=tk.ACTIVE).pack(side=tk.LEFT, padx=5, pady=5)
    ttk.Button(subframe, text='Cancel', width=10, command=self.cancel, default=tk.ACTIVE).pack(side=tk.LEFT, padx=5, pady=5)
    self.bind('<Return>', self.ok)
    self.bind('<Escape>', self.cancel)
def on_canvas_slave__electrode_selected(self, slave, data): ''' .. versionchanged:: 0.11 Clear any temporary routes (drawn while mouse is down) from routes list. .. versionchanged:: 0.11.3 Clear temporary routes by setting ``df_routes`` property of :attr:`canvas_slave`. ''' if self.plugin is None: return # XXX Negative `route_i` corresponds to temporary route being # drawn. Since electrode selection terminates route drawing, clear any # rows corresponding to negative `route_i` values from the routes # table. slave.df_routes = slave.df_routes.loc[slave.df_routes.route_i >= 0].copy() state = self.canvas_slave.electrode_states.get(data['electrode_id'], 0) self.plugin.execute_async('microdrop.electrode_controller_plugin', 'set_electrode_states', electrode_states=pd .Series([not state], index=[data['electrode_id']]))
def function[on_canvas_slave__electrode_selected, parameter[self, slave, data]]: constant[ .. versionchanged:: 0.11 Clear any temporary routes (drawn while mouse is down) from routes list. .. versionchanged:: 0.11.3 Clear temporary routes by setting ``df_routes`` property of :attr:`canvas_slave`. ] if compare[name[self].plugin is constant[None]] begin[:] return[None] name[slave].df_routes assign[=] call[call[name[slave].df_routes.loc][compare[name[slave].df_routes.route_i greater_or_equal[>=] constant[0]]].copy, parameter[]] variable[state] assign[=] call[name[self].canvas_slave.electrode_states.get, parameter[call[name[data]][constant[electrode_id]], constant[0]]] call[name[self].plugin.execute_async, parameter[constant[microdrop.electrode_controller_plugin], constant[set_electrode_states]]]
keyword[def] identifier[on_canvas_slave__electrode_selected] ( identifier[self] , identifier[slave] , identifier[data] ): literal[string] keyword[if] identifier[self] . identifier[plugin] keyword[is] keyword[None] : keyword[return] identifier[slave] . identifier[df_routes] = identifier[slave] . identifier[df_routes] . identifier[loc] [ identifier[slave] . identifier[df_routes] . identifier[route_i] >= literal[int] ]. identifier[copy] () identifier[state] = identifier[self] . identifier[canvas_slave] . identifier[electrode_states] . identifier[get] ( identifier[data] [ literal[string] ], literal[int] ) identifier[self] . identifier[plugin] . identifier[execute_async] ( literal[string] , literal[string] , identifier[electrode_states] = identifier[pd] . identifier[Series] ([ keyword[not] identifier[state] ], identifier[index] =[ identifier[data] [ literal[string] ]]))
def on_canvas_slave__electrode_selected(self, slave, data): """ .. versionchanged:: 0.11 Clear any temporary routes (drawn while mouse is down) from routes list. .. versionchanged:: 0.11.3 Clear temporary routes by setting ``df_routes`` property of :attr:`canvas_slave`. """ if self.plugin is None: return # depends on [control=['if'], data=[]] # XXX Negative `route_i` corresponds to temporary route being # drawn. Since electrode selection terminates route drawing, clear any # rows corresponding to negative `route_i` values from the routes # table. slave.df_routes = slave.df_routes.loc[slave.df_routes.route_i >= 0].copy() state = self.canvas_slave.electrode_states.get(data['electrode_id'], 0) self.plugin.execute_async('microdrop.electrode_controller_plugin', 'set_electrode_states', electrode_states=pd.Series([not state], index=[data['electrode_id']]))
def _array_2d_repr(self): """creates a 2D array that has nmax + 1 rows and 2*mmax + 1 columns and provides a representation for the coefficients that makes plotting easier""" sc_array = np.zeros((self.nmax + 1, 2 * self.mmax + 1), dtype=np.complex128) lst = self._reshape_n_vecs() sc_array[0:self.nmax + 1, self.mmax] = lst[0] for m in xrange(1, self.mmax + 1): sc_array[m:self.nmax + 1, self.mmax - m] = lst[2 * m - 1] sc_array[m:self.nmax + 1, self.mmax + m] = lst[2 * m] return sc_array
def function[_array_2d_repr, parameter[self]]: constant[creates a 2D array that has nmax + 1 rows and 2*mmax + 1 columns and provides a representation for the coefficients that makes plotting easier] variable[sc_array] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da204623b50>, <ast.BinOp object at 0x7da204620e20>]]]] variable[lst] assign[=] call[name[self]._reshape_n_vecs, parameter[]] call[name[sc_array]][tuple[[<ast.Slice object at 0x7da1b0bb0130>, <ast.Attribute object at 0x7da1b0bb0e80>]]] assign[=] call[name[lst]][constant[0]] for taget[name[m]] in starred[call[name[xrange], parameter[constant[1], binary_operation[name[self].mmax + constant[1]]]]] begin[:] call[name[sc_array]][tuple[[<ast.Slice object at 0x7da204566320>, <ast.BinOp object at 0x7da204565870>]]] assign[=] call[name[lst]][binary_operation[binary_operation[constant[2] * name[m]] - constant[1]]] call[name[sc_array]][tuple[[<ast.Slice object at 0x7da204565ba0>, <ast.BinOp object at 0x7da204566590>]]] assign[=] call[name[lst]][binary_operation[constant[2] * name[m]]] return[name[sc_array]]
keyword[def] identifier[_array_2d_repr] ( identifier[self] ): literal[string] identifier[sc_array] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[nmax] + literal[int] , literal[int] * identifier[self] . identifier[mmax] + literal[int] ), identifier[dtype] = identifier[np] . identifier[complex128] ) identifier[lst] = identifier[self] . identifier[_reshape_n_vecs] () identifier[sc_array] [ literal[int] : identifier[self] . identifier[nmax] + literal[int] , identifier[self] . identifier[mmax] ]= identifier[lst] [ literal[int] ] keyword[for] identifier[m] keyword[in] identifier[xrange] ( literal[int] , identifier[self] . identifier[mmax] + literal[int] ): identifier[sc_array] [ identifier[m] : identifier[self] . identifier[nmax] + literal[int] , identifier[self] . identifier[mmax] - identifier[m] ]= identifier[lst] [ literal[int] * identifier[m] - literal[int] ] identifier[sc_array] [ identifier[m] : identifier[self] . identifier[nmax] + literal[int] , identifier[self] . identifier[mmax] + identifier[m] ]= identifier[lst] [ literal[int] * identifier[m] ] keyword[return] identifier[sc_array]
def _array_2d_repr(self): """creates a 2D array that has nmax + 1 rows and 2*mmax + 1 columns and provides a representation for the coefficients that makes plotting easier""" sc_array = np.zeros((self.nmax + 1, 2 * self.mmax + 1), dtype=np.complex128) lst = self._reshape_n_vecs() sc_array[0:self.nmax + 1, self.mmax] = lst[0] for m in xrange(1, self.mmax + 1): sc_array[m:self.nmax + 1, self.mmax - m] = lst[2 * m - 1] sc_array[m:self.nmax + 1, self.mmax + m] = lst[2 * m] # depends on [control=['for'], data=['m']] return sc_array
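The layout above places order m = 0 in the centre column, with the -m and +m coefficient vectors mirrored to its left and right, each starting at row n = m (so the nonzero entries form a triangle). A standalone sketch of the same layout, assuming, as the indexing `lst[0]`, `lst[2*m - 1]`, `lst[2*m]` suggests `_reshape_n_vecs()` provides, one vector for m = 0 followed by alternating -m/+m vectors of length nmax + 1 - m:

import numpy as np

def coeffs_to_2d(lst, nmax, mmax):
    sc = np.zeros((nmax + 1, 2 * mmax + 1), dtype=np.complex128)
    sc[0:nmax + 1, mmax] = lst[0]                  # m = 0, centre column
    for m in range(1, mmax + 1):
        sc[m:nmax + 1, mmax - m] = lst[2 * m - 1]  # order -m, left of centre
        sc[m:nmax + 1, mmax + m] = lst[2 * m]      # order +m, right of centre
    return sc

# nmax = mmax = 2: the nonzero entries form the expected triangle.
print(abs(coeffs_to_2d([[1, 1, 1], [2, 2], [3, 3], [4], [5]], 2, 2)))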
def render_rootURL(self, ctx, data): """ Add the WebSite's root URL as a child of the given tag. """ return ctx.tag[ ixmantissa.ISiteURLGenerator(self.store).rootURL(IRequest(ctx))]
def function[render_rootURL, parameter[self, ctx, data]]: constant[ Add the WebSite's root URL as a child of the given tag. ] return[call[name[ctx].tag][call[call[name[ixmantissa].ISiteURLGenerator, parameter[name[self].store]].rootURL, parameter[call[name[IRequest], parameter[name[ctx]]]]]]]
keyword[def] identifier[render_rootURL] ( identifier[self] , identifier[ctx] , identifier[data] ): literal[string] keyword[return] identifier[ctx] . identifier[tag] [ identifier[ixmantissa] . identifier[ISiteURLGenerator] ( identifier[self] . identifier[store] ). identifier[rootURL] ( identifier[IRequest] ( identifier[ctx] ))]
def render_rootURL(self, ctx, data): """ Add the WebSite's root URL as a child of the given tag. """ return ctx.tag[ixmantissa.ISiteURLGenerator(self.store).rootURL(IRequest(ctx))]
def parse(self, limit=None):
    """
    :param limit: maximum number of rows to parse from each file
    :return:

    """
    if limit is not None:
        LOG.info("Only parsing first %s rows of each file", str(limit))

    LOG.info("Parsing files...")

    if self.test_only:
        self.test_mode = True

    self._process_diseases(limit)
    self._process_genes(limit)
    self._process_genes_kegg2ncbi(limit)
    self._process_omim2gene(limit)
    self._process_omim2disease(limit)
    self._process_kegg_disease2gene(limit)
    self._process_pathways(limit)
    self._process_pathway_pubmed(limit)
    # self._process_pathway_pathway(limit)
    self._process_pathway_disease(limit)
    self._process_pathway_ko(limit)
    self._process_ortholog_classes(limit)
    # TODO add in when refactoring for #141
    # for f in ['hsa_orthologs', 'mmu_orthologs', 'rno_orthologs',
    #           'dme_orthologs','dre_orthologs','cel_orthologs']:
    #     file = '/'.join((self.rawdir, self.files[f]['file']))
    #     self._process_orthologs(file, limit)  # DONE #

    LOG.info("Finished parsing")
    return
def function[parse, parameter[self, limit]]: constant[ :param limit: maximum number of rows to parse from each file :return: ] if compare[name[limit] is_not constant[None]] begin[:] call[name[LOG].info, parameter[constant[Only parsing first %s rows of each file], call[name[str], parameter[name[limit]]]]] call[name[LOG].info, parameter[constant[Parsing files...]]] if name[self].test_only begin[:] name[self].test_mode assign[=] constant[True] call[name[self]._process_diseases, parameter[name[limit]]] call[name[self]._process_genes, parameter[name[limit]]] call[name[self]._process_genes_kegg2ncbi, parameter[name[limit]]] call[name[self]._process_omim2gene, parameter[name[limit]]] call[name[self]._process_omim2disease, parameter[name[limit]]] call[name[self]._process_kegg_disease2gene, parameter[name[limit]]] call[name[self]._process_pathways, parameter[name[limit]]] call[name[self]._process_pathway_pubmed, parameter[name[limit]]] call[name[self]._process_pathway_disease, parameter[name[limit]]] call[name[self]._process_pathway_ko, parameter[name[limit]]] call[name[self]._process_ortholog_classes, parameter[name[limit]]] call[name[LOG].info, parameter[constant[Finished parsing]]] return[None]
keyword[def] identifier[parse] ( identifier[self] , identifier[limit] = keyword[None] ): literal[string] keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] : identifier[LOG] . identifier[info] ( literal[string] , identifier[str] ( identifier[limit] )) identifier[LOG] . identifier[info] ( literal[string] ) keyword[if] identifier[self] . identifier[test_only] : identifier[self] . identifier[test_mode] = keyword[True] identifier[self] . identifier[_process_diseases] ( identifier[limit] ) identifier[self] . identifier[_process_genes] ( identifier[limit] ) identifier[self] . identifier[_process_genes_kegg2ncbi] ( identifier[limit] ) identifier[self] . identifier[_process_omim2gene] ( identifier[limit] ) identifier[self] . identifier[_process_omim2disease] ( identifier[limit] ) identifier[self] . identifier[_process_kegg_disease2gene] ( identifier[limit] ) identifier[self] . identifier[_process_pathways] ( identifier[limit] ) identifier[self] . identifier[_process_pathway_pubmed] ( identifier[limit] ) identifier[self] . identifier[_process_pathway_disease] ( identifier[limit] ) identifier[self] . identifier[_process_pathway_ko] ( identifier[limit] ) identifier[self] . identifier[_process_ortholog_classes] ( identifier[limit] ) identifier[LOG] . identifier[info] ( literal[string] ) keyword[return]
def parse(self, limit=None):
    """
    :param limit: maximum number of rows to parse from each file
    :return:

    """
    if limit is not None:
        LOG.info('Only parsing first %s rows of each file', str(limit)) # depends on [control=['if'], data=['limit']]
    LOG.info('Parsing files...')
    if self.test_only:
        self.test_mode = True # depends on [control=['if'], data=[]]
    self._process_diseases(limit)
    self._process_genes(limit)
    self._process_genes_kegg2ncbi(limit)
    self._process_omim2gene(limit)
    self._process_omim2disease(limit)
    self._process_kegg_disease2gene(limit)
    self._process_pathways(limit)
    self._process_pathway_pubmed(limit)
    # self._process_pathway_pathway(limit)
    self._process_pathway_disease(limit)
    self._process_pathway_ko(limit)
    self._process_ortholog_classes(limit)
    # TODO add in when refactoring for #141
    # for f in ['hsa_orthologs', 'mmu_orthologs', 'rno_orthologs',
    #           'dme_orthologs','dre_orthologs','cel_orthologs']:
    #     file = '/'.join((self.rawdir, self.files[f]['file']))
    #     self._process_orthologs(file, limit)  # DONE #
    LOG.info('Finished parsing')
    return
def set_volume(self, volume):
    """ Allows setting the volume. The value should be between 0 and 1.
    Returns the new volume.
    """
    volume = min(max(0, volume), 1)
    self.logger.info("Receiver:setting volume to %.1f", volume)
    self.send_message({MESSAGE_TYPE: 'SET_VOLUME',
                       'volume': {'level': volume}})
    return volume
def function[set_volume, parameter[self, volume]]: constant[ Allows setting the volume. The value should be between 0 and 1. Returns the new volume. ] variable[volume] assign[=] call[name[min], parameter[call[name[max], parameter[constant[0], name[volume]]], constant[1]]] call[name[self].logger.info, parameter[constant[Receiver:setting volume to %.1f], name[volume]]] call[name[self].send_message, parameter[dictionary[[<ast.Name object at 0x7da20c7c8310>, <ast.Constant object at 0x7da20c7cbbe0>], [<ast.Constant object at 0x7da20c7cbc40>, <ast.Dict object at 0x7da20c7cbf10>]]]] return[name[volume]]
keyword[def] identifier[set_volume] ( identifier[self] , identifier[volume] ): literal[string] identifier[volume] = identifier[min] ( identifier[max] ( literal[int] , identifier[volume] ), literal[int] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[volume] ) identifier[self] . identifier[send_message] ({ identifier[MESSAGE_TYPE] : literal[string] , literal[string] :{ literal[string] : identifier[volume] }}) keyword[return] identifier[volume]
def set_volume(self, volume):
    """ Allows setting the volume. The value should be between 0 and 1.
    Returns the new volume.
    """
    volume = min(max(0, volume), 1)
    self.logger.info('Receiver:setting volume to %.1f', volume)
    self.send_message({MESSAGE_TYPE: 'SET_VOLUME', 'volume': {'level': volume}})
    return volume
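Because of the `min(max(0, volume), 1)` clamp, out-of-range requests are coerced rather than rejected; a standalone illustration of the clamp with made-up values:

def clamp(v):
    return min(max(0, v), 1)

print(clamp(-0.3), clamp(0.4), clamp(1.7))  # -> 0 0.4 1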
def attach(self, gui): """Attach the view to the GUI.""" super(CorrelogramView, self).attach(gui) self.actions.add(self.toggle_normalization, shortcut='n') self.actions.separator() self.actions.add(self.set_bin, alias='cb') self.actions.add(self.set_window, alias='cw')
def function[attach, parameter[self, gui]]: constant[Attach the view to the GUI.] call[call[name[super], parameter[name[CorrelogramView], name[self]]].attach, parameter[name[gui]]] call[name[self].actions.add, parameter[name[self].toggle_normalization]] call[name[self].actions.separator, parameter[]] call[name[self].actions.add, parameter[name[self].set_bin]] call[name[self].actions.add, parameter[name[self].set_window]]
keyword[def] identifier[attach] ( identifier[self] , identifier[gui] ): literal[string] identifier[super] ( identifier[CorrelogramView] , identifier[self] ). identifier[attach] ( identifier[gui] ) identifier[self] . identifier[actions] . identifier[add] ( identifier[self] . identifier[toggle_normalization] , identifier[shortcut] = literal[string] ) identifier[self] . identifier[actions] . identifier[separator] () identifier[self] . identifier[actions] . identifier[add] ( identifier[self] . identifier[set_bin] , identifier[alias] = literal[string] ) identifier[self] . identifier[actions] . identifier[add] ( identifier[self] . identifier[set_window] , identifier[alias] = literal[string] )
def attach(self, gui): """Attach the view to the GUI.""" super(CorrelogramView, self).attach(gui) self.actions.add(self.toggle_normalization, shortcut='n') self.actions.separator() self.actions.add(self.set_bin, alias='cb') self.actions.add(self.set_window, alias='cw')
def create_request(self, reset_wfs_iterator=False): """Set download requests Create a list of DownloadRequests for all Sentinel-2 acquisitions within request's time interval and acceptable cloud coverage. :param reset_wfs_iterator: When re-running the method this flag is used to reset/keep existing ``wfs_iterator`` (i.e. instance of ``WebFeatureService`` class). If the iterator is not reset you don't have to repeat a service call but tiles and dates will stay the same. :type reset_wfs_iterator: bool """ if reset_wfs_iterator: self.wfs_iterator = None ogc_service = OgcImageService(instance_id=self.instance_id) self.download_list = ogc_service.get_request(self) self.wfs_iterator = ogc_service.get_wfs_iterator()
def function[create_request, parameter[self, reset_wfs_iterator]]: constant[Set download requests Create a list of DownloadRequests for all Sentinel-2 acquisitions within request's time interval and acceptable cloud coverage. :param reset_wfs_iterator: When re-running the method this flag is used to reset/keep existing ``wfs_iterator`` (i.e. instance of ``WebFeatureService`` class). If the iterator is not reset you don't have to repeat a service call but tiles and dates will stay the same. :type reset_wfs_iterator: bool ] if name[reset_wfs_iterator] begin[:] name[self].wfs_iterator assign[=] constant[None] variable[ogc_service] assign[=] call[name[OgcImageService], parameter[]] name[self].download_list assign[=] call[name[ogc_service].get_request, parameter[name[self]]] name[self].wfs_iterator assign[=] call[name[ogc_service].get_wfs_iterator, parameter[]]
keyword[def] identifier[create_request] ( identifier[self] , identifier[reset_wfs_iterator] = keyword[False] ): literal[string] keyword[if] identifier[reset_wfs_iterator] : identifier[self] . identifier[wfs_iterator] = keyword[None] identifier[ogc_service] = identifier[OgcImageService] ( identifier[instance_id] = identifier[self] . identifier[instance_id] ) identifier[self] . identifier[download_list] = identifier[ogc_service] . identifier[get_request] ( identifier[self] ) identifier[self] . identifier[wfs_iterator] = identifier[ogc_service] . identifier[get_wfs_iterator] ()
def create_request(self, reset_wfs_iterator=False): """Set download requests Create a list of DownloadRequests for all Sentinel-2 acquisitions within request's time interval and acceptable cloud coverage. :param reset_wfs_iterator: When re-running the method this flag is used to reset/keep existing ``wfs_iterator`` (i.e. instance of ``WebFeatureService`` class). If the iterator is not reset you don't have to repeat a service call but tiles and dates will stay the same. :type reset_wfs_iterator: bool """ if reset_wfs_iterator: self.wfs_iterator = None # depends on [control=['if'], data=[]] ogc_service = OgcImageService(instance_id=self.instance_id) self.download_list = ogc_service.get_request(self) self.wfs_iterator = ogc_service.get_wfs_iterator()
def validate_aggregation(agg):
    """Validate an aggregation for use in Vega-Lite.

    Translate agg to one of the following supported named aggregations:
    ['mean', 'sum', 'median', 'min', 'max', 'count']

    Parameters
    ----------
    agg : string or callable
        A string or callable aggregation. Supported reductions are
        ['mean', 'sum', 'median', 'min', 'max', 'count'].
        If agg is a numpy function, the return value is the string
        representation. If agg is unrecognized, raise a ValueError
    """
    if agg is None:
        return agg
    supported_aggs = ['mean', 'sum', 'median', 'min', 'max', 'count']
    numpy_aggs = {getattr(np, a): a
                  for a in ['mean', 'sum', 'median', 'min', 'max']}
    builtin_aggs = {min: 'min', max: 'max', sum: 'sum'}

    agg = numpy_aggs.get(agg, agg)
    agg = builtin_aggs.get(agg, agg)

    if agg not in supported_aggs:
        raise ValueError("Unrecognized Vega-Lite aggregation: {0}".format(agg))
    return agg
def function[validate_aggregation, parameter[agg]]: constant[Validate an aggregation for use in Vega-Lite. Translate agg to one of the following supported named aggregations: ['mean', 'sum', 'median', 'min', 'max', 'count'] Parameters ---------- agg : string or callable A string or callable aggregation. Supported reductions are ['mean', 'sum', 'median', 'min', 'max', 'count']. If agg is a numpy function, the return value is the string representation. If agg is unrecognized, raise a ValueError ] if compare[name[agg] is constant[None]] begin[:] return[name[agg]] variable[supported_aggs] assign[=] list[[<ast.Constant object at 0x7da20c76ed40>, <ast.Constant object at 0x7da20c76d2d0>, <ast.Constant object at 0x7da20c76fbb0>, <ast.Constant object at 0x7da20c76e560>, <ast.Constant object at 0x7da20c76fa90>, <ast.Constant object at 0x7da20c76eaa0>]] variable[numpy_aggs] assign[=] <ast.DictComp object at 0x7da20c76ce80> variable[builtin_aggs] assign[=] dictionary[[<ast.Name object at 0x7da20c76c760>, <ast.Name object at 0x7da20c76f9a0>, <ast.Name object at 0x7da20c76cd00>], [<ast.Constant object at 0x7da20c76c340>, <ast.Constant object at 0x7da20c76f850>, <ast.Constant object at 0x7da20c76fa60>]] variable[agg] assign[=] call[name[numpy_aggs].get, parameter[name[agg], name[agg]]] variable[agg] assign[=] call[name[builtin_aggs].get, parameter[name[agg], name[agg]]] if compare[name[agg] <ast.NotIn object at 0x7da2590d7190> name[supported_aggs]] begin[:] <ast.Raise object at 0x7da204963730> return[name[agg]]
keyword[def] identifier[validate_aggregation] ( identifier[agg] ): literal[string] keyword[if] identifier[agg] keyword[is] keyword[None] : keyword[return] identifier[agg] identifier[supported_aggs] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[numpy_aggs] ={ identifier[getattr] ( identifier[np] , identifier[a] ): identifier[a] keyword[for] identifier[a] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]} identifier[builtin_aggs] ={ identifier[min] : literal[string] , identifier[max] : literal[string] , identifier[sum] : literal[string] } identifier[agg] = identifier[numpy_aggs] . identifier[get] ( identifier[agg] , identifier[agg] ) identifier[agg] = identifier[builtin_aggs] . identifier[get] ( identifier[agg] , identifier[agg] ) keyword[if] identifier[agg] keyword[not] keyword[in] identifier[supported_aggs] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[agg] )) keyword[return] identifier[agg]
def validate_aggregation(agg):
    """Validate an aggregation for use in Vega-Lite.

    Translate agg to one of the following supported named aggregations:
    ['mean', 'sum', 'median', 'min', 'max', 'count']

    Parameters
    ----------
    agg : string or callable
        A string or callable aggregation. Supported reductions are
        ['mean', 'sum', 'median', 'min', 'max', 'count'].
        If agg is a numpy function, the return value is the string
        representation. If agg is unrecognized, raise a ValueError
    """
    if agg is None:
        return agg # depends on [control=['if'], data=['agg']]
    supported_aggs = ['mean', 'sum', 'median', 'min', 'max', 'count']
    numpy_aggs = {getattr(np, a): a for a in ['mean', 'sum', 'median', 'min', 'max']}
    builtin_aggs = {min: 'min', max: 'max', sum: 'sum'}
    agg = numpy_aggs.get(agg, agg)
    agg = builtin_aggs.get(agg, agg)
    if agg not in supported_aggs:
        raise ValueError('Unrecognized Vega-Lite aggregation: {0}'.format(agg)) # depends on [control=['if'], data=['agg']]
    return agg
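Illustrative calls against the validator above: NumPy functions and builtins normalise to their string names, valid strings pass through unchanged, `None` is passed through, and anything else raises.

import numpy as np

assert validate_aggregation(np.median) == 'median'
assert validate_aggregation(min) == 'min'
assert validate_aggregation('count') == 'count'
assert validate_aggregation(None) is None

try:
    validate_aggregation('variance')
except ValueError as err:
    print(err)  # -> Unrecognized Vega-Lite aggregation: variance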
def generate_hashes(peaks, fan_value: int = DEFAULT_FAN_VALUE): """ Hash list structure: sha1_hash[0:20] time_offset [(e05b341a9b77a51fd26, 32), ... ] """ if PEAK_SORT: peaks = sorted(peaks, key=lambda x: x[1]) # peaks.sort(key=itemgetter(1)) for i in range(len(peaks)): for j in range(1, fan_value): if (i + j) < len(peaks): freq1 = peaks[i][IDX_FREQ_I] freq2 = peaks[i + j][IDX_FREQ_I] t1 = peaks[i][IDX_TIME_J] t2 = peaks[i + j][IDX_TIME_J] t_delta = t2 - t1 if MIN_HASH_TIME_DELTA <= t_delta <= MAX_HASH_TIME_DELTA: key = "{}|{}|{}".format(freq1, freq2, t_delta) h = hashlib.sha1(key.encode('utf-8')) yield (h.hexdigest()[0:FINGERPRINT_REDUCTION], t1)
def function[generate_hashes, parameter[peaks, fan_value]]: constant[ Hash list structure: sha1_hash[0:20] time_offset [(e05b341a9b77a51fd26, 32), ... ] ] if name[PEAK_SORT] begin[:] variable[peaks] assign[=] call[name[sorted], parameter[name[peaks]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[peaks]]]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[constant[1], name[fan_value]]]] begin[:] if compare[binary_operation[name[i] + name[j]] less[<] call[name[len], parameter[name[peaks]]]] begin[:] variable[freq1] assign[=] call[call[name[peaks]][name[i]]][name[IDX_FREQ_I]] variable[freq2] assign[=] call[call[name[peaks]][binary_operation[name[i] + name[j]]]][name[IDX_FREQ_I]] variable[t1] assign[=] call[call[name[peaks]][name[i]]][name[IDX_TIME_J]] variable[t2] assign[=] call[call[name[peaks]][binary_operation[name[i] + name[j]]]][name[IDX_TIME_J]] variable[t_delta] assign[=] binary_operation[name[t2] - name[t1]] if compare[name[MIN_HASH_TIME_DELTA] less_or_equal[<=] name[t_delta]] begin[:] variable[key] assign[=] call[constant[{}|{}|{}].format, parameter[name[freq1], name[freq2], name[t_delta]]] variable[h] assign[=] call[name[hashlib].sha1, parameter[call[name[key].encode, parameter[constant[utf-8]]]]] <ast.Yield object at 0x7da1b03da350>
keyword[def] identifier[generate_hashes] ( identifier[peaks] , identifier[fan_value] : identifier[int] = identifier[DEFAULT_FAN_VALUE] ): literal[string] keyword[if] identifier[PEAK_SORT] : identifier[peaks] = identifier[sorted] ( identifier[peaks] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[peaks] )): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[fan_value] ): keyword[if] ( identifier[i] + identifier[j] )< identifier[len] ( identifier[peaks] ): identifier[freq1] = identifier[peaks] [ identifier[i] ][ identifier[IDX_FREQ_I] ] identifier[freq2] = identifier[peaks] [ identifier[i] + identifier[j] ][ identifier[IDX_FREQ_I] ] identifier[t1] = identifier[peaks] [ identifier[i] ][ identifier[IDX_TIME_J] ] identifier[t2] = identifier[peaks] [ identifier[i] + identifier[j] ][ identifier[IDX_TIME_J] ] identifier[t_delta] = identifier[t2] - identifier[t1] keyword[if] identifier[MIN_HASH_TIME_DELTA] <= identifier[t_delta] <= identifier[MAX_HASH_TIME_DELTA] : identifier[key] = literal[string] . identifier[format] ( identifier[freq1] , identifier[freq2] , identifier[t_delta] ) identifier[h] = identifier[hashlib] . identifier[sha1] ( identifier[key] . identifier[encode] ( literal[string] )) keyword[yield] ( identifier[h] . identifier[hexdigest] ()[ literal[int] : identifier[FINGERPRINT_REDUCTION] ], identifier[t1] )
def generate_hashes(peaks, fan_value: int=DEFAULT_FAN_VALUE): """ Hash list structure: sha1_hash[0:20] time_offset [(e05b341a9b77a51fd26, 32), ... ] """ if PEAK_SORT: peaks = sorted(peaks, key=lambda x: x[1]) # depends on [control=['if'], data=[]] # peaks.sort(key=itemgetter(1)) for i in range(len(peaks)): for j in range(1, fan_value): if i + j < len(peaks): freq1 = peaks[i][IDX_FREQ_I] freq2 = peaks[i + j][IDX_FREQ_I] t1 = peaks[i][IDX_TIME_J] t2 = peaks[i + j][IDX_TIME_J] t_delta = t2 - t1 if MIN_HASH_TIME_DELTA <= t_delta <= MAX_HASH_TIME_DELTA: key = '{}|{}|{}'.format(freq1, freq2, t_delta) h = hashlib.sha1(key.encode('utf-8')) yield (h.hexdigest()[0:FINGERPRINT_REDUCTION], t1) # depends on [control=['if'], data=['t_delta']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
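Driving the generator takes a peak list of (frequency_bin, time_offset) pairs plus the module-level constants it references; the constant values below are typical defaults for this family of audio fingerprinters and are assumptions, not read from this row. The sketch assumes it runs in the same module as generate_hashes.

import hashlib

IDX_FREQ_I, IDX_TIME_J = 0, 1                     # assumed index constants
PEAK_SORT = True                                  # assumed default
MIN_HASH_TIME_DELTA, MAX_HASH_TIME_DELTA = 0, 200 # assumed window bounds
FINGERPRINT_REDUCTION = 20                        # assumed hash prefix length
DEFAULT_FAN_VALUE = 15                            # assumed fan-out

peaks = [(40, 10), (55, 13), (40, 17), (91, 25)]
for fp_hash, t1 in generate_hashes(peaks, fan_value=3):
    print(fp_hash, t1)  # 20-hex-char SHA-1 prefix plus the anchor peak's time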
def to_iso_time_string(self) -> str: """ Return the iso time string only """ short_time = self.to_short_time_string() second = self.time.second return f"{short_time}:{second:02}"
def function[to_iso_time_string, parameter[self]]: constant[ Return the iso time string only ] variable[short_time] assign[=] call[name[self].to_short_time_string, parameter[]] variable[second] assign[=] name[self].time.second return[<ast.JoinedStr object at 0x7da1b27e3730>]
keyword[def] identifier[to_iso_time_string] ( identifier[self] )-> identifier[str] : literal[string] identifier[short_time] = identifier[self] . identifier[to_short_time_string] () identifier[second] = identifier[self] . identifier[time] . identifier[second] keyword[return] literal[string]
def to_iso_time_string(self) -> str: """ Return the iso time string only """ short_time = self.to_short_time_string() second = self.time.second return f'{short_time}:{second:02}'
def to_unicode(sorb, allow_eval=False): r"""Ensure that strings are unicode (UTF-8 encoded). Evaluate bytes literals that are sometimes accidentally created by str(b'whatever') >>> to_unicode(b'whatever') 'whatever' >>> to_unicode(b'b"whatever"') 'whatever' >>> to_unicode(repr(b'b"whatever"')) 'whatever' >>> to_unicode(str(b'b"whatever"')) 'whatever' >>> to_unicode(str(str(b'whatever'))) 'whatever' >>> to_unicode(bytes(u'whatever', 'utf-8')) 'whatever' >>> to_unicode(b'u"whatever"') 'whatever' >>> to_unicode(u'b"whatever"') 'whatever' There seems to be a bug in python3 core: >>> str(b'whatever') # user intended str.decode(b'whatever') (str coercion) rather than python code repr "b'whatever'" >>> repr(str(b'whatever')) '"b\'whatever\'"' >>> str(repr(str(b'whatever'))) '"b\'whatever\'"' >>> repr(str(repr(str(b'whatever')))) '\'"b\\\'whatever\\\'"\'' >>> repr(repr(b'whatever')) '"b\'whatever\'"' >>> str(str(b'whatever')) "b'whatever'" >>> str(repr(b'whatever')) "b'whatever'" """ if sorb is None: return sorb if isinstance(sorb, bytes): sorb = sorb.decode() for i, s in enumerate(["b'", 'b"', "u'", 'u"']): if (sorb.startswith(s) and sorb.endswith(s[-1])): # print(i) return to_unicode(eval(sorb, {'__builtins__': None}, {})) return sorb
def function[to_unicode, parameter[sorb, allow_eval]]: constant[Ensure that strings are unicode (UTF-8 encoded). Evaluate bytes literals that are sometimes accidentally created by str(b'whatever') >>> to_unicode(b'whatever') 'whatever' >>> to_unicode(b'b"whatever"') 'whatever' >>> to_unicode(repr(b'b"whatever"')) 'whatever' >>> to_unicode(str(b'b"whatever"')) 'whatever' >>> to_unicode(str(str(b'whatever'))) 'whatever' >>> to_unicode(bytes(u'whatever', 'utf-8')) 'whatever' >>> to_unicode(b'u"whatever"') 'whatever' >>> to_unicode(u'b"whatever"') 'whatever' There seems to be a bug in python3 core: >>> str(b'whatever') # user intended str.decode(b'whatever') (str coercion) rather than python code repr "b'whatever'" >>> repr(str(b'whatever')) '"b\'whatever\'"' >>> str(repr(str(b'whatever'))) '"b\'whatever\'"' >>> repr(str(repr(str(b'whatever')))) '\'"b\\\'whatever\\\'"\'' >>> repr(repr(b'whatever')) '"b\'whatever\'"' >>> str(str(b'whatever')) "b'whatever'" >>> str(repr(b'whatever')) "b'whatever'" ] if compare[name[sorb] is constant[None]] begin[:] return[name[sorb]] if call[name[isinstance], parameter[name[sorb], name[bytes]]] begin[:] variable[sorb] assign[=] call[name[sorb].decode, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c6c62c0>, <ast.Name object at 0x7da20c6c66e0>]]] in starred[call[name[enumerate], parameter[list[[<ast.Constant object at 0x7da20c6c6c20>, <ast.Constant object at 0x7da20c6c6aa0>, <ast.Constant object at 0x7da20c6c7a00>, <ast.Constant object at 0x7da20c6c7160>]]]]] begin[:] if <ast.BoolOp object at 0x7da20c6c5ea0> begin[:] return[call[name[to_unicode], parameter[call[name[eval], parameter[name[sorb], dictionary[[<ast.Constant object at 0x7da20c6c6c80>], [<ast.Constant object at 0x7da20c6c4fd0>]], dictionary[[], []]]]]]] return[name[sorb]]
keyword[def] identifier[to_unicode] ( identifier[sorb] , identifier[allow_eval] = keyword[False] ): literal[string] keyword[if] identifier[sorb] keyword[is] keyword[None] : keyword[return] identifier[sorb] keyword[if] identifier[isinstance] ( identifier[sorb] , identifier[bytes] ): identifier[sorb] = identifier[sorb] . identifier[decode] () keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ([ literal[string] , literal[string] , literal[string] , literal[string] ]): keyword[if] ( identifier[sorb] . identifier[startswith] ( identifier[s] ) keyword[and] identifier[sorb] . identifier[endswith] ( identifier[s] [- literal[int] ])): keyword[return] identifier[to_unicode] ( identifier[eval] ( identifier[sorb] ,{ literal[string] : keyword[None] },{})) keyword[return] identifier[sorb]
def to_unicode(sorb, allow_eval=False): """Ensure that strings are unicode (UTF-8 encoded). Evaluate bytes literals that are sometimes accidentally created by str(b'whatever') >>> to_unicode(b'whatever') 'whatever' >>> to_unicode(b'b"whatever"') 'whatever' >>> to_unicode(repr(b'b"whatever"')) 'whatever' >>> to_unicode(str(b'b"whatever"')) 'whatever' >>> to_unicode(str(str(b'whatever'))) 'whatever' >>> to_unicode(bytes(u'whatever', 'utf-8')) 'whatever' >>> to_unicode(b'u"whatever"') 'whatever' >>> to_unicode(u'b"whatever"') 'whatever' There seems to be a bug in python3 core: >>> str(b'whatever') # user intended str.decode(b'whatever') (str coercion) rather than python code repr "b'whatever'" >>> repr(str(b'whatever')) '"b\\'whatever\\'"' >>> str(repr(str(b'whatever'))) '"b\\'whatever\\'"' >>> repr(str(repr(str(b'whatever')))) '\\'"b\\\\\\'whatever\\\\\\'"\\'' >>> repr(repr(b'whatever')) '"b\\'whatever\\'"' >>> str(str(b'whatever')) "b'whatever'" >>> str(repr(b'whatever')) "b'whatever'" """ if sorb is None: return sorb # depends on [control=['if'], data=['sorb']] if isinstance(sorb, bytes): sorb = sorb.decode() # depends on [control=['if'], data=[]] for (i, s) in enumerate(["b'", 'b"', "u'", 'u"']): if sorb.startswith(s) and sorb.endswith(s[-1]): # print(i) return to_unicode(eval(sorb, {'__builtins__': None}, {})) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return sorb
def get_string(self, recalculate_width=True):
    """Get the table as a string.

    Parameters
    ----------
    recalculate_width : bool, optional
        If width for each column should be recalculated (default True).
        Note that width is always calculated if it wasn't set
        explicitly when this method is called for the first time,
        regardless of the value of `recalculate_width`.

    Returns
    -------
    str:
        Table as a string.
    """

    # Empty table. Returning empty string.
    if len(self._table) == 0:
        return ''

    if self.serialno and self.column_count > 0:
        self.insert_column(0, self.serialno_header,
                           range(1, len(self) + 1))

    # Should widths of column be recalculated
    if recalculate_width or sum(self._column_widths) == 0:
        self._calculate_column_widths()

    string_ = []

    # Drawing the top border
    if self.top_border_char:
        string_.append(
            self._get_top_border())

    # Print headers if not empty or only spaces
    if ''.join(self._column_headers).strip():
        headers = to_unicode(self._column_headers)
        string_.append(headers)

        if self.header_separator_char:
            string_.append(
                self._get_header_separator())

    # Printing rows
    first_row_encountered = False
    for row in self._table:
        if first_row_encountered and self.row_separator_char:
            string_.append(
                self._get_row_separator())
        first_row_encountered = True
        content = to_unicode(row)
        string_.append(content)

    # Drawing the bottom border
    if self.bottom_border_char:
        string_.append(
            self._get_bottom_border())

    if self.serialno and self.column_count > 0:
        self.pop_column(0)

    return '\n'.join(string_)
def function[get_string, parameter[self, recalculate_width]]: constant[Get the table as a string. Parameters ---------- recalculate_width : bool, optional If width for each column should be recalculated (default True). Note that width is always calculated if it wasn't set explicitly when this method is called for the first time, regardless of the value of `recalculate_width`. Returns ------- str: Table as a string. ] if compare[call[name[len], parameter[name[self]._table]] equal[==] constant[0]] begin[:] return[constant[]] if <ast.BoolOp object at 0x7da1b06cee90> begin[:] call[name[self].insert_column, parameter[constant[0], name[self].serialno_header, call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[self]]] + constant[1]]]]]] if <ast.BoolOp object at 0x7da18dc9b8e0> begin[:] call[name[self]._calculate_column_widths, parameter[]] variable[string_] assign[=] list[[]] if name[self].top_border_char begin[:] call[name[string_].append, parameter[call[name[self]._get_top_border, parameter[]]]] if call[call[constant[].join, parameter[name[self]._column_headers]].strip, parameter[]] begin[:] variable[headers] assign[=] call[name[to_unicode], parameter[name[self]._column_headers]] call[name[string_].append, parameter[name[headers]]] if name[self].header_separator_char begin[:] call[name[string_].append, parameter[call[name[self]._get_header_separator, parameter[]]]] variable[first_row_encountered] assign[=] constant[False] for taget[name[row]] in starred[name[self]._table] begin[:] if <ast.BoolOp object at 0x7da18dc98df0> begin[:] call[name[string_].append, parameter[call[name[self]._get_row_separator, parameter[]]]] variable[first_row_encountered] assign[=] constant[True] variable[content] assign[=] call[name[to_unicode], parameter[name[row]]] call[name[string_].append, parameter[name[content]]] if name[self].bottom_border_char begin[:] call[name[string_].append, parameter[call[name[self]._get_bottom_border, parameter[]]]] if <ast.BoolOp object at 0x7da18dc9beb0> begin[:] call[name[self].pop_column, parameter[constant[0]]] return[call[constant[
].join, parameter[name[string_]]]]
keyword[def] identifier[get_string] ( identifier[self] , identifier[recalculate_width] = keyword[True] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[_table] )== literal[int] : keyword[return] literal[string] keyword[if] identifier[self] . identifier[serialno] keyword[and] identifier[self] . identifier[column_count] > literal[int] : identifier[self] . identifier[insert_column] ( literal[int] , identifier[self] . identifier[serialno_header] , identifier[range] ( literal[int] , identifier[len] ( identifier[self] )+ literal[int] )) keyword[if] identifier[recalculate_width] keyword[or] identifier[sum] ( identifier[self] . identifier[_column_widths] )== literal[int] : identifier[self] . identifier[_calculate_column_widths] () identifier[string_] =[] keyword[if] identifier[self] . identifier[top_border_char] : identifier[string_] . identifier[append] ( identifier[self] . identifier[_get_top_border] ()) keyword[if] literal[string] . identifier[join] ( identifier[self] . identifier[_column_headers] ). identifier[strip] (): identifier[headers] = identifier[to_unicode] ( identifier[self] . identifier[_column_headers] ) identifier[string_] . identifier[append] ( identifier[headers] ) keyword[if] identifier[self] . identifier[header_separator_char] : identifier[string_] . identifier[append] ( identifier[self] . identifier[_get_header_separator] ()) identifier[first_row_encountered] = keyword[False] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[_table] : keyword[if] identifier[first_row_encountered] keyword[and] identifier[self] . identifier[row_separator_char] : identifier[string_] . identifier[append] ( identifier[self] . identifier[_get_row_separator] ()) identifier[first_row_encountered] = keyword[True] identifier[content] = identifier[to_unicode] ( identifier[row] ) identifier[string_] . identifier[append] ( identifier[content] ) keyword[if] identifier[self] . identifier[bottom_border_char] : identifier[string_] . identifier[append] ( identifier[self] . identifier[_get_bottom_border] ()) keyword[if] identifier[self] . identifier[serialno] keyword[and] identifier[self] . identifier[column_count] > literal[int] : identifier[self] . identifier[pop_column] ( literal[int] ) keyword[return] literal[string] . identifier[join] ( identifier[string_] )
def get_string(self, recalculate_width=True): """Get the table as a String. Parameters ---------- recalculate_width : bool, optional If width for each column should be recalculated(default True). Note that width is always calculated if it wasn't set explicitly when this method is called for the first time , regardless of the value of `recalculate_width`. Returns ------- str: Table as a string. """ # Empty table. returning empty string. if len(self._table) == 0: return '' # depends on [control=['if'], data=[]] if self.serialno and self.column_count > 0: self.insert_column(0, self.serialno_header, range(1, len(self) + 1)) # depends on [control=['if'], data=[]] # Should widths of column be recalculated if recalculate_width or sum(self._column_widths) == 0: self._calculate_column_widths() # depends on [control=['if'], data=[]] string_ = [] # Drawing the top border if self.top_border_char: string_.append(self._get_top_border()) # depends on [control=['if'], data=[]] # Print headers if not empty or only spaces if ''.join(self._column_headers).strip(): headers = to_unicode(self._column_headers) string_.append(headers) if self.header_separator_char: string_.append(self._get_header_separator()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Printing rows first_row_encountered = False for row in self._table: if first_row_encountered and self.row_separator_char: string_.append(self._get_row_separator()) # depends on [control=['if'], data=[]] first_row_encountered = True content = to_unicode(row) string_.append(content) # depends on [control=['for'], data=['row']] # Drawing the bottom border if self.bottom_border_char: string_.append(self._get_bottom_border()) # depends on [control=['if'], data=[]] if self.serialno and self.column_count > 0: self.pop_column(0) # depends on [control=['if'], data=[]] return '\n'.join(string_)
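A minimal sketch of the temporary serial-number pattern used inside get_string: insert a 1-based index column, render, then pop it so the table is left untouched. Only the insert_column/pop_column/get_string calls visible above are assumed; the table object itself is whatever BeautifulTable-like class owns this method.

def render_with_serialno(table, header='SN'):
    # Prepend a 1-based index column, render, then restore the original layout.
    table.insert_column(0, header, range(1, len(table) + 1))
    try:
        return table.get_string(recalculate_width=True)
    finally:
        table.pop_column(0)   # runs even if rendering raises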
def read_and_decode(filename, is_train=None):
    """Return image and label tensors read from a TFRecord file."""
    filename_queue = tf.train.string_input_producer([filename])
    reader = tf.TFRecordReader()
    _, serialized_example = reader.read(filename_queue)
    features = tf.parse_single_example(
        serialized_example, features={
            'label': tf.FixedLenFeature([], tf.int64),
            'img_raw': tf.FixedLenFeature([], tf.string),
        }
    )
    # You can do more image distortion here for training data
    img = tf.decode_raw(features['img_raw'], tf.float32)
    img = tf.reshape(img, [32, 32, 3])
    # img = tf.cast(img, tf.float32) #* (1. / 255) - 0.5
    if is_train is True:
        # 1. Randomly crop a [height, width] section of the image.
        img = tf.random_crop(img, [24, 24, 3])
        # 2. Randomly flip the image horizontally.
        img = tf.image.random_flip_left_right(img)
        # 3. Randomly change brightness.
        img = tf.image.random_brightness(img, max_delta=63)
        # 4. Randomly change contrast.
        img = tf.image.random_contrast(img, lower=0.2, upper=1.8)
        # 5. Subtract off the mean and divide by the variance of the pixels.
        img = tf.image.per_image_standardization(img)
    elif is_train is False:
        # 1. Crop the central [height, width] of the image.
        img = tf.image.resize_image_with_crop_or_pad(img, 24, 24)
        # 2. Subtract off the mean and divide by the variance of the pixels.
        img = tf.image.per_image_standardization(img)
    elif is_train is None:
        # No preprocessing: return the raw 32x32 image.
        pass
    label = tf.cast(features['label'], tf.int32)
    return img, label
def function[read_and_decode, parameter[filename, is_train]]: constant[Return tensor to read from TFRecord.] variable[filename_queue] assign[=] call[name[tf].train.string_input_producer, parameter[list[[<ast.Name object at 0x7da18bc71ba0>]]]] variable[reader] assign[=] call[name[tf].TFRecordReader, parameter[]] <ast.Tuple object at 0x7da18bc715a0> assign[=] call[name[reader].read, parameter[name[filename_queue]]] variable[features] assign[=] call[name[tf].parse_single_example, parameter[name[serialized_example]]] variable[img] assign[=] call[name[tf].decode_raw, parameter[call[name[features]][constant[img_raw]], name[tf].float32]] variable[img] assign[=] call[name[tf].reshape, parameter[name[img], list[[<ast.Constant object at 0x7da18bc73f10>, <ast.Constant object at 0x7da18bc734f0>, <ast.Constant object at 0x7da18bc70250>]]]] if compare[name[is_train] equal[==] constant[True]] begin[:] variable[img] assign[=] call[name[tf].random_crop, parameter[name[img], list[[<ast.Constant object at 0x7da18bc739d0>, <ast.Constant object at 0x7da18bc726e0>, <ast.Constant object at 0x7da18bc71990>]]]] variable[img] assign[=] call[name[tf].image.random_flip_left_right, parameter[name[img]]] variable[img] assign[=] call[name[tf].image.random_brightness, parameter[name[img]]] variable[img] assign[=] call[name[tf].image.random_contrast, parameter[name[img]]] variable[img] assign[=] call[name[tf].image.per_image_standardization, parameter[name[img]]] variable[label] assign[=] call[name[tf].cast, parameter[call[name[features]][constant[label]], name[tf].int32]] return[tuple[[<ast.Name object at 0x7da18bc71b10>, <ast.Name object at 0x7da18bc73130>]]]
keyword[def] identifier[read_and_decode] ( identifier[filename] , identifier[is_train] = keyword[None] ): literal[string] identifier[filename_queue] = identifier[tf] . identifier[train] . identifier[string_input_producer] ([ identifier[filename] ]) identifier[reader] = identifier[tf] . identifier[TFRecordReader] () identifier[_] , identifier[serialized_example] = identifier[reader] . identifier[read] ( identifier[filename_queue] ) identifier[features] = identifier[tf] . identifier[parse_single_example] ( identifier[serialized_example] , identifier[features] ={ literal[string] : identifier[tf] . identifier[FixedLenFeature] ([], identifier[tf] . identifier[int64] ), literal[string] : identifier[tf] . identifier[FixedLenFeature] ([], identifier[tf] . identifier[string] ), } ) identifier[img] = identifier[tf] . identifier[decode_raw] ( identifier[features] [ literal[string] ], identifier[tf] . identifier[float32] ) identifier[img] = identifier[tf] . identifier[reshape] ( identifier[img] ,[ literal[int] , literal[int] , literal[int] ]) keyword[if] identifier[is_train] == keyword[True] : identifier[img] = identifier[tf] . identifier[random_crop] ( identifier[img] ,[ literal[int] , literal[int] , literal[int] ]) identifier[img] = identifier[tf] . identifier[image] . identifier[random_flip_left_right] ( identifier[img] ) identifier[img] = identifier[tf] . identifier[image] . identifier[random_brightness] ( identifier[img] , identifier[max_delta] = literal[int] ) identifier[img] = identifier[tf] . identifier[image] . identifier[random_contrast] ( identifier[img] , identifier[lower] = literal[int] , identifier[upper] = literal[int] ) identifier[img] = identifier[tf] . identifier[image] . identifier[per_image_standardization] ( identifier[img] ) keyword[elif] identifier[is_train] == keyword[False] : identifier[img] = identifier[tf] . identifier[image] . identifier[resize_image_with_crop_or_pad] ( identifier[img] , literal[int] , literal[int] ) identifier[img] = identifier[tf] . identifier[image] . identifier[per_image_standardization] ( identifier[img] ) keyword[elif] identifier[is_train] == keyword[None] : identifier[img] = identifier[img] identifier[label] = identifier[tf] . identifier[cast] ( identifier[features] [ literal[string] ], identifier[tf] . identifier[int32] ) keyword[return] identifier[img] , identifier[label]
def read_and_decode(filename, is_train=None): """Return tensor to read from TFRecord.""" filename_queue = tf.train.string_input_producer([filename]) reader = tf.TFRecordReader() (_, serialized_example) = reader.read(filename_queue) features = tf.parse_single_example(serialized_example, features={'label': tf.FixedLenFeature([], tf.int64), 'img_raw': tf.FixedLenFeature([], tf.string)}) # You can do more image distortion here for training data img = tf.decode_raw(features['img_raw'], tf.float32) img = tf.reshape(img, [32, 32, 3]) # img = tf.cast(img, tf.float32) #* (1. / 255) - 0.5 if is_train == True: # 1. Randomly crop a [height, width] section of the image. img = tf.random_crop(img, [24, 24, 3]) # 2. Randomly flip the image horizontally. img = tf.image.random_flip_left_right(img) # 3. Randomly change brightness. img = tf.image.random_brightness(img, max_delta=63) # 4. Randomly change contrast. img = tf.image.random_contrast(img, lower=0.2, upper=1.8) # 5. Subtract off the mean and divide by the variance of the pixels. img = tf.image.per_image_standardization(img) # depends on [control=['if'], data=[]] elif is_train == False: # 1. Crop the central [height, width] of the image. img = tf.image.resize_image_with_crop_or_pad(img, 24, 24) # 2. Subtract off the mean and divide by the variance of the pixels. img = tf.image.per_image_standardization(img) # depends on [control=['if'], data=[]] elif is_train == None: img = img # depends on [control=['if'], data=[]] label = tf.cast(features['label'], tf.int32) return (img, label)
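A hedged usage sketch under the TensorFlow 1.x queue-runner API; the TFRecord filename is a placeholder and the batch sizes are arbitrary. It wires the decoded tensors into a shuffled batch and pulls one batch inside a session.

import tensorflow as tf

img, label = read_and_decode('train.tfrecord', is_train=True)
img_batch, label_batch = tf.train.shuffle_batch(
    [img, label], batch_size=128, capacity=2000, min_after_dequeue=1000)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    images, labels = sess.run([img_batch, label_batch])  # one 128-example batch
    coord.request_stop()
    coord.join(threads)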
def register_from_fields(self, *args):
        """
        Register config names from field widgets.

        Arguments:
            *args: Fields that contain a
                :class:`djangocodemirror.widget.CodeMirrorWidget` widget.

        Returns:
            list: List of registered config names from fields.
        """
        names = []

        for field in args:
            widget = self.resolve_widget(field)
            self.register(widget.config_name)
            if widget.config_name not in names:
                names.append(widget.config_name)

        return names
def function[register_from_fields, parameter[self]]: constant[ Register config name from field widgets Arguments: *args: Fields that contains widget :class:`djangocodemirror.widget.CodeMirrorWidget`. Returns: list: List of registered config names from fields. ] variable[names] assign[=] list[[]] for taget[name[field]] in starred[name[args]] begin[:] variable[widget] assign[=] call[name[self].resolve_widget, parameter[name[field]]] call[name[self].register, parameter[name[widget].config_name]] if compare[name[widget].config_name <ast.NotIn object at 0x7da2590d7190> name[names]] begin[:] call[name[names].append, parameter[name[widget].config_name]] return[name[names]]
keyword[def] identifier[register_from_fields] ( identifier[self] ,* identifier[args] ): literal[string] identifier[names] =[] keyword[for] identifier[field] keyword[in] identifier[args] : identifier[widget] = identifier[self] . identifier[resolve_widget] ( identifier[field] ) identifier[self] . identifier[register] ( identifier[widget] . identifier[config_name] ) keyword[if] identifier[widget] . identifier[config_name] keyword[not] keyword[in] identifier[names] : identifier[names] . identifier[append] ( identifier[widget] . identifier[config_name] ) keyword[return] identifier[names]
def register_from_fields(self, *args): """ Register config name from field widgets Arguments: *args: Fields that contains widget :class:`djangocodemirror.widget.CodeMirrorWidget`. Returns: list: List of registered config names from fields. """ names = [] for field in args: widget = self.resolve_widget(field) self.register(widget.config_name) if widget.config_name not in names: names.append(widget.config_name) # depends on [control=['if'], data=['names']] # depends on [control=['for'], data=['field']] return names
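A hedged sketch of collecting config names from a Django form; the form, the manifest instance, and the config_name argument are assumptions, with the widget import path taken from the docstring above.

from django import forms
from djangocodemirror.widget import CodeMirrorWidget  # path per the docstring

class PageForm(forms.Form):
    body = forms.CharField(widget=CodeMirrorWidget(config_name='rst'))
    style = forms.CharField(widget=CodeMirrorWidget(config_name='css'))

manifest = CodeMirrorManifest()  # hypothetical registry owning register_from_fields
names = manifest.register_from_fields(*PageForm().fields.values())
# names == ['rst', 'css'], each config registered exactly once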
def attach(domain, filename):
    '''
    Attach existing datasets to their harvest remote id.

    The mapping between identifiers should be in the FILENAME CSV file.
    '''
    log.info('Attaching datasets for domain %s', domain)
    result = actions.attach(domain, filename)
    log.info('Attached %s datasets to %s', result.success, domain)
def function[attach, parameter[domain, filename]]: constant[ Attach existing datasets to their harvest remote id Mapping between identifiers should be in FILENAME CSV file. ] call[name[log].info, parameter[constant[Attaching datasets for domain %s], name[domain]]] variable[result] assign[=] call[name[actions].attach, parameter[name[domain], name[filename]]] call[name[log].info, parameter[constant[Attached %s datasets to %s], name[result].success, name[domain]]]
keyword[def] identifier[attach] ( identifier[domain] , identifier[filename] ): literal[string] identifier[log] . identifier[info] ( literal[string] , identifier[domain] ) identifier[result] = identifier[actions] . identifier[attach] ( identifier[domain] , identifier[filename] ) identifier[log] . identifier[info] ( literal[string] , identifier[result] . identifier[success] , identifier[domain] )
def attach(domain, filename): """ Attach existing datasets to their harvest remote id Mapping between identifiers should be in FILENAME CSV file. """ log.info('Attaching datasets for domain %s', domain) result = actions.attach(domain, filename) log.info('Attached %s datasets to %s', result.success, domain)
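The layout of the mapping file is not shown above, so the following is only a hedged guess at a plausible FILENAME: one local-identifier/remote-identifier pair per CSV line.

import csv

# Hypothetical two-column layout: local dataset id, remote harvest id.
with open('mapping.csv', 'w', newline='') as f:
    csv.writer(f).writerows([
        ('local-dataset-1', 'remote-1234'),
        ('local-dataset-2', 'remote-1235'),
    ])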
def spawn_batch_jobs(job, shared_ids, input_args): """ Spawns an alignment job for every sample in the input configuration file """ samples = [] config = input_args['config'] with open(config, 'r') as f_in: for line in f_in: line = line.strip().split(',') uuid = line[0] urls = line[1:] samples.append((uuid, urls)) for sample in samples: job.addChildJobFn(alignment, shared_ids, input_args, sample, cores=32, memory='20 G', disk='100 G')
def function[spawn_batch_jobs, parameter[job, shared_ids, input_args]]: constant[ Spawns an alignment job for every sample in the input configuration file ] variable[samples] assign[=] list[[]] variable[config] assign[=] call[name[input_args]][constant[config]] with call[name[open], parameter[name[config], constant[r]]] begin[:] for taget[name[line]] in starred[name[f_in]] begin[:] variable[line] assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[,]]] variable[uuid] assign[=] call[name[line]][constant[0]] variable[urls] assign[=] call[name[line]][<ast.Slice object at 0x7da20e954b20>] call[name[samples].append, parameter[tuple[[<ast.Name object at 0x7da20e954f70>, <ast.Name object at 0x7da20e957850>]]]] for taget[name[sample]] in starred[name[samples]] begin[:] call[name[job].addChildJobFn, parameter[name[alignment], name[shared_ids], name[input_args], name[sample]]]
keyword[def] identifier[spawn_batch_jobs] ( identifier[job] , identifier[shared_ids] , identifier[input_args] ): literal[string] identifier[samples] =[] identifier[config] = identifier[input_args] [ literal[string] ] keyword[with] identifier[open] ( identifier[config] , literal[string] ) keyword[as] identifier[f_in] : keyword[for] identifier[line] keyword[in] identifier[f_in] : identifier[line] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] ) identifier[uuid] = identifier[line] [ literal[int] ] identifier[urls] = identifier[line] [ literal[int] :] identifier[samples] . identifier[append] (( identifier[uuid] , identifier[urls] )) keyword[for] identifier[sample] keyword[in] identifier[samples] : identifier[job] . identifier[addChildJobFn] ( identifier[alignment] , identifier[shared_ids] , identifier[input_args] , identifier[sample] , identifier[cores] = literal[int] , identifier[memory] = literal[string] , identifier[disk] = literal[string] )
def spawn_batch_jobs(job, shared_ids, input_args): """ Spawns an alignment job for every sample in the input configuration file """ samples = [] config = input_args['config'] with open(config, 'r') as f_in: for line in f_in: line = line.strip().split(',') uuid = line[0] urls = line[1:] samples.append((uuid, urls)) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f_in']] for sample in samples: job.addChildJobFn(alignment, shared_ids, input_args, sample, cores=32, memory='20 G', disk='100 G') # depends on [control=['for'], data=['sample']]
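The parsing above fixes the config format: one sample per line, a UUID followed by one or more comma-separated URLs. A short sketch writing such a file (the bucket paths are placeholders):

samples = [
    ('sample-uuid-1', ['s3://bucket/s1_r1.fastq', 's3://bucket/s1_r2.fastq']),
    ('sample-uuid-2', ['s3://bucket/s2.fastq']),
]
with open('config.csv', 'w') as f_out:
    for uuid, urls in samples:
        f_out.write(','.join([uuid] + urls) + '\n')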
def idle_task(self): '''handle missing parameters''' self.pstate.vehicle_name = self.vehicle_name self.pstate.fetch_check(self.master)
def function[idle_task, parameter[self]]: constant[handle missing parameters] name[self].pstate.vehicle_name assign[=] name[self].vehicle_name call[name[self].pstate.fetch_check, parameter[name[self].master]]
keyword[def] identifier[idle_task] ( identifier[self] ): literal[string] identifier[self] . identifier[pstate] . identifier[vehicle_name] = identifier[self] . identifier[vehicle_name] identifier[self] . identifier[pstate] . identifier[fetch_check] ( identifier[self] . identifier[master] )
def idle_task(self): """handle missing parameters""" self.pstate.vehicle_name = self.vehicle_name self.pstate.fetch_check(self.master)
def STRUCT_DECL(self, cursor, num=None):
        """
        Handles Structure declaration.
        It's a wrapper to _record_decl.
        """
        return self._record_decl(cursor, typedesc.Structure, num)
def function[STRUCT_DECL, parameter[self, cursor, num]]: constant[ Handles Structure declaration. Its a wrapper to _record_decl. ] return[call[name[self]._record_decl, parameter[name[cursor], name[typedesc].Structure, name[num]]]]
keyword[def] identifier[STRUCT_DECL] ( identifier[self] , identifier[cursor] , identifier[num] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_record_decl] ( identifier[cursor] , identifier[typedesc] . identifier[Structure] , identifier[num] )
def STRUCT_DECL(self, cursor, num=None): """ Handles Structure declaration. Its a wrapper to _record_decl. """ return self._record_decl(cursor, typedesc.Structure, num)
def _assemble_and_send_request(self): """ Fires off the Fedex request. @warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(), WHICH RESIDES ON FedexBaseService AND IS INHERITED. """ client = self.client # We get an exception like this when specifying an IntegratorId: # suds.TypeNotFound: Type not found: 'IntegratorId' # Setting it to None does not seem to appease it. del self.ClientDetail.IntegratorId # Fire off the query. response = client.service.postalCodeInquiry(WebAuthenticationDetail=self.WebAuthenticationDetail, ClientDetail=self.ClientDetail, TransactionDetail=self.TransactionDetail, Version=self.VersionId, PostalCode=self.PostalCode, CountryCode=self.CountryCode, CarrierCode=self.CarrierCode) return response
def function[_assemble_and_send_request, parameter[self]]: constant[ Fires off the Fedex request. @warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(), WHICH RESIDES ON FedexBaseService AND IS INHERITED. ] variable[client] assign[=] name[self].client <ast.Delete object at 0x7da1b11a0c40> variable[response] assign[=] call[name[client].service.postalCodeInquiry, parameter[]] return[name[response]]
keyword[def] identifier[_assemble_and_send_request] ( identifier[self] ): literal[string] identifier[client] = identifier[self] . identifier[client] keyword[del] identifier[self] . identifier[ClientDetail] . identifier[IntegratorId] identifier[response] = identifier[client] . identifier[service] . identifier[postalCodeInquiry] ( identifier[WebAuthenticationDetail] = identifier[self] . identifier[WebAuthenticationDetail] , identifier[ClientDetail] = identifier[self] . identifier[ClientDetail] , identifier[TransactionDetail] = identifier[self] . identifier[TransactionDetail] , identifier[Version] = identifier[self] . identifier[VersionId] , identifier[PostalCode] = identifier[self] . identifier[PostalCode] , identifier[CountryCode] = identifier[self] . identifier[CountryCode] , identifier[CarrierCode] = identifier[self] . identifier[CarrierCode] ) keyword[return] identifier[response]
def _assemble_and_send_request(self): """ Fires off the Fedex request. @warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(), WHICH RESIDES ON FedexBaseService AND IS INHERITED. """ client = self.client # We get an exception like this when specifying an IntegratorId: # suds.TypeNotFound: Type not found: 'IntegratorId' # Setting it to None does not seem to appease it. del self.ClientDetail.IntegratorId # Fire off the query. response = client.service.postalCodeInquiry(WebAuthenticationDetail=self.WebAuthenticationDetail, ClientDetail=self.ClientDetail, TransactionDetail=self.TransactionDetail, Version=self.VersionId, PostalCode=self.PostalCode, CountryCode=self.CountryCode, CarrierCode=self.CarrierCode) return response
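Per the warning in the docstring, callers reach this method through send_request() on FedexBaseService. A hedged sketch of that pattern follows; the request class name and CONFIG_OBJ are assumptions, while PostalCode, CountryCode, and CarrierCode are exactly the attributes the SOAP call above reads.

request = FedexPostalCodeInquiryRequest(CONFIG_OBJ)  # hypothetical subclass name
request.PostalCode = '29631'
request.CountryCode = 'US'
request.CarrierCode = 'FDXG'   # carrier code value is illustrative
request.send_request()         # inherited; dispatches to _assemble_and_send_request()
print(request.response)        # parsed SOAP reply, attribute name assumed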
def evaluate(self, x):
        r"""Evaluate the kernels at given frequencies.

        Parameters
        ----------
        x : array_like
            Graph frequencies at which to evaluate the filter.

        Returns
        -------
        y : ndarray
            Frequency response of the filters. Shape ``(g.Nf, len(x))``.

        Examples
        --------
        Frequency response of a low-pass filter:

        >>> import matplotlib.pyplot as plt
        >>> G = graphs.Logo()
        >>> G.compute_fourier_basis()
        >>> f = filters.Expwin(G)
        >>> y = f.evaluate(G.e)
        >>> plt.plot(G.e, y[0])  # doctest: +ELLIPSIS
        [<matplotlib.lines.Line2D object at ...>]

        """
        x = np.asanyarray(x)
        # Avoid copying data, as np.array([g(x) for g in self._kernels]) would.
        y = np.empty([self.Nf] + list(x.shape))
        for i, kernel in enumerate(self._kernels):
            y[i] = kernel(x)
        return y
def function[evaluate, parameter[self, x]]: constant[Evaluate the kernels at given frequencies. Parameters ---------- x : array_like Graph frequencies at which to evaluate the filter. Returns ------- y : ndarray Frequency response of the filters. Shape ``(g.Nf, len(x))``. Examples -------- Frequency response of a low-pass filter: >>> import matplotlib.pyplot as plt >>> G = graphs.Logo() >>> G.compute_fourier_basis() >>> f = filters.Expwin(G) >>> G.compute_fourier_basis() >>> y = f.evaluate(G.e) >>> plt.plot(G.e, y[0]) # doctest: +ELLIPSIS [<matplotlib.lines.Line2D object at ...>] ] variable[x] assign[=] call[name[np].asanyarray, parameter[name[x]]] variable[y] assign[=] call[name[np].empty, parameter[binary_operation[list[[<ast.Attribute object at 0x7da20e957070>]] + call[name[list], parameter[name[x].shape]]]]] for taget[tuple[[<ast.Name object at 0x7da20e954e50>, <ast.Name object at 0x7da20e957ca0>]]] in starred[call[name[enumerate], parameter[name[self]._kernels]]] begin[:] call[name[y]][name[i]] assign[=] call[name[kernel], parameter[name[x]]] return[name[y]]
keyword[def] identifier[evaluate] ( identifier[self] , identifier[x] ): literal[string] identifier[x] = identifier[np] . identifier[asanyarray] ( identifier[x] ) identifier[y] = identifier[np] . identifier[empty] ([ identifier[self] . identifier[Nf] ]+ identifier[list] ( identifier[x] . identifier[shape] )) keyword[for] identifier[i] , identifier[kernel] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_kernels] ): identifier[y] [ identifier[i] ]= identifier[kernel] ( identifier[x] ) keyword[return] identifier[y]
def evaluate(self, x): """Evaluate the kernels at given frequencies. Parameters ---------- x : array_like Graph frequencies at which to evaluate the filter. Returns ------- y : ndarray Frequency response of the filters. Shape ``(g.Nf, len(x))``. Examples -------- Frequency response of a low-pass filter: >>> import matplotlib.pyplot as plt >>> G = graphs.Logo() >>> G.compute_fourier_basis() >>> f = filters.Expwin(G) >>> G.compute_fourier_basis() >>> y = f.evaluate(G.e) >>> plt.plot(G.e, y[0]) # doctest: +ELLIPSIS [<matplotlib.lines.Line2D object at ...>] """ x = np.asanyarray(x) # Avoid to copy data as with np.array([g(x) for g in self._kernels]). y = np.empty([self.Nf] + list(x.shape)) for (i, kernel) in enumerate(self._kernels): y[i] = kernel(x) # depends on [control=['for'], data=[]] return y
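A hedged shape check complementing the docstring example: a bank of Nf kernels evaluated at m frequencies yields an (Nf, m) array. The MexicanHat constructor follows pygsp's documented signature but is an assumption here.

import numpy as np
from pygsp import graphs, filters

G = graphs.Logo()
G.estimate_lmax()                # cheap spectral bound, no full Fourier basis needed
g = filters.MexicanHat(G, Nf=6)  # 6-kernel filter bank (signature assumed)
y = g.evaluate(np.linspace(0, G.lmax, 50))
assert y.shape == (6, 50)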
def filter_query(s):
    """
    Filter the given query with the regex below and return lists of
    quoted and unquoted strings.
    """
    matches = re.findall(r'(?:"([^"]*)")|([^"]*)', s)
    result_quoted = [t[0].strip() for t in matches if t[0]]
    result_unquoted = [t[1].strip() for t in matches if t[1]]
    return result_quoted, result_unquoted
def function[filter_query, parameter[s]]: constant[ Filters given query with the below regex and returns lists of quoted and unquoted strings ] variable[matches] assign[=] call[name[re].findall, parameter[constant[(?:"([^"]*)")|([^"]*)], name[s]]] variable[result_quoted] assign[=] <ast.ListComp object at 0x7da1b1becbb0> variable[result_unquoted] assign[=] <ast.ListComp object at 0x7da1b1bee710> return[tuple[[<ast.Name object at 0x7da1b1bee7d0>, <ast.Name object at 0x7da1b1bed1b0>]]]
keyword[def] identifier[filter_query] ( identifier[s] ): literal[string] identifier[matches] = identifier[re] . identifier[findall] ( literal[string] , identifier[s] ) identifier[result_quoted] =[ identifier[t] [ literal[int] ]. identifier[strip] () keyword[for] identifier[t] keyword[in] identifier[matches] keyword[if] identifier[t] [ literal[int] ]] identifier[result_unquoted] =[ identifier[t] [ literal[int] ]. identifier[strip] () keyword[for] identifier[t] keyword[in] identifier[matches] keyword[if] identifier[t] [ literal[int] ]] keyword[return] identifier[result_quoted] , identifier[result_unquoted]
def filter_query(s): """ Filters given query with the below regex and returns lists of quoted and unquoted strings """ matches = re.findall('(?:"([^"]*)")|([^"]*)', s) result_quoted = [t[0].strip() for t in matches if t[0]] result_unquoted = [t[1].strip() for t in matches if t[1]] return (result_quoted, result_unquoted)
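A runnable illustration of the split; nothing is assumed beyond the function above.

quoted, unquoted = filter_query('foo "bar baz" qux "zap"')
print(quoted)    # ['bar baz', 'zap']
print(unquoted)  # ['foo', 'qux']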
def _compute_missing_rates(self, currency):
        """Fill missing rates of a currency.

        This is done by linear interpolation of the two closest available
        rates.

        :param str currency: The currency to fill missing rates for.
        """
        rates = self._rates[currency]

        # tmp will store the closest rates forward and backward
        tmp = defaultdict(lambda: [None, None])

        for date in sorted(rates):
            rate = rates[date]
            if rate is not None:
                closest_rate = rate
                dist = 0
            else:
                dist += 1
                # Only missing dates get an entry: the closest earlier
                # rate paired with its distance in days.
                tmp[date][0] = closest_rate, dist

        for date in sorted(rates, reverse=True):
            rate = rates[date]
            if rate is not None:
                closest_rate = rate
                dist = 0
            else:
                dist += 1
                # Same as above, but with the closest later rate.
                tmp[date][1] = closest_rate, dist

        for date in sorted(tmp):
            (r0, d0), (r1, d1) = tmp[date]
            # Weighted average: the closer rate gets the larger weight.
            rates[date] = (r0 * d1 + r1 * d0) / (d0 + d1)
            if self.verbose:
                print(('{0}: filling {1} missing rate using {2} ({3}d old) and '
                       '{4} ({5}d later)').format(currency, date,
                                                  r0, d0, r1, d1))
def function[_compute_missing_rates, parameter[self, currency]]: constant[Fill missing rates of a currency. This is done by linear interpolation of the two closest available rates. :param str currency: The currency to fill missing rates for. ] variable[rates] assign[=] call[name[self]._rates][name[currency]] variable[tmp] assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b038ba90>]] for taget[name[date]] in starred[call[name[sorted], parameter[name[rates]]]] begin[:] variable[rate] assign[=] call[name[rates]][name[date]] if compare[name[rate] is_not constant[None]] begin[:] variable[closest_rate] assign[=] name[rate] variable[dist] assign[=] constant[0] for taget[name[date]] in starred[call[name[sorted], parameter[name[rates]]]] begin[:] variable[rate] assign[=] call[name[rates]][name[date]] if compare[name[rate] is_not constant[None]] begin[:] variable[closest_rate] assign[=] name[rate] variable[dist] assign[=] constant[0] for taget[name[date]] in starred[call[name[sorted], parameter[name[tmp]]]] begin[:] <ast.Tuple object at 0x7da1b038afb0> assign[=] call[name[tmp]][name[date]] call[name[rates]][name[date]] assign[=] binary_operation[binary_operation[binary_operation[name[r0] * name[d1]] + binary_operation[name[r1] * name[d0]]] / binary_operation[name[d0] + name[d1]]] if name[self].verbose begin[:] call[name[print], parameter[call[constant[{0}: filling {1} missing rate using {2} ({3}d old) and {4} ({5}d later)].format, parameter[name[currency], name[date], name[r0], name[d0], name[r1], name[d1]]]]]
keyword[def] identifier[_compute_missing_rates] ( identifier[self] , identifier[currency] ): literal[string] identifier[rates] = identifier[self] . identifier[_rates] [ identifier[currency] ] identifier[tmp] = identifier[defaultdict] ( keyword[lambda] :[ keyword[None] , keyword[None] ]) keyword[for] identifier[date] keyword[in] identifier[sorted] ( identifier[rates] ): identifier[rate] = identifier[rates] [ identifier[date] ] keyword[if] identifier[rate] keyword[is] keyword[not] keyword[None] : identifier[closest_rate] = identifier[rate] identifier[dist] = literal[int] keyword[else] : identifier[dist] += literal[int] identifier[tmp] [ identifier[date] ][ literal[int] ]= identifier[closest_rate] , identifier[dist] keyword[for] identifier[date] keyword[in] identifier[sorted] ( identifier[rates] , identifier[reverse] = keyword[True] ): identifier[rate] = identifier[rates] [ identifier[date] ] keyword[if] identifier[rate] keyword[is] keyword[not] keyword[None] : identifier[closest_rate] = identifier[rate] identifier[dist] = literal[int] keyword[else] : identifier[dist] += literal[int] identifier[tmp] [ identifier[date] ][ literal[int] ]= identifier[closest_rate] , identifier[dist] keyword[for] identifier[date] keyword[in] identifier[sorted] ( identifier[tmp] ): ( identifier[r0] , identifier[d0] ),( identifier[r1] , identifier[d1] )= identifier[tmp] [ identifier[date] ] identifier[rates] [ identifier[date] ]=( identifier[r0] * identifier[d1] + identifier[r1] * identifier[d0] )/( identifier[d0] + identifier[d1] ) keyword[if] identifier[self] . identifier[verbose] : identifier[print] (( literal[string] literal[string] ). identifier[format] ( identifier[currency] , identifier[date] , identifier[r0] , identifier[d0] , identifier[r1] , identifier[d1] ))
def _compute_missing_rates(self, currency): """Fill missing rates of a currency. This is done by linear interpolation of the two closest available rates. :param str currency: The currency to fill missing rates for. """ rates = self._rates[currency] # tmp will store the closest rates forward and backward tmp = defaultdict(lambda : [None, None]) for date in sorted(rates): rate = rates[date] if rate is not None: closest_rate = rate dist = 0 # depends on [control=['if'], data=['rate']] else: dist += 1 tmp[date][0] = (closest_rate, dist) # depends on [control=['for'], data=['date']] for date in sorted(rates, reverse=True): rate = rates[date] if rate is not None: closest_rate = rate dist = 0 # depends on [control=['if'], data=['rate']] else: dist += 1 tmp[date][1] = (closest_rate, dist) # depends on [control=['for'], data=['date']] for date in sorted(tmp): ((r0, d0), (r1, d1)) = tmp[date] rates[date] = (r0 * d1 + r1 * d0) / (d0 + d1) if self.verbose: print('{0}: filling {1} missing rate using {2} ({3}d old) and {4} ({5}d later)'.format(currency, date, r0, d0, r1, d1)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['date']]
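A worked instance of the distance-weighted fill: a gap one day after a 1.10 rate and two days before a 1.16 rate lands one third of the way between them.

r0, d0 = 1.10, 1   # closest earlier rate, one day back
r1, d1 = 1.16, 2   # closest later rate, two days ahead
filled = (r0 * d1 + r1 * d0) / (d0 + d1)
assert abs(filled - 1.12) < 1e-9   # equals 1.10 + (1.16 - 1.10) * d0 / (d0 + d1)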
def _FormatTypeCheck(type_): """Pretty format of type check.""" if isinstance(type_, tuple): items = [_FormatTypeCheck(t) for t in type_] return "(%s)" % ", ".join(items) elif hasattr(type_, "__name__"): return type_.__name__ else: return repr(type_)
def function[_FormatTypeCheck, parameter[type_]]: constant[Pretty format of type check.] if call[name[isinstance], parameter[name[type_], name[tuple]]] begin[:] variable[items] assign[=] <ast.ListComp object at 0x7da18f00d000> return[binary_operation[constant[(%s)] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[items]]]]]
keyword[def] identifier[_FormatTypeCheck] ( identifier[type_] ): literal[string] keyword[if] identifier[isinstance] ( identifier[type_] , identifier[tuple] ): identifier[items] =[ identifier[_FormatTypeCheck] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[type_] ] keyword[return] literal[string] % literal[string] . identifier[join] ( identifier[items] ) keyword[elif] identifier[hasattr] ( identifier[type_] , literal[string] ): keyword[return] identifier[type_] . identifier[__name__] keyword[else] : keyword[return] identifier[repr] ( identifier[type_] )
def _FormatTypeCheck(type_): """Pretty format of type check.""" if isinstance(type_, tuple): items = [_FormatTypeCheck(t) for t in type_] return '(%s)' % ', '.join(items) # depends on [control=['if'], data=[]] elif hasattr(type_, '__name__'): return type_.__name__ # depends on [control=['if'], data=[]] else: return repr(type_)
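A runnable illustration using standard types only:

print(_FormatTypeCheck(int))                   # int
print(_FormatTypeCheck((int, float)))          # (int, float)
print(_FormatTypeCheck(((str, bytes), dict)))  # ((str, bytes), dict)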