code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def retrieve_candidate_values(self, service, operation, param): """Retrieve server side completions. :type service: str :param service: The service name, e.g. 'ec2', 'iam'. :type operation: str :param operation: The operation name, in the casing used by the CLI (words separated by hyphens), e.g. 'describe-instances', 'delete-user'. :type param: str :param param: The param name, as specified in the service model, e.g. 'InstanceIds', 'UserName'. :rtype: list :return: A list of possible completions for the service/operation/param combination. If no completions were found an empty list is returned. """ # Example call: # service='ec2', # operation='terminate-instances', # param='InstanceIds'. if service not in self._describer_creator.services_with_completions(): return [] try: client = self._client_creator.create_client(service) except BotoCoreError as e: # create_client() could raise an exception if the session # isn't fully configured (say it's missing a region). # However, we don't want to turn off all server side # completions because it's still possible to create # clients for some services without a region, e.g. IAM. LOG.debug("Error when trying to create a client for %s", service, exc_info=True) return [] api_operation_name = client.meta.method_to_api_mapping.get( operation.replace('-', '_')) if api_operation_name is None: return [] # Now we need to convert the param name to the # casing used by the API. completer = self._describer_creator.create_completer_query(service) result = completer.describe_autocomplete( service, api_operation_name, param) if result is None: return try: response = getattr(client, xform_name(result.operation, '_'))() except Exception as e: LOG.debug("Error when calling %s.%s: %s", service, result.operation, e, exc_info=True) return results = jmespath.search(result.path, response) return results
def function[retrieve_candidate_values, parameter[self, service, operation, param]]: constant[Retrieve server side completions. :type service: str :param service: The service name, e.g. 'ec2', 'iam'. :type operation: str :param operation: The operation name, in the casing used by the CLI (words separated by hyphens), e.g. 'describe-instances', 'delete-user'. :type param: str :param param: The param name, as specified in the service model, e.g. 'InstanceIds', 'UserName'. :rtype: list :return: A list of possible completions for the service/operation/param combination. If no completions were found an empty list is returned. ] if compare[name[service] <ast.NotIn object at 0x7da2590d7190> call[name[self]._describer_creator.services_with_completions, parameter[]]] begin[:] return[list[[]]] <ast.Try object at 0x7da20c6e4e50> variable[api_operation_name] assign[=] call[name[client].meta.method_to_api_mapping.get, parameter[call[name[operation].replace, parameter[constant[-], constant[_]]]]] if compare[name[api_operation_name] is constant[None]] begin[:] return[list[[]]] variable[completer] assign[=] call[name[self]._describer_creator.create_completer_query, parameter[name[service]]] variable[result] assign[=] call[name[completer].describe_autocomplete, parameter[name[service], name[api_operation_name], name[param]]] if compare[name[result] is constant[None]] begin[:] return[None] <ast.Try object at 0x7da20c6e5780> variable[results] assign[=] call[name[jmespath].search, parameter[name[result].path, name[response]]] return[name[results]]
keyword[def] identifier[retrieve_candidate_values] ( identifier[self] , identifier[service] , identifier[operation] , identifier[param] ): literal[string] keyword[if] identifier[service] keyword[not] keyword[in] identifier[self] . identifier[_describer_creator] . identifier[services_with_completions] (): keyword[return] [] keyword[try] : identifier[client] = identifier[self] . identifier[_client_creator] . identifier[create_client] ( identifier[service] ) keyword[except] identifier[BotoCoreError] keyword[as] identifier[e] : identifier[LOG] . identifier[debug] ( literal[string] , identifier[service] , identifier[exc_info] = keyword[True] ) keyword[return] [] identifier[api_operation_name] = identifier[client] . identifier[meta] . identifier[method_to_api_mapping] . identifier[get] ( identifier[operation] . identifier[replace] ( literal[string] , literal[string] )) keyword[if] identifier[api_operation_name] keyword[is] keyword[None] : keyword[return] [] identifier[completer] = identifier[self] . identifier[_describer_creator] . identifier[create_completer_query] ( identifier[service] ) identifier[result] = identifier[completer] . identifier[describe_autocomplete] ( identifier[service] , identifier[api_operation_name] , identifier[param] ) keyword[if] identifier[result] keyword[is] keyword[None] : keyword[return] keyword[try] : identifier[response] = identifier[getattr] ( identifier[client] , identifier[xform_name] ( identifier[result] . identifier[operation] , literal[string] ))() keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[LOG] . identifier[debug] ( literal[string] , identifier[service] , identifier[result] . identifier[operation] , identifier[e] , identifier[exc_info] = keyword[True] ) keyword[return] identifier[results] = identifier[jmespath] . identifier[search] ( identifier[result] . identifier[path] , identifier[response] ) keyword[return] identifier[results]
def retrieve_candidate_values(self, service, operation, param): """Retrieve server side completions. :type service: str :param service: The service name, e.g. 'ec2', 'iam'. :type operation: str :param operation: The operation name, in the casing used by the CLI (words separated by hyphens), e.g. 'describe-instances', 'delete-user'. :type param: str :param param: The param name, as specified in the service model, e.g. 'InstanceIds', 'UserName'. :rtype: list :return: A list of possible completions for the service/operation/param combination. If no completions were found an empty list is returned. """ # Example call: # service='ec2', # operation='terminate-instances', # param='InstanceIds'. if service not in self._describer_creator.services_with_completions(): return [] # depends on [control=['if'], data=[]] try: client = self._client_creator.create_client(service) # depends on [control=['try'], data=[]] except BotoCoreError as e: # create_client() could raise an exception if the session # isn't fully configured (say it's missing a region). # However, we don't want to turn off all server side # completions because it's still possible to create # clients for some services without a region, e.g. IAM. LOG.debug('Error when trying to create a client for %s', service, exc_info=True) return [] # depends on [control=['except'], data=[]] api_operation_name = client.meta.method_to_api_mapping.get(operation.replace('-', '_')) if api_operation_name is None: return [] # depends on [control=['if'], data=[]] # Now we need to convert the param name to the # casing used by the API. 
completer = self._describer_creator.create_completer_query(service) result = completer.describe_autocomplete(service, api_operation_name, param) if result is None: return # depends on [control=['if'], data=[]] try: response = getattr(client, xform_name(result.operation, '_'))() # depends on [control=['try'], data=[]] except Exception as e: LOG.debug('Error when calling %s.%s: %s', service, result.operation, e, exc_info=True) return # depends on [control=['except'], data=['e']] results = jmespath.search(result.path, response) return results
def _initialize_serialization(driver_id, worker=global_worker): """Initialize the serialization library. This defines a custom serializer for object IDs and also tells ray to serialize several exception classes that we define for error handling. """ serialization_context = pyarrow.default_serialization_context() # Tell the serialization context to use the cloudpickle version that we # ship with Ray. serialization_context.set_pickle(pickle.dumps, pickle.loads) pyarrow.register_torch_serialization_handlers(serialization_context) for id_type in ray._raylet._ID_TYPES: serialization_context.register_type( id_type, "{}.{}".format(id_type.__module__, id_type.__name__), pickle=True) def actor_handle_serializer(obj): return obj._serialization_helper(True) def actor_handle_deserializer(serialized_obj): new_handle = ray.actor.ActorHandle.__new__(ray.actor.ActorHandle) new_handle._deserialization_helper(serialized_obj, True) return new_handle # We register this serializer on each worker instead of calling # register_custom_serializer from the driver so that isinstance still # works. serialization_context.register_type( ray.actor.ActorHandle, "ray.ActorHandle", pickle=False, custom_serializer=actor_handle_serializer, custom_deserializer=actor_handle_deserializer) worker.serialization_context_map[driver_id] = serialization_context # Register exception types. for error_cls in RAY_EXCEPTION_TYPES: register_custom_serializer( error_cls, use_dict=True, local=True, driver_id=driver_id, class_id=error_cls.__module__ + ". " + error_cls.__name__, ) # Tell Ray to serialize lambdas with pickle. register_custom_serializer( type(lambda: 0), use_pickle=True, local=True, driver_id=driver_id, class_id="lambda") # Tell Ray to serialize types with pickle. register_custom_serializer( type(int), use_pickle=True, local=True, driver_id=driver_id, class_id="type") # Tell Ray to serialize FunctionSignatures as dictionaries. This is # used when passing around actor handles. 
register_custom_serializer( ray.signature.FunctionSignature, use_dict=True, local=True, driver_id=driver_id, class_id="ray.signature.FunctionSignature")
def function[_initialize_serialization, parameter[driver_id, worker]]: constant[Initialize the serialization library. This defines a custom serializer for object IDs and also tells ray to serialize several exception classes that we define for error handling. ] variable[serialization_context] assign[=] call[name[pyarrow].default_serialization_context, parameter[]] call[name[serialization_context].set_pickle, parameter[name[pickle].dumps, name[pickle].loads]] call[name[pyarrow].register_torch_serialization_handlers, parameter[name[serialization_context]]] for taget[name[id_type]] in starred[name[ray]._raylet._ID_TYPES] begin[:] call[name[serialization_context].register_type, parameter[name[id_type], call[constant[{}.{}].format, parameter[name[id_type].__module__, name[id_type].__name__]]]] def function[actor_handle_serializer, parameter[obj]]: return[call[name[obj]._serialization_helper, parameter[constant[True]]]] def function[actor_handle_deserializer, parameter[serialized_obj]]: variable[new_handle] assign[=] call[name[ray].actor.ActorHandle.__new__, parameter[name[ray].actor.ActorHandle]] call[name[new_handle]._deserialization_helper, parameter[name[serialized_obj], constant[True]]] return[name[new_handle]] call[name[serialization_context].register_type, parameter[name[ray].actor.ActorHandle, constant[ray.ActorHandle]]] call[name[worker].serialization_context_map][name[driver_id]] assign[=] name[serialization_context] for taget[name[error_cls]] in starred[name[RAY_EXCEPTION_TYPES]] begin[:] call[name[register_custom_serializer], parameter[name[error_cls]]] call[name[register_custom_serializer], parameter[call[name[type], parameter[<ast.Lambda object at 0x7da18f00c040>]]]] call[name[register_custom_serializer], parameter[call[name[type], parameter[name[int]]]]] call[name[register_custom_serializer], parameter[name[ray].signature.FunctionSignature]]
keyword[def] identifier[_initialize_serialization] ( identifier[driver_id] , identifier[worker] = identifier[global_worker] ): literal[string] identifier[serialization_context] = identifier[pyarrow] . identifier[default_serialization_context] () identifier[serialization_context] . identifier[set_pickle] ( identifier[pickle] . identifier[dumps] , identifier[pickle] . identifier[loads] ) identifier[pyarrow] . identifier[register_torch_serialization_handlers] ( identifier[serialization_context] ) keyword[for] identifier[id_type] keyword[in] identifier[ray] . identifier[_raylet] . identifier[_ID_TYPES] : identifier[serialization_context] . identifier[register_type] ( identifier[id_type] , literal[string] . identifier[format] ( identifier[id_type] . identifier[__module__] , identifier[id_type] . identifier[__name__] ), identifier[pickle] = keyword[True] ) keyword[def] identifier[actor_handle_serializer] ( identifier[obj] ): keyword[return] identifier[obj] . identifier[_serialization_helper] ( keyword[True] ) keyword[def] identifier[actor_handle_deserializer] ( identifier[serialized_obj] ): identifier[new_handle] = identifier[ray] . identifier[actor] . identifier[ActorHandle] . identifier[__new__] ( identifier[ray] . identifier[actor] . identifier[ActorHandle] ) identifier[new_handle] . identifier[_deserialization_helper] ( identifier[serialized_obj] , keyword[True] ) keyword[return] identifier[new_handle] identifier[serialization_context] . identifier[register_type] ( identifier[ray] . identifier[actor] . identifier[ActorHandle] , literal[string] , identifier[pickle] = keyword[False] , identifier[custom_serializer] = identifier[actor_handle_serializer] , identifier[custom_deserializer] = identifier[actor_handle_deserializer] ) identifier[worker] . 
identifier[serialization_context_map] [ identifier[driver_id] ]= identifier[serialization_context] keyword[for] identifier[error_cls] keyword[in] identifier[RAY_EXCEPTION_TYPES] : identifier[register_custom_serializer] ( identifier[error_cls] , identifier[use_dict] = keyword[True] , identifier[local] = keyword[True] , identifier[driver_id] = identifier[driver_id] , identifier[class_id] = identifier[error_cls] . identifier[__module__] + literal[string] + identifier[error_cls] . identifier[__name__] , ) identifier[register_custom_serializer] ( identifier[type] ( keyword[lambda] : literal[int] ), identifier[use_pickle] = keyword[True] , identifier[local] = keyword[True] , identifier[driver_id] = identifier[driver_id] , identifier[class_id] = literal[string] ) identifier[register_custom_serializer] ( identifier[type] ( identifier[int] ), identifier[use_pickle] = keyword[True] , identifier[local] = keyword[True] , identifier[driver_id] = identifier[driver_id] , identifier[class_id] = literal[string] ) identifier[register_custom_serializer] ( identifier[ray] . identifier[signature] . identifier[FunctionSignature] , identifier[use_dict] = keyword[True] , identifier[local] = keyword[True] , identifier[driver_id] = identifier[driver_id] , identifier[class_id] = literal[string] )
def _initialize_serialization(driver_id, worker=global_worker): """Initialize the serialization library. This defines a custom serializer for object IDs and also tells ray to serialize several exception classes that we define for error handling. """ serialization_context = pyarrow.default_serialization_context() # Tell the serialization context to use the cloudpickle version that we # ship with Ray. serialization_context.set_pickle(pickle.dumps, pickle.loads) pyarrow.register_torch_serialization_handlers(serialization_context) for id_type in ray._raylet._ID_TYPES: serialization_context.register_type(id_type, '{}.{}'.format(id_type.__module__, id_type.__name__), pickle=True) # depends on [control=['for'], data=['id_type']] def actor_handle_serializer(obj): return obj._serialization_helper(True) def actor_handle_deserializer(serialized_obj): new_handle = ray.actor.ActorHandle.__new__(ray.actor.ActorHandle) new_handle._deserialization_helper(serialized_obj, True) return new_handle # We register this serializer on each worker instead of calling # register_custom_serializer from the driver so that isinstance still # works. serialization_context.register_type(ray.actor.ActorHandle, 'ray.ActorHandle', pickle=False, custom_serializer=actor_handle_serializer, custom_deserializer=actor_handle_deserializer) worker.serialization_context_map[driver_id] = serialization_context # Register exception types. for error_cls in RAY_EXCEPTION_TYPES: register_custom_serializer(error_cls, use_dict=True, local=True, driver_id=driver_id, class_id=error_cls.__module__ + '. ' + error_cls.__name__) # depends on [control=['for'], data=['error_cls']] # Tell Ray to serialize lambdas with pickle. register_custom_serializer(type(lambda : 0), use_pickle=True, local=True, driver_id=driver_id, class_id='lambda') # Tell Ray to serialize types with pickle. 
register_custom_serializer(type(int), use_pickle=True, local=True, driver_id=driver_id, class_id='type') # Tell Ray to serialize FunctionSignatures as dictionaries. This is # used when passing around actor handles. register_custom_serializer(ray.signature.FunctionSignature, use_dict=True, local=True, driver_id=driver_id, class_id='ray.signature.FunctionSignature')
def content_type(transformers, default=None): """Returns a different transformer depending on the content type passed in. If none match and no default is given no transformation takes place. should pass in a dict with the following format: {'[content-type]': transformation_action, ... } """ transformers = {content_type: auto_kwargs(transformer) if transformer else transformer for content_type, transformer in transformers.items()} default = default and auto_kwargs(default) def transform(data, request): transformer = transformers.get(request.content_type.split(';')[0], default) if not transformer: return data return transformer(data) return transform
def function[content_type, parameter[transformers, default]]: constant[Returns a different transformer depending on the content type passed in. If none match and no default is given no transformation takes place. should pass in a dict with the following format: {'[content-type]': transformation_action, ... } ] variable[transformers] assign[=] <ast.DictComp object at 0x7da204621960> variable[default] assign[=] <ast.BoolOp object at 0x7da204962fb0> def function[transform, parameter[data, request]]: variable[transformer] assign[=] call[name[transformers].get, parameter[call[call[name[request].content_type.split, parameter[constant[;]]]][constant[0]], name[default]]] if <ast.UnaryOp object at 0x7da1b1b456c0> begin[:] return[name[data]] return[call[name[transformer], parameter[name[data]]]] return[name[transform]]
keyword[def] identifier[content_type] ( identifier[transformers] , identifier[default] = keyword[None] ): literal[string] identifier[transformers] ={ identifier[content_type] : identifier[auto_kwargs] ( identifier[transformer] ) keyword[if] identifier[transformer] keyword[else] identifier[transformer] keyword[for] identifier[content_type] , identifier[transformer] keyword[in] identifier[transformers] . identifier[items] ()} identifier[default] = identifier[default] keyword[and] identifier[auto_kwargs] ( identifier[default] ) keyword[def] identifier[transform] ( identifier[data] , identifier[request] ): identifier[transformer] = identifier[transformers] . identifier[get] ( identifier[request] . identifier[content_type] . identifier[split] ( literal[string] )[ literal[int] ], identifier[default] ) keyword[if] keyword[not] identifier[transformer] : keyword[return] identifier[data] keyword[return] identifier[transformer] ( identifier[data] ) keyword[return] identifier[transform]
def content_type(transformers, default=None): """Returns a different transformer depending on the content type passed in. If none match and no default is given no transformation takes place. should pass in a dict with the following format: {'[content-type]': transformation_action, ... } """ transformers = {content_type: auto_kwargs(transformer) if transformer else transformer for (content_type, transformer) in transformers.items()} default = default and auto_kwargs(default) def transform(data, request): transformer = transformers.get(request.content_type.split(';')[0], default) if not transformer: return data # depends on [control=['if'], data=[]] return transformer(data) return transform
def download_data(self, configuration, output_file): """ Выполняет указанный в конфигурации запрос и отдает файл на скачивание :param configuration: Конфгурация запроса :param output_file: Место, куда надо скачать файл :return: """ params = configuration response = self.__app.native_api_call('metaql', 'download-data', params, self.__options, False, None, True, http_path="/api/v1/meta/") with open(output_file, 'wb') as out_file: shutil.copyfileobj(response.raw, out_file) del response
def function[download_data, parameter[self, configuration, output_file]]: constant[ Выполняет указанный в конфигурации запрос и отдает файл на скачивание :param configuration: Конфгурация запроса :param output_file: Место, куда надо скачать файл :return: ] variable[params] assign[=] name[configuration] variable[response] assign[=] call[name[self].__app.native_api_call, parameter[constant[metaql], constant[download-data], name[params], name[self].__options, constant[False], constant[None], constant[True]]] with call[name[open], parameter[name[output_file], constant[wb]]] begin[:] call[name[shutil].copyfileobj, parameter[name[response].raw, name[out_file]]] <ast.Delete object at 0x7da20c990d90>
keyword[def] identifier[download_data] ( identifier[self] , identifier[configuration] , identifier[output_file] ): literal[string] identifier[params] = identifier[configuration] identifier[response] = identifier[self] . identifier[__app] . identifier[native_api_call] ( literal[string] , literal[string] , identifier[params] , identifier[self] . identifier[__options] , keyword[False] , keyword[None] , keyword[True] , identifier[http_path] = literal[string] ) keyword[with] identifier[open] ( identifier[output_file] , literal[string] ) keyword[as] identifier[out_file] : identifier[shutil] . identifier[copyfileobj] ( identifier[response] . identifier[raw] , identifier[out_file] ) keyword[del] identifier[response]
def download_data(self, configuration, output_file): """ Выполняет указанный в конфигурации запрос и отдает файл на скачивание :param configuration: Конфгурация запроса :param output_file: Место, куда надо скачать файл :return: """ params = configuration response = self.__app.native_api_call('metaql', 'download-data', params, self.__options, False, None, True, http_path='/api/v1/meta/') with open(output_file, 'wb') as out_file: shutil.copyfileobj(response.raw, out_file) # depends on [control=['with'], data=['out_file']] del response
def qos_queue_scheduler_strict_priority_dwrr_traffic_class5(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") qos = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos") queue = ET.SubElement(qos, "queue") scheduler = ET.SubElement(queue, "scheduler") strict_priority = ET.SubElement(scheduler, "strict-priority") dwrr_traffic_class5 = ET.SubElement(strict_priority, "dwrr-traffic-class5") dwrr_traffic_class5.text = kwargs.pop('dwrr_traffic_class5') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[qos_queue_scheduler_strict_priority_dwrr_traffic_class5, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[qos] assign[=] call[name[ET].SubElement, parameter[name[config], constant[qos]]] variable[queue] assign[=] call[name[ET].SubElement, parameter[name[qos], constant[queue]]] variable[scheduler] assign[=] call[name[ET].SubElement, parameter[name[queue], constant[scheduler]]] variable[strict_priority] assign[=] call[name[ET].SubElement, parameter[name[scheduler], constant[strict-priority]]] variable[dwrr_traffic_class5] assign[=] call[name[ET].SubElement, parameter[name[strict_priority], constant[dwrr-traffic-class5]]] name[dwrr_traffic_class5].text assign[=] call[name[kwargs].pop, parameter[constant[dwrr_traffic_class5]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[qos_queue_scheduler_strict_priority_dwrr_traffic_class5] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[qos] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[queue] = identifier[ET] . identifier[SubElement] ( identifier[qos] , literal[string] ) identifier[scheduler] = identifier[ET] . identifier[SubElement] ( identifier[queue] , literal[string] ) identifier[strict_priority] = identifier[ET] . identifier[SubElement] ( identifier[scheduler] , literal[string] ) identifier[dwrr_traffic_class5] = identifier[ET] . identifier[SubElement] ( identifier[strict_priority] , literal[string] ) identifier[dwrr_traffic_class5] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def qos_queue_scheduler_strict_priority_dwrr_traffic_class5(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') qos = ET.SubElement(config, 'qos', xmlns='urn:brocade.com:mgmt:brocade-qos') queue = ET.SubElement(qos, 'queue') scheduler = ET.SubElement(queue, 'scheduler') strict_priority = ET.SubElement(scheduler, 'strict-priority') dwrr_traffic_class5 = ET.SubElement(strict_priority, 'dwrr-traffic-class5') dwrr_traffic_class5.text = kwargs.pop('dwrr_traffic_class5') callback = kwargs.pop('callback', self._callback) return callback(config)
def load_remote(url, **kwargs): """ Load a mesh at a remote URL into a local trimesh object. This must be called explicitly rather than automatically from trimesh.load to ensure users don't accidentally make network requests. Parameters ------------ url : string URL containing mesh file **kwargs : passed to `load` """ # import here to keep requirement soft import requests # download the mesh response = requests.get(url) # wrap as file object file_obj = util.wrap_as_stream(response.content) # so loaders can access textures/etc resolver = visual.resolvers.WebResolver(url) # actually load loaded = load(file_obj=file_obj, file_type=url, resolver=resolver, **kwargs) return loaded
def function[load_remote, parameter[url]]: constant[ Load a mesh at a remote URL into a local trimesh object. This must be called explicitly rather than automatically from trimesh.load to ensure users don't accidentally make network requests. Parameters ------------ url : string URL containing mesh file **kwargs : passed to `load` ] import module[requests] variable[response] assign[=] call[name[requests].get, parameter[name[url]]] variable[file_obj] assign[=] call[name[util].wrap_as_stream, parameter[name[response].content]] variable[resolver] assign[=] call[name[visual].resolvers.WebResolver, parameter[name[url]]] variable[loaded] assign[=] call[name[load], parameter[]] return[name[loaded]]
keyword[def] identifier[load_remote] ( identifier[url] ,** identifier[kwargs] ): literal[string] keyword[import] identifier[requests] identifier[response] = identifier[requests] . identifier[get] ( identifier[url] ) identifier[file_obj] = identifier[util] . identifier[wrap_as_stream] ( identifier[response] . identifier[content] ) identifier[resolver] = identifier[visual] . identifier[resolvers] . identifier[WebResolver] ( identifier[url] ) identifier[loaded] = identifier[load] ( identifier[file_obj] = identifier[file_obj] , identifier[file_type] = identifier[url] , identifier[resolver] = identifier[resolver] , ** identifier[kwargs] ) keyword[return] identifier[loaded]
def load_remote(url, **kwargs): """ Load a mesh at a remote URL into a local trimesh object. This must be called explicitly rather than automatically from trimesh.load to ensure users don't accidentally make network requests. Parameters ------------ url : string URL containing mesh file **kwargs : passed to `load` """ # import here to keep requirement soft import requests # download the mesh response = requests.get(url) # wrap as file object file_obj = util.wrap_as_stream(response.content) # so loaders can access textures/etc resolver = visual.resolvers.WebResolver(url) # actually load loaded = load(file_obj=file_obj, file_type=url, resolver=resolver, **kwargs) return loaded
def _power_mismatch_dc(self, buses, generators, B, Pbusinj, base_mva): """ Returns the power mismatch constraint (B*Va + Pg = Pd). """ nb, ng = len(buses), len(generators) # Negative bus-generator incidence matrix. gen_bus = array([g.bus._i for g in generators]) neg_Cg = csr_matrix((-ones(ng), (gen_bus, range(ng))), (nb, ng)) Amis = hstack([B, neg_Cg], format="csr") Pd = array([bus.p_demand for bus in buses]) Gs = array([bus.g_shunt for bus in buses]) bmis = -(Pd - Gs) / base_mva - Pbusinj return LinearConstraint("Pmis", Amis, bmis, bmis, ["Va", "Pg"])
def function[_power_mismatch_dc, parameter[self, buses, generators, B, Pbusinj, base_mva]]: constant[ Returns the power mismatch constraint (B*Va + Pg = Pd). ] <ast.Tuple object at 0x7da1b25194b0> assign[=] tuple[[<ast.Call object at 0x7da1b2518dc0>, <ast.Call object at 0x7da1b2519630>]] variable[gen_bus] assign[=] call[name[array], parameter[<ast.ListComp object at 0x7da1b25196c0>]] variable[neg_Cg] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.UnaryOp object at 0x7da1b25183d0>, <ast.Tuple object at 0x7da1b251b0a0>]], tuple[[<ast.Name object at 0x7da1b251ada0>, <ast.Name object at 0x7da1b251abf0>]]]] variable[Amis] assign[=] call[name[hstack], parameter[list[[<ast.Name object at 0x7da1b251afb0>, <ast.Name object at 0x7da1b251af20>]]]] variable[Pd] assign[=] call[name[array], parameter[<ast.ListComp object at 0x7da1b251a770>]] variable[Gs] assign[=] call[name[array], parameter[<ast.ListComp object at 0x7da1b2518460>]] variable[bmis] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b25184c0> / name[base_mva]] - name[Pbusinj]] return[call[name[LinearConstraint], parameter[constant[Pmis], name[Amis], name[bmis], name[bmis], list[[<ast.Constant object at 0x7da1b2518220>, <ast.Constant object at 0x7da1b25193f0>]]]]]
keyword[def] identifier[_power_mismatch_dc] ( identifier[self] , identifier[buses] , identifier[generators] , identifier[B] , identifier[Pbusinj] , identifier[base_mva] ): literal[string] identifier[nb] , identifier[ng] = identifier[len] ( identifier[buses] ), identifier[len] ( identifier[generators] ) identifier[gen_bus] = identifier[array] ([ identifier[g] . identifier[bus] . identifier[_i] keyword[for] identifier[g] keyword[in] identifier[generators] ]) identifier[neg_Cg] = identifier[csr_matrix] ((- identifier[ones] ( identifier[ng] ),( identifier[gen_bus] , identifier[range] ( identifier[ng] ))),( identifier[nb] , identifier[ng] )) identifier[Amis] = identifier[hstack] ([ identifier[B] , identifier[neg_Cg] ], identifier[format] = literal[string] ) identifier[Pd] = identifier[array] ([ identifier[bus] . identifier[p_demand] keyword[for] identifier[bus] keyword[in] identifier[buses] ]) identifier[Gs] = identifier[array] ([ identifier[bus] . identifier[g_shunt] keyword[for] identifier[bus] keyword[in] identifier[buses] ]) identifier[bmis] =-( identifier[Pd] - identifier[Gs] )/ identifier[base_mva] - identifier[Pbusinj] keyword[return] identifier[LinearConstraint] ( literal[string] , identifier[Amis] , identifier[bmis] , identifier[bmis] ,[ literal[string] , literal[string] ])
def _power_mismatch_dc(self, buses, generators, B, Pbusinj, base_mva): """ Returns the power mismatch constraint (B*Va + Pg = Pd). """ (nb, ng) = (len(buses), len(generators)) # Negative bus-generator incidence matrix. gen_bus = array([g.bus._i for g in generators]) neg_Cg = csr_matrix((-ones(ng), (gen_bus, range(ng))), (nb, ng)) Amis = hstack([B, neg_Cg], format='csr') Pd = array([bus.p_demand for bus in buses]) Gs = array([bus.g_shunt for bus in buses]) bmis = -(Pd - Gs) / base_mva - Pbusinj return LinearConstraint('Pmis', Amis, bmis, bmis, ['Va', 'Pg'])
def merge_base_attrs(attrs): """ :param dict attrs: If one of the attrs is named "base\_", assume that attribute is an instance of SimpleModel mapped on a Postgresql composite type, and that the base\_ instance is of a superclass of this class. Expand the attributes of the base\_ type and assign to class attributes. psycopg2's type casting uses namedtuple() and that forbids a name to start with underscore, so we end it with _ instead """ base = attrs.pop('base_', None) if base: d_out("SimpleModel.merge_base_attrs: base.table={0}".format(base.table)) for name in base.table: attrs[name] = base.__dict__[name]
def function[merge_base_attrs, parameter[attrs]]: constant[ :param dict attrs: If one of the attrs is named "base\_", assume that attribute is an instance of SimpleModel mapped on a Postgresql composite type, and that the base\_ instance is of a superclass of this class. Expand the attributes of the base\_ type and assign to class attributes. psycopg2's type casting uses namedtuple() and that forbids a name to start with underscore, so we end it with _ instead ] variable[base] assign[=] call[name[attrs].pop, parameter[constant[base_], constant[None]]] if name[base] begin[:] call[name[d_out], parameter[call[constant[SimpleModel.merge_base_attrs: base.table={0}].format, parameter[name[base].table]]]] for taget[name[name]] in starred[name[base].table] begin[:] call[name[attrs]][name[name]] assign[=] call[name[base].__dict__][name[name]]
keyword[def] identifier[merge_base_attrs] ( identifier[attrs] ): literal[string] identifier[base] = identifier[attrs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[base] : identifier[d_out] ( literal[string] . identifier[format] ( identifier[base] . identifier[table] )) keyword[for] identifier[name] keyword[in] identifier[base] . identifier[table] : identifier[attrs] [ identifier[name] ]= identifier[base] . identifier[__dict__] [ identifier[name] ]
def merge_base_attrs(attrs): """ :param dict attrs: If one of the attrs is named "base\\_", assume that attribute is an instance of SimpleModel mapped on a Postgresql composite type, and that the base\\_ instance is of a superclass of this class. Expand the attributes of the base\\_ type and assign to class attributes. psycopg2's type casting uses namedtuple() and that forbids a name to start with underscore, so we end it with _ instead """ base = attrs.pop('base_', None) if base: d_out('SimpleModel.merge_base_attrs: base.table={0}'.format(base.table)) for name in base.table: attrs[name] = base.__dict__[name] # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]]
async def getLaunchSpecs(self, *args, **kwargs): """ Get All Launch Specifications for WorkerType This method returns a preview of all possible launch specifications that this worker type definition could submit to EC2. It is used to test worker types, nothing more **This API end-point is experimental and may be subject to change without warning.** This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#`` This method is ``experimental`` """ return await self._makeApiCall(self.funcinfo["getLaunchSpecs"], *args, **kwargs)
<ast.AsyncFunctionDef object at 0x7da18f722fb0>
keyword[async] keyword[def] identifier[getLaunchSpecs] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] )
async def getLaunchSpecs(self, *args, **kwargs): """ Get All Launch Specifications for WorkerType This method returns a preview of all possible launch specifications that this worker type definition could submit to EC2. It is used to test worker types, nothing more **This API end-point is experimental and may be subject to change without warning.** This method gives output: ``http://schemas.taskcluster.net/aws-provisioner/v1/get-launch-specs-response.json#`` This method is ``experimental`` """ return await self._makeApiCall(self.funcinfo['getLaunchSpecs'], *args, **kwargs)
def animate(self, **kwargs): """ Animates the surface. This function only animates the triangulated surface. There will be no other elements, such as control points grid or bounding box. Keyword arguments: * ``colormap``: applies colormap to the surface Colormaps are a visualization feature of Matplotlib. They can be used for several types of surface plots via the following import statement: ``from matplotlib import cm`` The following link displays the list of Matplolib colormaps and some examples on colormaps: https://matplotlib.org/tutorials/colors/colormaps.html """ # Calling parent render function super(VisSurface, self).render(**kwargs) # Colormaps surf_cmaps = kwargs.get('colormap', None) # Initialize variables tri_idxs = [] vert_coords = [] trisurf_params = [] frames = [] frames_tris = [] num_vertices = 0 # Start plotting of the surface and the control points grid fig = plt.figure(figsize=self.vconf.figure_size, dpi=self.vconf.figure_dpi) ax = Axes3D(fig) # Start plotting surf_count = 0 for plot in self._plots: # Plot evaluated points if plot['type'] == 'evalpts' and self.vconf.display_evalpts: # Use internal triangulation algorithm instead of Qhull (MPL default) verts = plot['ptsarr'][0] tris = plot['ptsarr'][1] # Extract zero-indexed vertex number list tri_idxs += [[ti + num_vertices for ti in tri.data] for tri in tris] # Extract vertex coordinates vert_coords += [vert.data for vert in verts] # Update number of vertices num_vertices = len(vert_coords) # Determine the color or the colormap of the triangulated plot params = {} if surf_cmaps: try: params['cmap'] = surf_cmaps[surf_count] surf_count += 1 except IndexError: params['color'] = plot['color'] else: params['color'] = plot['color'] trisurf_params += [params for _ in range(len(tris))] # Pre-processing for the animation pts = np.array(vert_coords, dtype=self.vconf.dtype) # Create the frames (Artists) for tidx, pidx in zip(tri_idxs, trisurf_params): frames_tris.append(tidx) # Create MPL Triangulation 
object triangulation = mpltri.Triangulation(pts[:, 0], pts[:, 1], triangles=frames_tris) # Use custom Triangulation object and the choice of color/colormap to plot the surface p3df = ax.plot_trisurf(triangulation, pts[:, 2], alpha=self.vconf.alpha, **pidx) # Add to frames list frames.append([p3df]) # Create MPL ArtistAnimation ani = animation.ArtistAnimation(fig, frames, interval=100, blit=True, repeat_delay=1000) # Remove axes if not self.vconf.display_axes: plt.axis('off') # Set axes equal if self.vconf.axes_equal: self.vconf.set_axes_equal(ax) # Axis labels if self.vconf.display_labels: ax.set_xlabel('x') ax.set_ylabel('y') ax.set_zlabel('z') # Process keyword arguments fig_filename = kwargs.get('fig_save_as', None) fig_display = kwargs.get('display_plot', True) # Display the plot if fig_display: plt.show() else: fig_filename = self.vconf.figure_image_filename if fig_filename is None else fig_filename # Save the figure self.vconf.save_figure_as(fig, fig_filename) # Return the figure object return fig
def function[animate, parameter[self]]: constant[ Animates the surface. This function only animates the triangulated surface. There will be no other elements, such as control points grid or bounding box. Keyword arguments: * ``colormap``: applies colormap to the surface Colormaps are a visualization feature of Matplotlib. They can be used for several types of surface plots via the following import statement: ``from matplotlib import cm`` The following link displays the list of Matplolib colormaps and some examples on colormaps: https://matplotlib.org/tutorials/colors/colormaps.html ] call[call[name[super], parameter[name[VisSurface], name[self]]].render, parameter[]] variable[surf_cmaps] assign[=] call[name[kwargs].get, parameter[constant[colormap], constant[None]]] variable[tri_idxs] assign[=] list[[]] variable[vert_coords] assign[=] list[[]] variable[trisurf_params] assign[=] list[[]] variable[frames] assign[=] list[[]] variable[frames_tris] assign[=] list[[]] variable[num_vertices] assign[=] constant[0] variable[fig] assign[=] call[name[plt].figure, parameter[]] variable[ax] assign[=] call[name[Axes3D], parameter[name[fig]]] variable[surf_count] assign[=] constant[0] for taget[name[plot]] in starred[name[self]._plots] begin[:] if <ast.BoolOp object at 0x7da1b169ba30> begin[:] variable[verts] assign[=] call[call[name[plot]][constant[ptsarr]]][constant[0]] variable[tris] assign[=] call[call[name[plot]][constant[ptsarr]]][constant[1]] <ast.AugAssign object at 0x7da1b1720610> <ast.AugAssign object at 0x7da1b1720c40> variable[num_vertices] assign[=] call[name[len], parameter[name[vert_coords]]] variable[params] assign[=] dictionary[[], []] if name[surf_cmaps] begin[:] <ast.Try object at 0x7da1b180d600> <ast.AugAssign object at 0x7da1b17b6ce0> variable[pts] assign[=] call[name[np].array, parameter[name[vert_coords]]] for taget[tuple[[<ast.Name object at 0x7da1b17b7700>, <ast.Name object at 0x7da1b17b62f0>]]] in starred[call[name[zip], parameter[name[tri_idxs], 
name[trisurf_params]]]] begin[:] call[name[frames_tris].append, parameter[name[tidx]]] variable[triangulation] assign[=] call[name[mpltri].Triangulation, parameter[call[name[pts]][tuple[[<ast.Slice object at 0x7da1b17b77c0>, <ast.Constant object at 0x7da1b17b5480>]]], call[name[pts]][tuple[[<ast.Slice object at 0x7da1b17b52d0>, <ast.Constant object at 0x7da1b17b4040>]]]]] variable[p3df] assign[=] call[name[ax].plot_trisurf, parameter[name[triangulation], call[name[pts]][tuple[[<ast.Slice object at 0x7da1b17b71c0>, <ast.Constant object at 0x7da1b17b7490>]]]]] call[name[frames].append, parameter[list[[<ast.Name object at 0x7da1b17b4340>]]]] variable[ani] assign[=] call[name[animation].ArtistAnimation, parameter[name[fig], name[frames]]] if <ast.UnaryOp object at 0x7da1b17b68f0> begin[:] call[name[plt].axis, parameter[constant[off]]] if name[self].vconf.axes_equal begin[:] call[name[self].vconf.set_axes_equal, parameter[name[ax]]] if name[self].vconf.display_labels begin[:] call[name[ax].set_xlabel, parameter[constant[x]]] call[name[ax].set_ylabel, parameter[constant[y]]] call[name[ax].set_zlabel, parameter[constant[z]]] variable[fig_filename] assign[=] call[name[kwargs].get, parameter[constant[fig_save_as], constant[None]]] variable[fig_display] assign[=] call[name[kwargs].get, parameter[constant[display_plot], constant[True]]] if name[fig_display] begin[:] call[name[plt].show, parameter[]] call[name[self].vconf.save_figure_as, parameter[name[fig], name[fig_filename]]] return[name[fig]]
keyword[def] identifier[animate] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[super] ( identifier[VisSurface] , identifier[self] ). identifier[render] (** identifier[kwargs] ) identifier[surf_cmaps] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[tri_idxs] =[] identifier[vert_coords] =[] identifier[trisurf_params] =[] identifier[frames] =[] identifier[frames_tris] =[] identifier[num_vertices] = literal[int] identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[self] . identifier[vconf] . identifier[figure_size] , identifier[dpi] = identifier[self] . identifier[vconf] . identifier[figure_dpi] ) identifier[ax] = identifier[Axes3D] ( identifier[fig] ) identifier[surf_count] = literal[int] keyword[for] identifier[plot] keyword[in] identifier[self] . identifier[_plots] : keyword[if] identifier[plot] [ literal[string] ]== literal[string] keyword[and] identifier[self] . identifier[vconf] . identifier[display_evalpts] : identifier[verts] = identifier[plot] [ literal[string] ][ literal[int] ] identifier[tris] = identifier[plot] [ literal[string] ][ literal[int] ] identifier[tri_idxs] +=[[ identifier[ti] + identifier[num_vertices] keyword[for] identifier[ti] keyword[in] identifier[tri] . identifier[data] ] keyword[for] identifier[tri] keyword[in] identifier[tris] ] identifier[vert_coords] +=[ identifier[vert] . 
identifier[data] keyword[for] identifier[vert] keyword[in] identifier[verts] ] identifier[num_vertices] = identifier[len] ( identifier[vert_coords] ) identifier[params] ={} keyword[if] identifier[surf_cmaps] : keyword[try] : identifier[params] [ literal[string] ]= identifier[surf_cmaps] [ identifier[surf_count] ] identifier[surf_count] += literal[int] keyword[except] identifier[IndexError] : identifier[params] [ literal[string] ]= identifier[plot] [ literal[string] ] keyword[else] : identifier[params] [ literal[string] ]= identifier[plot] [ literal[string] ] identifier[trisurf_params] +=[ identifier[params] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[tris] ))] identifier[pts] = identifier[np] . identifier[array] ( identifier[vert_coords] , identifier[dtype] = identifier[self] . identifier[vconf] . identifier[dtype] ) keyword[for] identifier[tidx] , identifier[pidx] keyword[in] identifier[zip] ( identifier[tri_idxs] , identifier[trisurf_params] ): identifier[frames_tris] . identifier[append] ( identifier[tidx] ) identifier[triangulation] = identifier[mpltri] . identifier[Triangulation] ( identifier[pts] [:, literal[int] ], identifier[pts] [:, literal[int] ], identifier[triangles] = identifier[frames_tris] ) identifier[p3df] = identifier[ax] . identifier[plot_trisurf] ( identifier[triangulation] , identifier[pts] [:, literal[int] ], identifier[alpha] = identifier[self] . identifier[vconf] . identifier[alpha] ,** identifier[pidx] ) identifier[frames] . identifier[append] ([ identifier[p3df] ]) identifier[ani] = identifier[animation] . identifier[ArtistAnimation] ( identifier[fig] , identifier[frames] , identifier[interval] = literal[int] , identifier[blit] = keyword[True] , identifier[repeat_delay] = literal[int] ) keyword[if] keyword[not] identifier[self] . identifier[vconf] . identifier[display_axes] : identifier[plt] . identifier[axis] ( literal[string] ) keyword[if] identifier[self] . identifier[vconf] . 
identifier[axes_equal] : identifier[self] . identifier[vconf] . identifier[set_axes_equal] ( identifier[ax] ) keyword[if] identifier[self] . identifier[vconf] . identifier[display_labels] : identifier[ax] . identifier[set_xlabel] ( literal[string] ) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[ax] . identifier[set_zlabel] ( literal[string] ) identifier[fig_filename] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[fig_display] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ) keyword[if] identifier[fig_display] : identifier[plt] . identifier[show] () keyword[else] : identifier[fig_filename] = identifier[self] . identifier[vconf] . identifier[figure_image_filename] keyword[if] identifier[fig_filename] keyword[is] keyword[None] keyword[else] identifier[fig_filename] identifier[self] . identifier[vconf] . identifier[save_figure_as] ( identifier[fig] , identifier[fig_filename] ) keyword[return] identifier[fig]
def animate(self, **kwargs): """ Animates the surface. This function only animates the triangulated surface. There will be no other elements, such as control points grid or bounding box. Keyword arguments: * ``colormap``: applies colormap to the surface Colormaps are a visualization feature of Matplotlib. They can be used for several types of surface plots via the following import statement: ``from matplotlib import cm`` The following link displays the list of Matplolib colormaps and some examples on colormaps: https://matplotlib.org/tutorials/colors/colormaps.html """ # Calling parent render function super(VisSurface, self).render(**kwargs) # Colormaps surf_cmaps = kwargs.get('colormap', None) # Initialize variables tri_idxs = [] vert_coords = [] trisurf_params = [] frames = [] frames_tris = [] num_vertices = 0 # Start plotting of the surface and the control points grid fig = plt.figure(figsize=self.vconf.figure_size, dpi=self.vconf.figure_dpi) ax = Axes3D(fig) # Start plotting surf_count = 0 for plot in self._plots: # Plot evaluated points if plot['type'] == 'evalpts' and self.vconf.display_evalpts: # Use internal triangulation algorithm instead of Qhull (MPL default) verts = plot['ptsarr'][0] tris = plot['ptsarr'][1] # Extract zero-indexed vertex number list tri_idxs += [[ti + num_vertices for ti in tri.data] for tri in tris] # Extract vertex coordinates vert_coords += [vert.data for vert in verts] # Update number of vertices num_vertices = len(vert_coords) # Determine the color or the colormap of the triangulated plot params = {} if surf_cmaps: try: params['cmap'] = surf_cmaps[surf_count] surf_count += 1 # depends on [control=['try'], data=[]] except IndexError: params['color'] = plot['color'] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: params['color'] = plot['color'] trisurf_params += [params for _ in range(len(tris))] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plot']] # 
Pre-processing for the animation pts = np.array(vert_coords, dtype=self.vconf.dtype) # Create the frames (Artists) for (tidx, pidx) in zip(tri_idxs, trisurf_params): frames_tris.append(tidx) # Create MPL Triangulation object triangulation = mpltri.Triangulation(pts[:, 0], pts[:, 1], triangles=frames_tris) # Use custom Triangulation object and the choice of color/colormap to plot the surface p3df = ax.plot_trisurf(triangulation, pts[:, 2], alpha=self.vconf.alpha, **pidx) # Add to frames list frames.append([p3df]) # depends on [control=['for'], data=[]] # Create MPL ArtistAnimation ani = animation.ArtistAnimation(fig, frames, interval=100, blit=True, repeat_delay=1000) # Remove axes if not self.vconf.display_axes: plt.axis('off') # depends on [control=['if'], data=[]] # Set axes equal if self.vconf.axes_equal: self.vconf.set_axes_equal(ax) # depends on [control=['if'], data=[]] # Axis labels if self.vconf.display_labels: ax.set_xlabel('x') ax.set_ylabel('y') ax.set_zlabel('z') # depends on [control=['if'], data=[]] # Process keyword arguments fig_filename = kwargs.get('fig_save_as', None) fig_display = kwargs.get('display_plot', True) # Display the plot if fig_display: plt.show() # depends on [control=['if'], data=[]] else: fig_filename = self.vconf.figure_image_filename if fig_filename is None else fig_filename # Save the figure self.vconf.save_figure_as(fig, fig_filename) # Return the figure object return fig
def fetch_defense_data(self): """Lazy initialization of data necessary to execute defenses.""" if self.defenses_data_initialized: return logging.info('Fetching defense data from datastore') # init data from datastore self.submissions.init_from_datastore() self.dataset_batches.init_from_datastore() self.adv_batches.init_from_datastore() # read dataset metadata self.read_dataset_metadata() # mark as initialized self.defenses_data_initialized = True
def function[fetch_defense_data, parameter[self]]: constant[Lazy initialization of data necessary to execute defenses.] if name[self].defenses_data_initialized begin[:] return[None] call[name[logging].info, parameter[constant[Fetching defense data from datastore]]] call[name[self].submissions.init_from_datastore, parameter[]] call[name[self].dataset_batches.init_from_datastore, parameter[]] call[name[self].adv_batches.init_from_datastore, parameter[]] call[name[self].read_dataset_metadata, parameter[]] name[self].defenses_data_initialized assign[=] constant[True]
keyword[def] identifier[fetch_defense_data] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[defenses_data_initialized] : keyword[return] identifier[logging] . identifier[info] ( literal[string] ) identifier[self] . identifier[submissions] . identifier[init_from_datastore] () identifier[self] . identifier[dataset_batches] . identifier[init_from_datastore] () identifier[self] . identifier[adv_batches] . identifier[init_from_datastore] () identifier[self] . identifier[read_dataset_metadata] () identifier[self] . identifier[defenses_data_initialized] = keyword[True]
def fetch_defense_data(self): """Lazy initialization of data necessary to execute defenses.""" if self.defenses_data_initialized: return # depends on [control=['if'], data=[]] logging.info('Fetching defense data from datastore') # init data from datastore self.submissions.init_from_datastore() self.dataset_batches.init_from_datastore() self.adv_batches.init_from_datastore() # read dataset metadata self.read_dataset_metadata() # mark as initialized self.defenses_data_initialized = True
def binary_search(a, k): """ Do a binary search in an array of objects ordered by '.key' returns the largest index for which: a[i].key <= k like c++: a.upperbound(k)-- """ first, last = 0, len(a) while first < last: mid = (first + last) >> 1 if k < a[mid].key: last = mid else: first = mid + 1 return first - 1
def function[binary_search, parameter[a, k]]: constant[ Do a binary search in an array of objects ordered by '.key' returns the largest index for which: a[i].key <= k like c++: a.upperbound(k)-- ] <ast.Tuple object at 0x7da20e9b0700> assign[=] tuple[[<ast.Constant object at 0x7da20e9b0640>, <ast.Call object at 0x7da20e9b14b0>]] while compare[name[first] less[<] name[last]] begin[:] variable[mid] assign[=] binary_operation[binary_operation[name[first] + name[last]] <ast.RShift object at 0x7da2590d6a40> constant[1]] if compare[name[k] less[<] call[name[a]][name[mid]].key] begin[:] variable[last] assign[=] name[mid] return[binary_operation[name[first] - constant[1]]]
keyword[def] identifier[binary_search] ( identifier[a] , identifier[k] ): literal[string] identifier[first] , identifier[last] = literal[int] , identifier[len] ( identifier[a] ) keyword[while] identifier[first] < identifier[last] : identifier[mid] =( identifier[first] + identifier[last] )>> literal[int] keyword[if] identifier[k] < identifier[a] [ identifier[mid] ]. identifier[key] : identifier[last] = identifier[mid] keyword[else] : identifier[first] = identifier[mid] + literal[int] keyword[return] identifier[first] - literal[int]
def binary_search(a, k): """ Do a binary search in an array of objects ordered by '.key' returns the largest index for which: a[i].key <= k like c++: a.upperbound(k)-- """ (first, last) = (0, len(a)) while first < last: mid = first + last >> 1 if k < a[mid].key: last = mid # depends on [control=['if'], data=[]] else: first = mid + 1 # depends on [control=['while'], data=['first', 'last']] return first - 1
def unpack_4to8(data): """ Promote 2-bit unisgned data into 8-bit unsigned data. Args: data: Numpy array with dtype == uint8 Notes: # The process is this: # ABCDEFGH [Bits of one 4+4-bit value] # 00000000ABCDEFGH [astype(uint16)] # 0000ABCDEFGH0000 [<< 4] # 0000ABCDXXXXEFGH [bitwise 'or' of previous two lines] # 0000111100001111 [0x0F0F] # 0000ABCD0000EFGH [bitwise 'and' of previous two lines] # ABCD0000EFGH0000 [<< 4] # which effectively pads the two 4-bit values with zeros on the right # Note: This technique assumes LSB-first ordering """ tmpdata = data.astype(np.int16) # np.empty(upshape, dtype=np.int16) tmpdata = (tmpdata | (tmpdata << 4)) & 0x0F0F # tmpdata = tmpdata << 4 # Shift into high bits to avoid needing to sign extend updata = tmpdata.byteswap() return updata.view(data.dtype)
def function[unpack_4to8, parameter[data]]: constant[ Promote 2-bit unisgned data into 8-bit unsigned data. Args: data: Numpy array with dtype == uint8 Notes: # The process is this: # ABCDEFGH [Bits of one 4+4-bit value] # 00000000ABCDEFGH [astype(uint16)] # 0000ABCDEFGH0000 [<< 4] # 0000ABCDXXXXEFGH [bitwise 'or' of previous two lines] # 0000111100001111 [0x0F0F] # 0000ABCD0000EFGH [bitwise 'and' of previous two lines] # ABCD0000EFGH0000 [<< 4] # which effectively pads the two 4-bit values with zeros on the right # Note: This technique assumes LSB-first ordering ] variable[tmpdata] assign[=] call[name[data].astype, parameter[name[np].int16]] variable[tmpdata] assign[=] binary_operation[binary_operation[name[tmpdata] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[tmpdata] <ast.LShift object at 0x7da2590d69e0> constant[4]]] <ast.BitAnd object at 0x7da2590d6b60> constant[3855]] variable[updata] assign[=] call[name[tmpdata].byteswap, parameter[]] return[call[name[updata].view, parameter[name[data].dtype]]]
keyword[def] identifier[unpack_4to8] ( identifier[data] ): literal[string] identifier[tmpdata] = identifier[data] . identifier[astype] ( identifier[np] . identifier[int16] ) identifier[tmpdata] =( identifier[tmpdata] |( identifier[tmpdata] << literal[int] ))& literal[int] identifier[updata] = identifier[tmpdata] . identifier[byteswap] () keyword[return] identifier[updata] . identifier[view] ( identifier[data] . identifier[dtype] )
def unpack_4to8(data): """ Promote 2-bit unisgned data into 8-bit unsigned data. Args: data: Numpy array with dtype == uint8 Notes: # The process is this: # ABCDEFGH [Bits of one 4+4-bit value] # 00000000ABCDEFGH [astype(uint16)] # 0000ABCDEFGH0000 [<< 4] # 0000ABCDXXXXEFGH [bitwise 'or' of previous two lines] # 0000111100001111 [0x0F0F] # 0000ABCD0000EFGH [bitwise 'and' of previous two lines] # ABCD0000EFGH0000 [<< 4] # which effectively pads the two 4-bit values with zeros on the right # Note: This technique assumes LSB-first ordering """ tmpdata = data.astype(np.int16) # np.empty(upshape, dtype=np.int16) tmpdata = (tmpdata | tmpdata << 4) & 3855 # tmpdata = tmpdata << 4 # Shift into high bits to avoid needing to sign extend updata = tmpdata.byteswap() return updata.view(data.dtype)
def hw(self, hw): """ Hardware operations """ if hw.upper() == "INIT": self._raw(HW_INIT) elif hw.upper() == "SELECT": self._raw(HW_SELECT) elif hw.upper() == "RESET": self._raw(HW_RESET) else: # DEFAULT: DOES NOTHING pass
def function[hw, parameter[self, hw]]: constant[ Hardware operations ] if compare[call[name[hw].upper, parameter[]] equal[==] constant[INIT]] begin[:] call[name[self]._raw, parameter[name[HW_INIT]]]
keyword[def] identifier[hw] ( identifier[self] , identifier[hw] ): literal[string] keyword[if] identifier[hw] . identifier[upper] ()== literal[string] : identifier[self] . identifier[_raw] ( identifier[HW_INIT] ) keyword[elif] identifier[hw] . identifier[upper] ()== literal[string] : identifier[self] . identifier[_raw] ( identifier[HW_SELECT] ) keyword[elif] identifier[hw] . identifier[upper] ()== literal[string] : identifier[self] . identifier[_raw] ( identifier[HW_RESET] ) keyword[else] : keyword[pass]
def hw(self, hw): """ Hardware operations """ if hw.upper() == 'INIT': self._raw(HW_INIT) # depends on [control=['if'], data=[]] elif hw.upper() == 'SELECT': self._raw(HW_SELECT) # depends on [control=['if'], data=[]] elif hw.upper() == 'RESET': self._raw(HW_RESET) # depends on [control=['if'], data=[]] else: # DEFAULT: DOES NOTHING pass
def run_script(scriptfile): '''run a script file''' try: f = open(scriptfile, mode='r') except Exception: return mpstate.console.writeln("Running script %s" % scriptfile) for line in f: line = line.strip() if line == "" or line.startswith('#'): continue if line.startswith('@'): line = line[1:] else: mpstate.console.writeln("-> %s" % line) process_stdin(line) f.close()
def function[run_script, parameter[scriptfile]]: constant[run a script file] <ast.Try object at 0x7da2041db7c0> call[name[mpstate].console.writeln, parameter[binary_operation[constant[Running script %s] <ast.Mod object at 0x7da2590d6920> name[scriptfile]]]] for taget[name[line]] in starred[name[f]] begin[:] variable[line] assign[=] call[name[line].strip, parameter[]] if <ast.BoolOp object at 0x7da204345510> begin[:] continue if call[name[line].startswith, parameter[constant[@]]] begin[:] variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da204346290>] call[name[process_stdin], parameter[name[line]]] call[name[f].close, parameter[]]
keyword[def] identifier[run_script] ( identifier[scriptfile] ): literal[string] keyword[try] : identifier[f] = identifier[open] ( identifier[scriptfile] , identifier[mode] = literal[string] ) keyword[except] identifier[Exception] : keyword[return] identifier[mpstate] . identifier[console] . identifier[writeln] ( literal[string] % identifier[scriptfile] ) keyword[for] identifier[line] keyword[in] identifier[f] : identifier[line] = identifier[line] . identifier[strip] () keyword[if] identifier[line] == literal[string] keyword[or] identifier[line] . identifier[startswith] ( literal[string] ): keyword[continue] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[line] = identifier[line] [ literal[int] :] keyword[else] : identifier[mpstate] . identifier[console] . identifier[writeln] ( literal[string] % identifier[line] ) identifier[process_stdin] ( identifier[line] ) identifier[f] . identifier[close] ()
def run_script(scriptfile): """run a script file""" try: f = open(scriptfile, mode='r') # depends on [control=['try'], data=[]] except Exception: return # depends on [control=['except'], data=[]] mpstate.console.writeln('Running script %s' % scriptfile) for line in f: line = line.strip() if line == '' or line.startswith('#'): continue # depends on [control=['if'], data=[]] if line.startswith('@'): line = line[1:] # depends on [control=['if'], data=[]] else: mpstate.console.writeln('-> %s' % line) process_stdin(line) # depends on [control=['for'], data=['line']] f.close()
def select_distinct_field(col, field_or_fields, filters=None): """Select distinct value or combination of values of single or multiple fields. :params fields: str or list of str. :return data: list of list. **中文文档** 选择多列中出现过的所有可能的排列组合。 """ fields = _preprocess_field_or_fields(field_or_fields) if filters is None: filters = dict() if len(fields) == 1: key = fields[0] data = list(col.find(filters).distinct(key)) return data else: pipeline = [ { "$match": filters }, { "$group": { "_id": {key: "$" + key for key in fields}, }, }, ] data = list() for doc in col.aggregate(pipeline): # doc = {"_id": {"a": 0, "b": 0}} ... data.append([doc["_id"][key] for key in fields]) return data
def function[select_distinct_field, parameter[col, field_or_fields, filters]]: constant[Select distinct value or combination of values of single or multiple fields. :params fields: str or list of str. :return data: list of list. **中文文档** 选择多列中出现过的所有可能的排列组合。 ] variable[fields] assign[=] call[name[_preprocess_field_or_fields], parameter[name[field_or_fields]]] if compare[name[filters] is constant[None]] begin[:] variable[filters] assign[=] call[name[dict], parameter[]] if compare[call[name[len], parameter[name[fields]]] equal[==] constant[1]] begin[:] variable[key] assign[=] call[name[fields]][constant[0]] variable[data] assign[=] call[name[list], parameter[call[call[name[col].find, parameter[name[filters]]].distinct, parameter[name[key]]]]] return[name[data]]
keyword[def] identifier[select_distinct_field] ( identifier[col] , identifier[field_or_fields] , identifier[filters] = keyword[None] ): literal[string] identifier[fields] = identifier[_preprocess_field_or_fields] ( identifier[field_or_fields] ) keyword[if] identifier[filters] keyword[is] keyword[None] : identifier[filters] = identifier[dict] () keyword[if] identifier[len] ( identifier[fields] )== literal[int] : identifier[key] = identifier[fields] [ literal[int] ] identifier[data] = identifier[list] ( identifier[col] . identifier[find] ( identifier[filters] ). identifier[distinct] ( identifier[key] )) keyword[return] identifier[data] keyword[else] : identifier[pipeline] =[ { literal[string] : identifier[filters] }, { literal[string] :{ literal[string] :{ identifier[key] : literal[string] + identifier[key] keyword[for] identifier[key] keyword[in] identifier[fields] }, }, }, ] identifier[data] = identifier[list] () keyword[for] identifier[doc] keyword[in] identifier[col] . identifier[aggregate] ( identifier[pipeline] ): identifier[data] . identifier[append] ([ identifier[doc] [ literal[string] ][ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[fields] ]) keyword[return] identifier[data]
def select_distinct_field(col, field_or_fields, filters=None): """Select distinct value or combination of values of single or multiple fields. :params fields: str or list of str. :return data: list of list. **中文文档** 选择多列中出现过的所有可能的排列组合。 """ fields = _preprocess_field_or_fields(field_or_fields) if filters is None: filters = dict() # depends on [control=['if'], data=['filters']] if len(fields) == 1: key = fields[0] data = list(col.find(filters).distinct(key)) return data # depends on [control=['if'], data=[]] else: pipeline = [{'$match': filters}, {'$group': {'_id': {key: '$' + key for key in fields}}}] data = list() for doc in col.aggregate(pipeline): # doc = {"_id": {"a": 0, "b": 0}} ... data.append([doc['_id'][key] for key in fields]) # depends on [control=['for'], data=['doc']] return data
def patch_wave_header(body):
    """Prepend a RIFF/WAVE header to the given wave body.

    :param body: the wave content body; expected to be a ``bytearray``.
    :return: a ``bytearray`` of header + body, zero-padded to an even
        total length as the RIFF format requires.
    """
    body_len = len(body)
    # RIFF chunks must have even length; pad odd bodies by one byte.
    padded_len = body_len + (body_len % 2)
    total_len = WAVE_HEADER_LENGTH + padded_len

    header = copy.copy(WAVE_HEADER)
    # Bytes 4-7 of a RIFF header hold the total chunk size (little-endian).
    header[4:8] = bytearray(struct.pack('<I', total_len))
    # The data sub-chunk size follows the template header.
    header += bytearray(struct.pack('<I', body_len))

    result = header + body
    if body_len != padded_len:
        # Append the single padding byte for odd-length bodies.
        result = result + bytearray([0])
    return result
def function[patch_wave_header, parameter[body]]: constant[Patch header to the given wave body. :param body: the wave content body, it should be bytearray. ] variable[length] assign[=] call[name[len], parameter[name[body]]] variable[padded] assign[=] binary_operation[name[length] + binary_operation[name[length] <ast.Mod object at 0x7da2590d6920> constant[2]]] variable[total] assign[=] binary_operation[name[WAVE_HEADER_LENGTH] + name[padded]] variable[header] assign[=] call[name[copy].copy, parameter[name[WAVE_HEADER]]] call[name[header]][<ast.Slice object at 0x7da1b20bf760>] assign[=] call[name[bytearray], parameter[call[name[struct].pack, parameter[constant[<I], name[total]]]]] <ast.AugAssign object at 0x7da1b20bc430> variable[data] assign[=] binary_operation[name[header] + name[body]] if compare[name[length] not_equal[!=] name[padded]] begin[:] variable[data] assign[=] binary_operation[name[data] + call[name[bytearray], parameter[list[[<ast.Constant object at 0x7da1b20be3b0>]]]]] return[name[data]]
keyword[def] identifier[patch_wave_header] ( identifier[body] ): literal[string] identifier[length] = identifier[len] ( identifier[body] ) identifier[padded] = identifier[length] + identifier[length] % literal[int] identifier[total] = identifier[WAVE_HEADER_LENGTH] + identifier[padded] identifier[header] = identifier[copy] . identifier[copy] ( identifier[WAVE_HEADER] ) identifier[header] [ literal[int] : literal[int] ]= identifier[bytearray] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[total] )) identifier[header] += identifier[bytearray] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[length] )) identifier[data] = identifier[header] + identifier[body] keyword[if] identifier[length] != identifier[padded] : identifier[data] = identifier[data] + identifier[bytearray] ([ literal[int] ]) keyword[return] identifier[data]
def patch_wave_header(body): """Patch header to the given wave body. :param body: the wave content body, it should be bytearray. """ length = len(body) padded = length + length % 2 total = WAVE_HEADER_LENGTH + padded header = copy.copy(WAVE_HEADER) # fill the total length position header[4:8] = bytearray(struct.pack('<I', total)) header += bytearray(struct.pack('<I', length)) data = header + body # the total length is even if length != padded: data = data + bytearray([0]) # depends on [control=['if'], data=[]] return data
def XYZ_to_xyY(cobj, *args, **kwargs):
    """Convert a color from the XYZ space to the xyY space.

    :param cobj: a color object exposing ``xyz_x``, ``xyz_y``, ``xyz_z``,
        ``observer`` and ``illuminant`` attributes.
    :return: an :class:`xyYColor` with the same observer and illuminant.
    """
    denominator = cobj.xyz_x + cobj.xyz_y + cobj.xyz_z
    if denominator == 0.0:
        # Guard against division by zero for pure black (0, 0, 0).
        chroma_x, chroma_y = 0.0, 0.0
    else:
        chroma_x = cobj.xyz_x / denominator
        chroma_y = cobj.xyz_y / denominator
    # Luminance Y carries over unchanged from XYZ.
    return xyYColor(
        chroma_x, chroma_y, cobj.xyz_y,
        observer=cobj.observer, illuminant=cobj.illuminant)
def function[XYZ_to_xyY, parameter[cobj]]: constant[ Convert from XYZ to xyY. ] variable[xyz_sum] assign[=] binary_operation[binary_operation[name[cobj].xyz_x + name[cobj].xyz_y] + name[cobj].xyz_z] if compare[name[xyz_sum] equal[==] constant[0.0]] begin[:] variable[xyy_x] assign[=] constant[0.0] variable[xyy_y] assign[=] constant[0.0] variable[xyy_Y] assign[=] name[cobj].xyz_y return[call[name[xyYColor], parameter[name[xyy_x], name[xyy_y], name[xyy_Y]]]]
keyword[def] identifier[XYZ_to_xyY] ( identifier[cobj] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[xyz_sum] = identifier[cobj] . identifier[xyz_x] + identifier[cobj] . identifier[xyz_y] + identifier[cobj] . identifier[xyz_z] keyword[if] identifier[xyz_sum] == literal[int] : identifier[xyy_x] = literal[int] identifier[xyy_y] = literal[int] keyword[else] : identifier[xyy_x] = identifier[cobj] . identifier[xyz_x] / identifier[xyz_sum] identifier[xyy_y] = identifier[cobj] . identifier[xyz_y] / identifier[xyz_sum] identifier[xyy_Y] = identifier[cobj] . identifier[xyz_y] keyword[return] identifier[xyYColor] ( identifier[xyy_x] , identifier[xyy_y] , identifier[xyy_Y] , identifier[observer] = identifier[cobj] . identifier[observer] , identifier[illuminant] = identifier[cobj] . identifier[illuminant] )
def XYZ_to_xyY(cobj, *args, **kwargs): """ Convert from XYZ to xyY. """ xyz_sum = cobj.xyz_x + cobj.xyz_y + cobj.xyz_z # avoid division by zero if xyz_sum == 0.0: xyy_x = 0.0 xyy_y = 0.0 # depends on [control=['if'], data=[]] else: xyy_x = cobj.xyz_x / xyz_sum xyy_y = cobj.xyz_y / xyz_sum xyy_Y = cobj.xyz_y return xyYColor(xyy_x, xyy_y, xyy_Y, observer=cobj.observer, illuminant=cobj.illuminant)
def check_duplicate_options(self, section1, section2, raise_error=False):
    """
    Check for duplicate options in two sections, section1 and section2.
    Will return a list of the duplicate options.

    Parameters
    ----------
    section1 : string
        The name of the first section to compare
    section2 : string
        The name of the second section to compare
    raise_error : Boolean, optional (default=False)
        If True, raise an error if duplicates are present.

    Returns
    ----------
    duplicates : List
        List of duplicate options, in the order they appear in section1.

    Raises
    ------
    ValueError
        If either section is missing, or if duplicates exist and
        ``raise_error`` is True.
    """
    # Sanity checking: both sections must exist before comparing.
    if not self.has_section(section1):
        raise ValueError('Section %s not present in ConfigParser.'\
                         %(section1,) )
    if not self.has_section(section2):
        raise ValueError('Section %s not present in ConfigParser.'\
                         %(section2,) )

    items1 = self.options(section1)
    # Use a set for O(1) membership tests instead of scanning the
    # second section's list once per option of the first.
    items2 = set(self.options(section2))

    # Order of section1's options is preserved in the result.
    duplicates = [x for x in items1 if x in items2]

    if duplicates and raise_error:
        raise ValueError('The following options appear in both section ' +\
                         '%s and %s: %s' \
                         %(section1,section2,' '.join(duplicates)))

    return duplicates
def function[check_duplicate_options, parameter[self, section1, section2, raise_error]]: constant[ Check for duplicate options in two sections, section1 and section2. Will return a list of the duplicate options. Parameters ---------- section1 : string The name of the first section to compare section2 : string The name of the second section to compare raise_error : Boolean, optional (default=False) If True, raise an error if duplicates are present. Returns ---------- duplicates : List List of duplicate options ] if <ast.UnaryOp object at 0x7da1b1e73eb0> begin[:] <ast.Raise object at 0x7da1b1e73400> if <ast.UnaryOp object at 0x7da1b1e72ec0> begin[:] <ast.Raise object at 0x7da1b1e72d40> variable[items1] assign[=] call[name[self].options, parameter[name[section1]]] variable[items2] assign[=] call[name[self].options, parameter[name[section2]]] variable[duplicates] assign[=] <ast.ListComp object at 0x7da1b1e702b0> if <ast.BoolOp object at 0x7da1b1e71b10> begin[:] <ast.Raise object at 0x7da1b1e73dc0> return[name[duplicates]]
keyword[def] identifier[check_duplicate_options] ( identifier[self] , identifier[section1] , identifier[section2] , identifier[raise_error] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[has_section] ( identifier[section1] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[section1] ,)) keyword[if] keyword[not] identifier[self] . identifier[has_section] ( identifier[section2] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[section2] ,)) identifier[items1] = identifier[self] . identifier[options] ( identifier[section1] ) identifier[items2] = identifier[self] . identifier[options] ( identifier[section2] ) identifier[duplicates] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[items1] keyword[if] identifier[x] keyword[in] identifier[items2] ] keyword[if] identifier[duplicates] keyword[and] identifier[raise_error] : keyword[raise] identifier[ValueError] ( literal[string] + literal[string] %( identifier[section1] , identifier[section2] , literal[string] . identifier[join] ( identifier[duplicates] ))) keyword[return] identifier[duplicates]
def check_duplicate_options(self, section1, section2, raise_error=False): """ Check for duplicate options in two sections, section1 and section2. Will return a list of the duplicate options. Parameters ---------- section1 : string The name of the first section to compare section2 : string The name of the second section to compare raise_error : Boolean, optional (default=False) If True, raise an error if duplicates are present. Returns ---------- duplicates : List List of duplicate options """ # Sanity checking if not self.has_section(section1): raise ValueError('Section %s not present in ConfigParser.' % (section1,)) # depends on [control=['if'], data=[]] if not self.has_section(section2): raise ValueError('Section %s not present in ConfigParser.' % (section2,)) # depends on [control=['if'], data=[]] items1 = self.options(section1) items2 = self.options(section2) # The list comprehension here creates a list of all duplicate items duplicates = [x for x in items1 if x in items2] if duplicates and raise_error: raise ValueError('The following options appear in both section ' + '%s and %s: %s' % (section1, section2, ' '.join(duplicates))) # depends on [control=['if'], data=[]] return duplicates
def resolve_reference(ref):
    """
    Return the object pointed to by ``ref``.

    If ``ref`` is not a string or does not contain ``:``, it is returned as
    is. References must be in the form <modulename>:<varname> where
    <modulename> is the fully qualified module name and varname is the path
    to the variable inside that module.

    For example, "concurrent.futures:Future" would give you the
    :class:`~concurrent.futures.Future` class.

    :raises LookupError: if the reference could not be resolved

    """
    # Pass through anything that is not a "module:attr" reference string.
    if not isinstance(ref, str) or ':' not in ref:
        return ref

    module_name, attr_path = ref.split(':', 1)
    try:
        target = import_module(module_name)
    except ImportError as exc:
        raise LookupError(
            'error resolving reference {}: could not import module'.format(ref)) from exc

    # Walk the dotted attribute path inside the imported module.
    try:
        for attr in attr_path.split('.'):
            target = getattr(target, attr)
    except AttributeError:
        raise LookupError('error resolving reference {}: error looking up object'.format(ref))
    return target
def function[resolve_reference, parameter[ref]]: constant[ Return the object pointed to by ``ref``. If ``ref`` is not a string or does not contain ``:``, it is returned as is. References must be in the form <modulename>:<varname> where <modulename> is the fully qualified module name and varname is the path to the variable inside that module. For example, "concurrent.futures:Future" would give you the :class:`~concurrent.futures.Future` class. :raises LookupError: if the reference could not be resolved ] if <ast.BoolOp object at 0x7da1b26ae860> begin[:] return[name[ref]] <ast.Tuple object at 0x7da18dc045b0> assign[=] call[name[ref].split, parameter[constant[:], constant[1]]] <ast.Try object at 0x7da18dc058a0> <ast.Try object at 0x7da18dc05510>
keyword[def] identifier[resolve_reference] ( identifier[ref] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[ref] , identifier[str] ) keyword[or] literal[string] keyword[not] keyword[in] identifier[ref] : keyword[return] identifier[ref] identifier[modulename] , identifier[rest] = identifier[ref] . identifier[split] ( literal[string] , literal[int] ) keyword[try] : identifier[obj] = identifier[import_module] ( identifier[modulename] ) keyword[except] identifier[ImportError] keyword[as] identifier[e] : keyword[raise] identifier[LookupError] ( literal[string] . identifier[format] ( identifier[ref] )) keyword[from] identifier[e] keyword[try] : keyword[for] identifier[name] keyword[in] identifier[rest] . identifier[split] ( literal[string] ): identifier[obj] = identifier[getattr] ( identifier[obj] , identifier[name] ) keyword[return] identifier[obj] keyword[except] identifier[AttributeError] : keyword[raise] identifier[LookupError] ( literal[string] . identifier[format] ( identifier[ref] ))
def resolve_reference(ref): """ Return the object pointed to by ``ref``. If ``ref`` is not a string or does not contain ``:``, it is returned as is. References must be in the form <modulename>:<varname> where <modulename> is the fully qualified module name and varname is the path to the variable inside that module. For example, "concurrent.futures:Future" would give you the :class:`~concurrent.futures.Future` class. :raises LookupError: if the reference could not be resolved """ if not isinstance(ref, str) or ':' not in ref: return ref # depends on [control=['if'], data=[]] (modulename, rest) = ref.split(':', 1) try: obj = import_module(modulename) # depends on [control=['try'], data=[]] except ImportError as e: raise LookupError('error resolving reference {}: could not import module'.format(ref)) from e # depends on [control=['except'], data=['e']] try: for name in rest.split('.'): obj = getattr(obj, name) # depends on [control=['for'], data=['name']] return obj # depends on [control=['try'], data=[]] except AttributeError: raise LookupError('error resolving reference {}: error looking up object'.format(ref)) # depends on [control=['except'], data=[]]
def filter_only_significant(word):
    """Return *word* with insignificant trailing characters removed.

    Trailing punctuation and trailing parenthesised suffixes are
    stripped; a word that is entirely wrapped in parentheses is
    unwrapped first::

        >>> filter_only_significant(u'넥슨(코리아)')
        넥슨
        >>> filter_only_significant(u'메이플스토리...')
        메이플스토리

    """
    # Empty string (or other falsy input) is returned unchanged.
    if not word:
        return word
    # Unwrap a complete parenthesis, e.g. u'(foo)' -> treat as u'foo'.
    if word.startswith(u'(') and word.endswith(u')'):
        return filter_only_significant(word[1:-1])
    # Scan backwards from the end, stopping at the last significant char.
    x = len(word)
    while x > 0:
        x -= 1
        c = word[x]
        # Skip a complete trailing parenthesis group, e.g. u'foo(bar)'.
        if c == u')':
            m = INSIGNIFICANT_PARENTHESIS_PATTERN.search(word[:x + 1])
            if m is not None:
                # Jump to just before the matched group; the loop's
                # decrement then continues scanning from there.
                x = m.start()
                continue
        # Skip unreadable characters such as punctuations, based on the
        # character's Unicode general category.
        unicode_category = unicodedata.category(c)
        if not SIGNIFICANT_UNICODE_CATEGORY_PATTERN.match(unicode_category):
            continue
        break
    # NOTE(review): if every character is insignificant the loop exits
    # with x == 0 and the first character is still returned — presumably
    # acceptable for the intended inputs; confirm if u'...' -> u'.' is OK.
    return word[:x + 1]
def function[filter_only_significant, parameter[word]]: constant[Gets a word which removes insignificant letters at the end of the given word:: >>> pick_significant(u'넥슨(코리아)') 넥슨 >>> pick_significant(u'메이플스토리...') 메이플스토리 ] if <ast.UnaryOp object at 0x7da1afe8b9d0> begin[:] return[name[word]] if <ast.BoolOp object at 0x7da1afe895d0> begin[:] return[call[name[filter_only_significant], parameter[call[name[word]][<ast.Slice object at 0x7da1afe8b8b0>]]]] variable[x] assign[=] call[name[len], parameter[name[word]]] while compare[name[x] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da1affe70a0> variable[c] assign[=] call[name[word]][name[x]] if compare[name[c] equal[==] constant[)]] begin[:] variable[m] assign[=] call[name[INSIGNIFICANT_PARENTHESIS_PATTERN].search, parameter[call[name[word]][<ast.Slice object at 0x7da1affe5810>]]] if compare[name[m] is_not constant[None]] begin[:] variable[x] assign[=] call[name[m].start, parameter[]] continue variable[unicode_category] assign[=] call[name[unicodedata].category, parameter[name[c]]] if <ast.UnaryOp object at 0x7da1aff02770> begin[:] continue break return[call[name[word]][<ast.Slice object at 0x7da1afe70970>]]
keyword[def] identifier[filter_only_significant] ( identifier[word] ): literal[string] keyword[if] keyword[not] identifier[word] : keyword[return] identifier[word] keyword[if] identifier[word] . identifier[startswith] ( literal[string] ) keyword[and] identifier[word] . identifier[endswith] ( literal[string] ): keyword[return] identifier[filter_only_significant] ( identifier[word] [ literal[int] :- literal[int] ]) identifier[x] = identifier[len] ( identifier[word] ) keyword[while] identifier[x] > literal[int] : identifier[x] -= literal[int] identifier[c] = identifier[word] [ identifier[x] ] keyword[if] identifier[c] == literal[string] : identifier[m] = identifier[INSIGNIFICANT_PARENTHESIS_PATTERN] . identifier[search] ( identifier[word] [: identifier[x] + literal[int] ]) keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] : identifier[x] = identifier[m] . identifier[start] () keyword[continue] identifier[unicode_category] = identifier[unicodedata] . identifier[category] ( identifier[c] ) keyword[if] keyword[not] identifier[SIGNIFICANT_UNICODE_CATEGORY_PATTERN] . identifier[match] ( identifier[unicode_category] ): keyword[continue] keyword[break] keyword[return] identifier[word] [: identifier[x] + literal[int] ]
def filter_only_significant(word): """Gets a word which removes insignificant letters at the end of the given word:: >>> pick_significant(u'넥슨(코리아)') 넥슨 >>> pick_significant(u'메이플스토리...') 메이플스토리 """ if not word: return word # depends on [control=['if'], data=[]] # Unwrap a complete parenthesis. if word.startswith(u'(') and word.endswith(u')'): return filter_only_significant(word[1:-1]) # depends on [control=['if'], data=[]] x = len(word) while x > 0: x -= 1 c = word[x] # Skip a complete parenthesis. if c == u')': m = INSIGNIFICANT_PARENTHESIS_PATTERN.search(word[:x + 1]) if m is not None: x = m.start() # depends on [control=['if'], data=['m']] continue # depends on [control=['if'], data=[]] # Skip unreadable characters such as punctuations. unicode_category = unicodedata.category(c) if not SIGNIFICANT_UNICODE_CATEGORY_PATTERN.match(unicode_category): continue # depends on [control=['if'], data=[]] break # depends on [control=['while'], data=['x']] return word[:x + 1]
def load_options():
    """Read various proselintrc files, allowing user overrides.

    The first readable default file provides the base options; a user
    config (XDG location first, then the legacy ``~/.proselintrc``) may
    override ``max_errors`` and individual entries under ``checks``.

    :return: the merged options dictionary.
    """
    possible_defaults = (
        '/etc/proselintrc',
        os.path.join(proselint_path, '.proselintrc'),
    )
    options = {}
    user_options = {}
    has_overrides = False

    # Base configuration: the first default file that can be opened wins.
    for filename in possible_defaults:
        try:
            # Use a context manager so the file handle is always closed
            # (the previous version leaked the open file object).
            with open(filename) as f:
                options = json.load(f)
            break
        except IOError:
            pass

    # User overrides from the XDG config location.
    try:
        with open(os.path.join(_get_xdg_config_home(),
                               'proselint', 'config')) as f:
            user_options = json.load(f)
        has_overrides = True
    except IOError:
        pass

    # Read user configuration from the legacy path.
    if not has_overrides:
        try:
            with open(os.path.join(os.path.expanduser('~'),
                                   '.proselintrc')) as f:
                user_options = json.load(f)
            has_overrides = True
        except IOError:
            pass

    if has_overrides:
        if 'max_errors' in user_options:
            options['max_errors'] = user_options['max_errors']
        if 'checks' in user_options:
            for (key, value) in user_options['checks'].items():
                # Silently ignore override keys when no base 'checks'
                # table was loaded (KeyError on options['checks']).
                try:
                    options['checks'][key] = value
                except KeyError:
                    pass

    return options
def function[load_options, parameter[]]: constant[Read various proselintrc files, allowing user overrides.] variable[possible_defaults] assign[=] tuple[[<ast.Constant object at 0x7da1b065ceb0>, <ast.Call object at 0x7da1b065ce80>]] variable[options] assign[=] dictionary[[], []] variable[has_overrides] assign[=] constant[False] for taget[name[filename]] in starred[name[possible_defaults]] begin[:] <ast.Try object at 0x7da1b065d1e0> <ast.Try object at 0x7da1b065d4b0> if <ast.UnaryOp object at 0x7da1b065dae0> begin[:] <ast.Try object at 0x7da1b065db40> if name[has_overrides] begin[:] if compare[constant[max_errors] in name[user_options]] begin[:] call[name[options]][constant[max_errors]] assign[=] call[name[user_options]][constant[max_errors]] if compare[constant[checks] in name[user_options]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b065e380>, <ast.Name object at 0x7da1b065e3b0>]]] in starred[call[call[name[user_options]][constant[checks]].items, parameter[]]] begin[:] <ast.Try object at 0x7da1b065e4d0> return[name[options]]
keyword[def] identifier[load_options] (): literal[string] identifier[possible_defaults] =( literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[proselint_path] , literal[string] ), ) identifier[options] ={} identifier[has_overrides] = keyword[False] keyword[for] identifier[filename] keyword[in] identifier[possible_defaults] : keyword[try] : identifier[options] = identifier[json] . identifier[load] ( identifier[open] ( identifier[filename] )) keyword[break] keyword[except] identifier[IOError] : keyword[pass] keyword[try] : identifier[user_options] = identifier[json] . identifier[load] ( identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[_get_xdg_config_home] (), literal[string] , literal[string] ))) identifier[has_overrides] = keyword[True] keyword[except] identifier[IOError] : keyword[pass] keyword[if] keyword[not] identifier[has_overrides] : keyword[try] : identifier[user_options] = identifier[json] . identifier[load] ( identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ), literal[string] ))) identifier[has_overrides] = keyword[True] keyword[except] identifier[IOError] : keyword[pass] keyword[if] identifier[has_overrides] : keyword[if] literal[string] keyword[in] identifier[user_options] : identifier[options] [ literal[string] ]= identifier[user_options] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[user_options] : keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[user_options] [ literal[string] ]. identifier[items] (): keyword[try] : identifier[options] [ literal[string] ][ identifier[key] ]= identifier[value] keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[options]
def load_options(): """Read various proselintrc files, allowing user overrides.""" possible_defaults = ('/etc/proselintrc', os.path.join(proselint_path, '.proselintrc')) options = {} has_overrides = False for filename in possible_defaults: try: options = json.load(open(filename)) break # depends on [control=['try'], data=[]] except IOError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['filename']] try: user_options = json.load(open(os.path.join(_get_xdg_config_home(), 'proselint', 'config'))) has_overrides = True # depends on [control=['try'], data=[]] except IOError: pass # depends on [control=['except'], data=[]] # Read user configuration from the legacy path. if not has_overrides: try: user_options = json.load(open(os.path.join(os.path.expanduser('~'), '.proselintrc'))) has_overrides = True # depends on [control=['try'], data=[]] except IOError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if has_overrides: if 'max_errors' in user_options: options['max_errors'] = user_options['max_errors'] # depends on [control=['if'], data=['user_options']] if 'checks' in user_options: for (key, value) in user_options['checks'].items(): try: options['checks'][key] = value # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['user_options']] # depends on [control=['if'], data=[]] return options
def get_kerberos_subs(netid):
    """
    Return the restclients.models.uwnetid.Subscription object with the
    Kerberos subscription code for the given uwnetid, or None if the
    netid has no such subscription.
    """
    subscriptions = get_netid_subscriptions(
        netid, Subscription.SUBS_CODE_KERBEROS)
    if subscriptions is None:
        return None
    # The service may return several subscriptions; pick the Kerberos one.
    for sub in subscriptions:
        if sub.subscription_code == Subscription.SUBS_CODE_KERBEROS:
            return sub
    return None
def function[get_kerberos_subs, parameter[netid]]: constant[ Return a restclients.models.uwnetid.Subscription objects on the given uwnetid ] variable[subs] assign[=] call[name[get_netid_subscriptions], parameter[name[netid], name[Subscription].SUBS_CODE_KERBEROS]] if compare[name[subs] is_not constant[None]] begin[:] for taget[name[subscription]] in starred[name[subs]] begin[:] if compare[name[subscription].subscription_code equal[==] name[Subscription].SUBS_CODE_KERBEROS] begin[:] return[name[subscription]] return[constant[None]]
keyword[def] identifier[get_kerberos_subs] ( identifier[netid] ): literal[string] identifier[subs] = identifier[get_netid_subscriptions] ( identifier[netid] , identifier[Subscription] . identifier[SUBS_CODE_KERBEROS] ) keyword[if] identifier[subs] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[subscription] keyword[in] identifier[subs] : keyword[if] identifier[subscription] . identifier[subscription_code] == identifier[Subscription] . identifier[SUBS_CODE_KERBEROS] : keyword[return] identifier[subscription] keyword[return] keyword[None]
def get_kerberos_subs(netid): """ Return a restclients.models.uwnetid.Subscription objects on the given uwnetid """ subs = get_netid_subscriptions(netid, Subscription.SUBS_CODE_KERBEROS) if subs is not None: for subscription in subs: if subscription.subscription_code == Subscription.SUBS_CODE_KERBEROS: return subscription # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['subscription']] # depends on [control=['if'], data=['subs']] return None
def get_bad_rows_and_cols(df, validation_names, type_col_names, value_col_names, verbose=False):
    """
    Input: validated DataFrame, all validation names, names of
    the type columns, names of the value columns, verbose (True or False).
    Output: list of rows with bad values, list of columns with bad values,
    list of missing (but required) columns.
    Returns (None, None, None) when there is nothing to report.

    Note: adds a "num" column (row ordinal) to *df* as a side effect.
    ``validation_names``/``type_col_names``/``value_col_names`` are
    assumed to be set-like (they must support ``.union``).
    """
    df["num"] = list(range(len(df)))
    problems = df[validation_names.union(["num"])]
    # Rows where at least one validation column is non-null.
    all_problems = problems.dropna(how='all', axis=0, subset=validation_names)
    # Rows where at least one type/value check column is non-null.
    value_problems = problems.dropna(how='all', axis=0,
                                     subset=type_col_names.union(value_col_names))
    # Drop columns that are entirely null.
    all_problems = all_problems.dropna(how='all', axis=1)
    value_problems = value_problems.dropna(how='all', axis=1)
    if not len(problems):
        return None, None, None
    bad_cols = all_problems.columns
    prefixes = ["value_pass_", "type_pass_"]
    missing_prefix = "presence_pass_"
    problem_cols = []
    missing_cols = []
    for col in bad_cols:
        _, stripped_col = extract_col_name(col)
        # A column name can match at most one value/type prefix, so stop
        # scanning as soon as one matches.
        for prefix in prefixes:
            if col.startswith(prefix):
                problem_cols.append(stripped_col)
                break
        if col.startswith(missing_prefix):
            missing_cols.append(stripped_col)
    if len(value_problems):
        # Pair each bad row's ordinal with its index label.
        bad_rows = list(zip(list(value_problems["num"]),
                            list(value_problems.index)))
    else:
        bad_rows = []
    if verbose:
        if bad_rows:
            formatted_rows = ["row: {}, name: {}".format(row[0], row[1]) for row in bad_rows]
            if len(bad_rows) > 5:
                print("-W- these rows have problems:\n", "\n".join(formatted_rows[:5]), " ...")
                print("(for full error output see error file)")
            else:
                print("-W- these rows have problems:", "\n".join(formatted_rows))
        if problem_cols:
            print("-W- these columns contain bad values:", ", ".join(set(problem_cols)))
        if missing_cols:
            print("-W- these required columns are missing:", ", ".join(missing_cols))
    return bad_rows, problem_cols, missing_cols
def function[get_bad_rows_and_cols, parameter[df, validation_names, type_col_names, value_col_names, verbose]]: constant[ Input: validated DataFrame, all validation names, names of the type columns, names of the value columns, verbose (True or False). Output: list of rows with bad values, list of columns with bad values, list of missing (but required) columns. ] call[name[df]][constant[num]] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[df]]]]]]] variable[problems] assign[=] call[name[df]][call[name[validation_names].union, parameter[list[[<ast.Constant object at 0x7da20e954460>]]]]] variable[all_problems] assign[=] call[name[problems].dropna, parameter[]] variable[value_problems] assign[=] call[name[problems].dropna, parameter[]] variable[all_problems] assign[=] call[name[all_problems].dropna, parameter[]] variable[value_problems] assign[=] call[name[value_problems].dropna, parameter[]] if <ast.UnaryOp object at 0x7da20e955f90> begin[:] return[tuple[[<ast.Constant object at 0x7da20e9541c0>, <ast.Constant object at 0x7da20e956440>, <ast.Constant object at 0x7da20e955db0>]]] variable[bad_cols] assign[=] name[all_problems].columns variable[prefixes] assign[=] list[[<ast.Constant object at 0x7da20e9577c0>, <ast.Constant object at 0x7da20e957b50>]] variable[missing_prefix] assign[=] constant[presence_pass_] variable[problem_cols] assign[=] list[[]] variable[missing_cols] assign[=] list[[]] variable[long_missing_cols] assign[=] list[[]] variable[problem_rows] assign[=] list[[]] for taget[name[col]] in starred[name[bad_cols]] begin[:] <ast.Tuple object at 0x7da20e9577f0> assign[=] call[name[extract_col_name], parameter[name[col]]] for taget[name[prefix]] in starred[name[prefixes]] begin[:] if call[name[col].startswith, parameter[name[prefix]]] begin[:] call[name[problem_cols].append, parameter[name[stripped_col]]] continue if call[name[col].startswith, parameter[name[missing_prefix]]] begin[:] 
call[name[missing_cols].append, parameter[name[stripped_col]]] call[name[long_missing_cols].append, parameter[name[col]]] if call[name[len], parameter[name[value_problems]]] begin[:] variable[bad_rows] assign[=] call[name[list], parameter[call[name[zip], parameter[call[name[list], parameter[call[name[value_problems]][constant[num]]]], call[name[list], parameter[name[value_problems].index]]]]]] if name[verbose] begin[:] if name[bad_rows] begin[:] variable[formatted_rows] assign[=] <ast.ListComp object at 0x7da20e957dc0> if compare[call[name[len], parameter[name[bad_rows]]] greater[>] constant[5]] begin[:] call[name[print], parameter[constant[-W- these rows have problems: ], call[constant[ ].join, parameter[call[name[formatted_rows]][<ast.Slice object at 0x7da20e954790>]]], constant[ ...]]] call[name[print], parameter[constant[(for full error output see error file)]]] if name[problem_cols] begin[:] call[name[print], parameter[constant[-W- these columns contain bad values:], call[constant[, ].join, parameter[call[name[set], parameter[name[problem_cols]]]]]]] if name[missing_cols] begin[:] call[name[print], parameter[constant[-W- these required columns are missing:], call[constant[, ].join, parameter[name[missing_cols]]]]] return[tuple[[<ast.Name object at 0x7da20e954a60>, <ast.Name object at 0x7da20e957700>, <ast.Name object at 0x7da20e956d40>]]]
keyword[def] identifier[get_bad_rows_and_cols] ( identifier[df] , identifier[validation_names] , identifier[type_col_names] , identifier[value_col_names] , identifier[verbose] = keyword[False] ): literal[string] identifier[df] [ literal[string] ]= identifier[list] ( identifier[range] ( identifier[len] ( identifier[df] ))) identifier[problems] = identifier[df] [ identifier[validation_names] . identifier[union] ([ literal[string] ])] identifier[all_problems] = identifier[problems] . identifier[dropna] ( identifier[how] = literal[string] , identifier[axis] = literal[int] , identifier[subset] = identifier[validation_names] ) identifier[value_problems] = identifier[problems] . identifier[dropna] ( identifier[how] = literal[string] , identifier[axis] = literal[int] , identifier[subset] = identifier[type_col_names] . identifier[union] ( identifier[value_col_names] )) identifier[all_problems] = identifier[all_problems] . identifier[dropna] ( identifier[how] = literal[string] , identifier[axis] = literal[int] ) identifier[value_problems] = identifier[value_problems] . identifier[dropna] ( identifier[how] = literal[string] , identifier[axis] = literal[int] ) keyword[if] keyword[not] identifier[len] ( identifier[problems] ): keyword[return] keyword[None] , keyword[None] , keyword[None] identifier[bad_cols] = identifier[all_problems] . identifier[columns] identifier[prefixes] =[ literal[string] , literal[string] ] identifier[missing_prefix] = literal[string] identifier[problem_cols] =[] identifier[missing_cols] =[] identifier[long_missing_cols] =[] identifier[problem_rows] =[] keyword[for] identifier[col] keyword[in] identifier[bad_cols] : identifier[pre] , identifier[stripped_col] = identifier[extract_col_name] ( identifier[col] ) keyword[for] identifier[prefix] keyword[in] identifier[prefixes] : keyword[if] identifier[col] . identifier[startswith] ( identifier[prefix] ): identifier[problem_cols] . 
identifier[append] ( identifier[stripped_col] ) keyword[continue] keyword[if] identifier[col] . identifier[startswith] ( identifier[missing_prefix] ): identifier[missing_cols] . identifier[append] ( identifier[stripped_col] ) identifier[long_missing_cols] . identifier[append] ( identifier[col] ) keyword[if] identifier[len] ( identifier[value_problems] ): identifier[bad_rows] = identifier[list] ( identifier[zip] ( identifier[list] ( identifier[value_problems] [ literal[string] ]), identifier[list] ( identifier[value_problems] . identifier[index] ))) keyword[else] : identifier[bad_rows] =[] keyword[if] identifier[verbose] : keyword[if] identifier[bad_rows] : identifier[formatted_rows] =[ literal[string] . identifier[format] ( identifier[row] [ literal[int] ], identifier[row] [ literal[int] ]) keyword[for] identifier[row] keyword[in] identifier[bad_rows] ] keyword[if] identifier[len] ( identifier[bad_rows] )> literal[int] : identifier[print] ( literal[string] , literal[string] . identifier[join] ( identifier[formatted_rows] [: literal[int] ]), literal[string] ) identifier[print] ( literal[string] ) keyword[else] : identifier[print] ( literal[string] , literal[string] . identifier[join] ( identifier[formatted_rows] )) keyword[if] identifier[problem_cols] : identifier[print] ( literal[string] , literal[string] . identifier[join] ( identifier[set] ( identifier[problem_cols] ))) keyword[if] identifier[missing_cols] : identifier[print] ( literal[string] , literal[string] . identifier[join] ( identifier[missing_cols] )) keyword[return] identifier[bad_rows] , identifier[problem_cols] , identifier[missing_cols]
def get_bad_rows_and_cols(df, validation_names, type_col_names, value_col_names, verbose=False): """ Input: validated DataFrame, all validation names, names of the type columns, names of the value columns, verbose (True or False). Output: list of rows with bad values, list of columns with bad values, list of missing (but required) columns. """ df['num'] = list(range(len(df))) problems = df[validation_names.union(['num'])] all_problems = problems.dropna(how='all', axis=0, subset=validation_names) value_problems = problems.dropna(how='all', axis=0, subset=type_col_names.union(value_col_names)) all_problems = all_problems.dropna(how='all', axis=1) value_problems = value_problems.dropna(how='all', axis=1) if not len(problems): return (None, None, None) # depends on [control=['if'], data=[]] # bad_cols = all_problems.columns prefixes = ['value_pass_', 'type_pass_'] missing_prefix = 'presence_pass_' problem_cols = [] missing_cols = [] long_missing_cols = [] problem_rows = [] for col in bad_cols: (pre, stripped_col) = extract_col_name(col) for prefix in prefixes: if col.startswith(prefix): problem_cols.append(stripped_col) continue # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prefix']] if col.startswith(missing_prefix): missing_cols.append(stripped_col) long_missing_cols.append(col) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['col']] if len(value_problems): bad_rows = list(zip(list(value_problems['num']), list(value_problems.index))) # depends on [control=['if'], data=[]] else: bad_rows = [] if verbose: if bad_rows: formatted_rows = ['row: {}, name: {}'.format(row[0], row[1]) for row in bad_rows] if len(bad_rows) > 5: print('-W- these rows have problems:\n', '\n'.join(formatted_rows[:5]), ' ...') print('(for full error output see error file)') # depends on [control=['if'], data=[]] else: print('-W- these rows have problems:', '\n'.join(formatted_rows)) # depends on [control=['if'], data=[]] if problem_cols: 
print('-W- these columns contain bad values:', ', '.join(set(problem_cols))) # depends on [control=['if'], data=[]] if missing_cols: print('-W- these required columns are missing:', ', '.join(missing_cols)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return (bad_rows, problem_cols, missing_cols)
def range(self, date1, date2):
    """
    Generate business days between two dates, taking holidays into
    consideration.

    Args:
        date1 (date, datetime or str): Date start of interval.
        date2 (date, datetime or str): Date end of interval, not included.

    Note:
        All business days between date1 (inc) and date2 (exc) are returned,
        and date2 must be bigger than date1.

    Yields:
        datetime: Business days in the specified range.
    """
    # Snap the start date forward to a business day so iteration always
    # begins on a workday.
    date1 = self.adjust(parsefun(date1), FOLLOWING)
    date2 = parsefun(date2)
    # Pre-slice the holiday list down to the [date1, date2) window so the
    # loop below only walks that slice once.  bisect requires
    # self.holidays to be kept sorted -- presumably maintained elsewhere.
    holidays = []
    holidx = 0
    if len(self.holidays):
        index1 = bisect.bisect_left(self.holidays, date1)
        index2 = bisect.bisect_left(self.holidays, date2)
        if index2 > index1:
            holidays = self.holidays[index1:index2]
    datewk = date1.weekday()
    while date1 < date2:
        # If the current date is the next pending holiday, consume it and
        # skip; otherwise it is a business day and gets yielded.
        if (holidx < len(holidays)) and (holidays[holidx] == date1):
            holidx += 1
        else:
            yield date1
        # Jump straight to the next working weekday using the precomputed
        # per-weekday map (offsetnext: days until the next workday,
        # nextworkday: that day's weekday index).
        date1 += datetime.timedelta(days=\
            self.weekdaymap[datewk].offsetnext)
        datewk = self.weekdaymap[datewk].nextworkday
def function[range, parameter[self, date1, date2]]: constant[ Generate business days between two dates, taking holidays into consideration. Args: date1 (date, datetime or str): Date start of interval. date2 (date, datetime or str): Date end of interval, not included. Note: All business days between date1 (inc) and date2 (exc) are returned, and date2 must be bigger than date1. Yields: datetime: Business days in the specified range. ] variable[date1] assign[=] call[name[self].adjust, parameter[call[name[parsefun], parameter[name[date1]]], name[FOLLOWING]]] variable[date2] assign[=] call[name[parsefun], parameter[name[date2]]] variable[holidays] assign[=] list[[]] variable[holidx] assign[=] constant[0] if call[name[len], parameter[name[self].holidays]] begin[:] variable[index1] assign[=] call[name[bisect].bisect_left, parameter[name[self].holidays, name[date1]]] variable[index2] assign[=] call[name[bisect].bisect_left, parameter[name[self].holidays, name[date2]]] if compare[name[index2] greater[>] name[index1]] begin[:] variable[holidays] assign[=] call[name[self].holidays][<ast.Slice object at 0x7da20c6aab30>] variable[datewk] assign[=] call[name[date1].weekday, parameter[]] while compare[name[date1] less[<] name[date2]] begin[:] if <ast.BoolOp object at 0x7da20c6ab9d0> begin[:] <ast.AugAssign object at 0x7da20c6aa920> <ast.AugAssign object at 0x7da20c6a9420> variable[datewk] assign[=] call[name[self].weekdaymap][name[datewk]].nextworkday
keyword[def] identifier[range] ( identifier[self] , identifier[date1] , identifier[date2] ): literal[string] identifier[date1] = identifier[self] . identifier[adjust] ( identifier[parsefun] ( identifier[date1] ), identifier[FOLLOWING] ) identifier[date2] = identifier[parsefun] ( identifier[date2] ) identifier[holidays] =[] identifier[holidx] = literal[int] keyword[if] identifier[len] ( identifier[self] . identifier[holidays] ): identifier[index1] = identifier[bisect] . identifier[bisect_left] ( identifier[self] . identifier[holidays] , identifier[date1] ) identifier[index2] = identifier[bisect] . identifier[bisect_left] ( identifier[self] . identifier[holidays] , identifier[date2] ) keyword[if] identifier[index2] > identifier[index1] : identifier[holidays] = identifier[self] . identifier[holidays] [ identifier[index1] : identifier[index2] ] identifier[datewk] = identifier[date1] . identifier[weekday] () keyword[while] identifier[date1] < identifier[date2] : keyword[if] ( identifier[holidx] < identifier[len] ( identifier[holidays] )) keyword[and] ( identifier[holidays] [ identifier[holidx] ]== identifier[date1] ): identifier[holidx] += literal[int] keyword[else] : keyword[yield] identifier[date1] identifier[date1] += identifier[datetime] . identifier[timedelta] ( identifier[days] = identifier[self] . identifier[weekdaymap] [ identifier[datewk] ]. identifier[offsetnext] ) identifier[datewk] = identifier[self] . identifier[weekdaymap] [ identifier[datewk] ]. identifier[nextworkday]
def range(self, date1, date2): """ Generate business days between two dates, taking holidays into consideration. Args: date1 (date, datetime or str): Date start of interval. date2 (date, datetime or str): Date end of interval, not included. Note: All business days between date1 (inc) and date2 (exc) are returned, and date2 must be bigger than date1. Yields: datetime: Business days in the specified range. """ date1 = self.adjust(parsefun(date1), FOLLOWING) date2 = parsefun(date2) holidays = [] holidx = 0 if len(self.holidays): index1 = bisect.bisect_left(self.holidays, date1) index2 = bisect.bisect_left(self.holidays, date2) if index2 > index1: holidays = self.holidays[index1:index2] # depends on [control=['if'], data=['index2', 'index1']] # depends on [control=['if'], data=[]] datewk = date1.weekday() while date1 < date2: if holidx < len(holidays) and holidays[holidx] == date1: holidx += 1 # depends on [control=['if'], data=[]] else: yield date1 date1 += datetime.timedelta(days=self.weekdaymap[datewk].offsetnext) datewk = self.weekdaymap[datewk].nextworkday # depends on [control=['while'], data=['date1']]
def get_factory_by_class(self, klass):
    """
    Look up the custom factory registered for a class.

    Exact type matches take precedence over subclass matches; when no
    factory is registered, the class itself is returned so callers can
    always invoke the result.

    :param klass: Class type
    :type klass: type
    :return: Class factory
    :rtype: callable
    """
    # First pass: an identity match beats any subclass match.
    for registered, factory in self._factories.items():
        if registered is klass:
            return factory(self, klass)
    # Second pass: fall back to the first registered base class.
    for registered, factory in self._factories.items():
        if issubclass(klass, registered):
            return factory(self, klass)
    # No factory known for this class: it acts as its own factory.
    return klass
def function[get_factory_by_class, parameter[self, klass]]: constant[ Returns a custom factory for class. By default it will return the class itself. :param klass: Class type :type klass: type :return: Class factory :rtype: callable ] for taget[tuple[[<ast.Name object at 0x7da1b15f5e40>, <ast.Name object at 0x7da1b15f6950>]]] in starred[call[name[self]._factories.items, parameter[]]] begin[:] if compare[name[klass] is name[check]] begin[:] return[call[name[factory], parameter[name[self], name[klass]]]] for taget[tuple[[<ast.Name object at 0x7da20c993d60>, <ast.Name object at 0x7da20c9908e0>]]] in starred[call[name[self]._factories.items, parameter[]]] begin[:] if call[name[issubclass], parameter[name[klass], name[check]]] begin[:] return[call[name[factory], parameter[name[self], name[klass]]]] return[name[klass]]
keyword[def] identifier[get_factory_by_class] ( identifier[self] , identifier[klass] ): literal[string] keyword[for] identifier[check] , identifier[factory] keyword[in] identifier[self] . identifier[_factories] . identifier[items] (): keyword[if] identifier[klass] keyword[is] identifier[check] : keyword[return] identifier[factory] ( identifier[self] , identifier[klass] ) keyword[for] identifier[check] , identifier[factory] keyword[in] identifier[self] . identifier[_factories] . identifier[items] (): keyword[if] identifier[issubclass] ( identifier[klass] , identifier[check] ): keyword[return] identifier[factory] ( identifier[self] , identifier[klass] ) keyword[return] identifier[klass]
def get_factory_by_class(self, klass): """ Returns a custom factory for class. By default it will return the class itself. :param klass: Class type :type klass: type :return: Class factory :rtype: callable """ for (check, factory) in self._factories.items(): if klass is check: return factory(self, klass) # depends on [control=['if'], data=['klass']] # depends on [control=['for'], data=[]] for (check, factory) in self._factories.items(): if issubclass(klass, check): return factory(self, klass) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return klass
def process_m2m_through_save(self, obj, created=False, **kwargs):
    """Process M2M post save for custom through model."""
    # Only saves that create a row establish a relation; updates to an
    # existing through row carry no relation change and are ignored.
    if created:
        self._process_m2m_through(obj, 'post_add')
def function[process_m2m_through_save, parameter[self, obj, created]]: constant[Process M2M post save for custom through model.] if <ast.UnaryOp object at 0x7da1b1993fd0> begin[:] return[None] call[name[self]._process_m2m_through, parameter[name[obj], constant[post_add]]]
keyword[def] identifier[process_m2m_through_save] ( identifier[self] , identifier[obj] , identifier[created] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[created] : keyword[return] identifier[self] . identifier[_process_m2m_through] ( identifier[obj] , literal[string] )
def process_m2m_through_save(self, obj, created=False, **kwargs): """Process M2M post save for custom through model.""" # We are only interested in signals that establish relations. if not created: return # depends on [control=['if'], data=[]] self._process_m2m_through(obj, 'post_add')
def every_match(self, callback, **kwargs):
    """Invoke callback every time a matching message is received.

    The callback runs directly inside process_message, so it is
    guaranteed to have been invoked by the time process_message
    returns.  To stop receiving callbacks, pass the handle returned
    here to remove_waiter().

    Args:
        callback (callable): Called as callback(message) whenever a
            matching message is received.

    Returns:
        object: An opaque handle accepted by remove_waiter(); this is
        the only way to unregister the callback.
    """
    # At least one keyword filter is required to build a match spec.
    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")

    matcher = MessageSpec(**kwargs)
    handle = self._add_waiter(matcher, callback)
    return (matcher, handle)
def function[every_match, parameter[self, callback]]: constant[Invoke callback every time a matching message is received. The callback will be invoked directly inside process_message so that you can guarantee that it has been called by the time process_message has returned. The callback can be removed by a call to remove_waiter(), passing the handle object returned by this call to identify it. Args: callback (callable): A callable function that will be called as callback(message) whenever a matching message is received. Returns: object: An opaque handle that can be passed to remove_waiter(). This handle is the only way to remove this callback if you no longer want it to be called. ] if compare[call[name[len], parameter[name[kwargs]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18f723160> variable[spec] assign[=] call[name[MessageSpec], parameter[]] variable[responder] assign[=] call[name[self]._add_waiter, parameter[name[spec], name[callback]]] return[tuple[[<ast.Name object at 0x7da20c6a99f0>, <ast.Name object at 0x7da20c6abbe0>]]]
keyword[def] identifier[every_match] ( identifier[self] , identifier[callback] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[len] ( identifier[kwargs] )== literal[int] : keyword[raise] identifier[ArgumentError] ( literal[string] ) identifier[spec] = identifier[MessageSpec] (** identifier[kwargs] ) identifier[responder] = identifier[self] . identifier[_add_waiter] ( identifier[spec] , identifier[callback] ) keyword[return] ( identifier[spec] , identifier[responder] )
def every_match(self, callback, **kwargs): """Invoke callback every time a matching message is received. The callback will be invoked directly inside process_message so that you can guarantee that it has been called by the time process_message has returned. The callback can be removed by a call to remove_waiter(), passing the handle object returned by this call to identify it. Args: callback (callable): A callable function that will be called as callback(message) whenever a matching message is received. Returns: object: An opaque handle that can be passed to remove_waiter(). This handle is the only way to remove this callback if you no longer want it to be called. """ if len(kwargs) == 0: raise ArgumentError('You must specify at least one message field to wait on') # depends on [control=['if'], data=[]] spec = MessageSpec(**kwargs) responder = self._add_waiter(spec, callback) return (spec, responder)
def signalStrength(self):
    """
    Checks the modem's cellular network signal strength

    :raise CommandError: if an error occurs
    :return: The network signal strength as an integer between 0 and 99, or -1 if it is unknown
    :rtype: int
    """
    # The first response line of AT+CSQ carries the signal-quality value.
    match = self.CSQ_REGEX.match(self.write('AT+CSQ')[0])
    if not match:
        raise CommandError()
    strength = int(match.group(1))
    # 99 is the modem's "unknown / not detectable" marker.
    if strength == 99:
        return -1
    return strength
def function[signalStrength, parameter[self]]: constant[ Checks the modem's cellular network signal strength :raise CommandError: if an error occurs :return: The network signal strength as an integer between 0 and 99, or -1 if it is unknown :rtype: int ] variable[csq] assign[=] call[name[self].CSQ_REGEX.match, parameter[call[call[name[self].write, parameter[constant[AT+CSQ]]]][constant[0]]]] if name[csq] begin[:] variable[ss] assign[=] call[name[int], parameter[call[name[csq].group, parameter[constant[1]]]]] return[<ast.IfExp object at 0x7da1b13a9ae0>]
keyword[def] identifier[signalStrength] ( identifier[self] ): literal[string] identifier[csq] = identifier[self] . identifier[CSQ_REGEX] . identifier[match] ( identifier[self] . identifier[write] ( literal[string] )[ literal[int] ]) keyword[if] identifier[csq] : identifier[ss] = identifier[int] ( identifier[csq] . identifier[group] ( literal[int] )) keyword[return] identifier[ss] keyword[if] identifier[ss] != literal[int] keyword[else] - literal[int] keyword[else] : keyword[raise] identifier[CommandError] ()
def signalStrength(self): """ Checks the modem's cellular network signal strength :raise CommandError: if an error occurs :return: The network signal strength as an integer between 0 and 99, or -1 if it is unknown :rtype: int """ csq = self.CSQ_REGEX.match(self.write('AT+CSQ')[0]) if csq: ss = int(csq.group(1)) return ss if ss != 99 else -1 # depends on [control=['if'], data=[]] else: raise CommandError()
def cluster_application(self, application_id):
    """
    An application resource contains information about a particular
    application that was submitted to a cluster.

    :param str application_id: The application id
    :returns: API response object with JSON data
    :rtype: :py:class:`yarn_api_client.base.Response`
    """
    # Single per-application endpoint, keyed by the application id.
    return self.request('/ws/v1/cluster/apps/{appid}'.format(appid=application_id))
def function[cluster_application, parameter[self, application_id]]: constant[ An application resource contains information about a particular application that was submitted to a cluster. :param str application_id: The application id :returns: API response object with JSON data :rtype: :py:class:`yarn_api_client.base.Response` ] variable[path] assign[=] call[constant[/ws/v1/cluster/apps/{appid}].format, parameter[]] return[call[name[self].request, parameter[name[path]]]]
keyword[def] identifier[cluster_application] ( identifier[self] , identifier[application_id] ): literal[string] identifier[path] = literal[string] . identifier[format] ( identifier[appid] = identifier[application_id] ) keyword[return] identifier[self] . identifier[request] ( identifier[path] )
def cluster_application(self, application_id): """ An application resource contains information about a particular application that was submitted to a cluster. :param str application_id: The application id :returns: API response object with JSON data :rtype: :py:class:`yarn_api_client.base.Response` """ path = '/ws/v1/cluster/apps/{appid}'.format(appid=application_id) return self.request(path)
def open(self, number=0):
    """
    Open the FaderPort and register a callback so we can send and receive
    MIDI messages.

    :param number: 0 unless you've got more than one FaderPort attached.
                   In which case 0 is the first, 1 is the second etc
                   I only have access to a single device so I can't
                   actually test this.
    """
    # Wire up the MIDI input and output ports for the requested device.
    self.inport = mido.open_input(find_faderport_input_name(number))
    self.outport = mido.open_output(find_faderport_output_name(number))
    # Presumably a device reset/init message -- TODO confirm against
    # the FaderPort MIDI documentation.
    reset_msg = mido.Message.from_bytes([0x91, 0, 0x64])
    self.outport.send(reset_msg)
    # Give the hardware a moment to process the reset before listening.
    time.sleep(0.01)
    self.inport.callback = self._message_callback
    self.on_open()
def function[open, parameter[self, number]]: constant[ Open the FaderPort and register a callback so we can send and receive MIDI messages. :param number: 0 unless you've got more than one FaderPort attached. In which case 0 is the first, 1 is the second etc I only have access to a single device so I can't actually test this. ] name[self].inport assign[=] call[name[mido].open_input, parameter[call[name[find_faderport_input_name], parameter[name[number]]]]] name[self].outport assign[=] call[name[mido].open_output, parameter[call[name[find_faderport_output_name], parameter[name[number]]]]] call[name[self].outport.send, parameter[call[name[mido].Message.from_bytes, parameter[list[[<ast.Constant object at 0x7da18fe90bb0>, <ast.Constant object at 0x7da18fe91c90>, <ast.Constant object at 0x7da18fe93550>]]]]]] call[name[time].sleep, parameter[constant[0.01]]] name[self].inport.callback assign[=] name[self]._message_callback call[name[self].on_open, parameter[]]
keyword[def] identifier[open] ( identifier[self] , identifier[number] = literal[int] ): literal[string] identifier[self] . identifier[inport] = identifier[mido] . identifier[open_input] ( identifier[find_faderport_input_name] ( identifier[number] )) identifier[self] . identifier[outport] = identifier[mido] . identifier[open_output] ( identifier[find_faderport_output_name] ( identifier[number] )) identifier[self] . identifier[outport] . identifier[send] ( identifier[mido] . identifier[Message] . identifier[from_bytes] ([ literal[int] , literal[int] , literal[int] ])) identifier[time] . identifier[sleep] ( literal[int] ) identifier[self] . identifier[inport] . identifier[callback] = identifier[self] . identifier[_message_callback] identifier[self] . identifier[on_open] ()
def open(self, number=0): """ Open the FaderPort and register a callback so we can send and receive MIDI messages. :param number: 0 unless you've got more than one FaderPort attached. In which case 0 is the first, 1 is the second etc I only have access to a single device so I can't actually test this. """ self.inport = mido.open_input(find_faderport_input_name(number)) self.outport = mido.open_output(find_faderport_output_name(number)) self.outport.send(mido.Message.from_bytes([145, 0, 100])) # A reset message??? time.sleep(0.01) self.inport.callback = self._message_callback self.on_open()
def build(cls: Type[T], data: Generic) -> T:
    """Build objects from dictionaries, recursively."""
    known = fields_dict(cls)
    kwargs: Dict[str, Any] = {}
    for key, value in data.items():
        # Attributes the class does not declare are dropped, not errors.
        if key not in known:
            log.debug(f"got unknown attribute {key} for {cls.__name__}")
            continue
        if isinstance(value, Mapping):
            # Nested mappings become nested objects: either via the
            # declared Auto subclass, or a freshly generated one.
            declared = known[key].type
            if issubclass(declared, Auto):
                value = declared.build(value)
            else:
                value = Auto.generate(value, name=key.title())
        kwargs[key] = value
    return cls(**kwargs)
def function[build, parameter[cls, data]]: constant[Build objects from dictionaries, recursively.] variable[fields] assign[=] call[name[fields_dict], parameter[name[cls]]] <ast.AnnAssign object at 0x7da18f812890> for taget[tuple[[<ast.Name object at 0x7da18f810f40>, <ast.Name object at 0x7da18f8101f0>]]] in starred[call[name[data].items, parameter[]]] begin[:] if compare[name[key] in name[fields]] begin[:] if call[name[isinstance], parameter[name[value], name[Mapping]]] begin[:] variable[t] assign[=] call[name[fields]][name[key]].type if call[name[issubclass], parameter[name[t], name[Auto]]] begin[:] variable[value] assign[=] call[name[t].build, parameter[name[value]]] call[name[kwargs]][name[key]] assign[=] name[value] return[call[name[cls], parameter[]]]
keyword[def] identifier[build] ( identifier[cls] : identifier[Type] [ identifier[T] ], identifier[data] : identifier[Generic] )-> identifier[T] : literal[string] identifier[fields] = identifier[fields_dict] ( identifier[cls] ) identifier[kwargs] : identifier[Dict] [ identifier[str] , identifier[Any] ]={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data] . identifier[items] (): keyword[if] identifier[key] keyword[in] identifier[fields] : keyword[if] identifier[isinstance] ( identifier[value] , identifier[Mapping] ): identifier[t] = identifier[fields] [ identifier[key] ]. identifier[type] keyword[if] identifier[issubclass] ( identifier[t] , identifier[Auto] ): identifier[value] = identifier[t] . identifier[build] ( identifier[value] ) keyword[else] : identifier[value] = identifier[Auto] . identifier[generate] ( identifier[value] , identifier[name] = identifier[key] . identifier[title] ()) identifier[kwargs] [ identifier[key] ]= identifier[value] keyword[else] : identifier[log] . identifier[debug] ( literal[string] ) keyword[return] identifier[cls] (** identifier[kwargs] )
def build(cls: Type[T], data: Generic) -> T: """Build objects from dictionaries, recursively.""" fields = fields_dict(cls) kwargs: Dict[str, Any] = {} for (key, value) in data.items(): if key in fields: if isinstance(value, Mapping): t = fields[key].type if issubclass(t, Auto): value = t.build(value) # depends on [control=['if'], data=[]] else: value = Auto.generate(value, name=key.title()) # depends on [control=['if'], data=[]] kwargs[key] = value # depends on [control=['if'], data=['key', 'fields']] else: log.debug(f'got unknown attribute {key} for {cls.__name__}') # depends on [control=['for'], data=[]] return cls(**kwargs)
def _field_controller_generator(self): """ Generates the methods called by the injected controller """ # Local variable, to avoid messing with "self" stored_instance = self._ipopo_instance def get_value(self, name): # pylint: disable=W0613 """ Retrieves the controller value, from the iPOPO dictionaries :param name: The property name :return: The property value """ return stored_instance.get_controller_state(name) def set_value(self, name, new_value): # pylint: disable=W0613 """ Sets the property value and trigger an update event :param name: The property name :param new_value: The new property value """ # Get the previous value old_value = stored_instance.get_controller_state(name) if new_value != old_value: # Update the controller state stored_instance.set_controller_state(name, new_value) return new_value return get_value, set_value
def function[_field_controller_generator, parameter[self]]: constant[ Generates the methods called by the injected controller ] variable[stored_instance] assign[=] name[self]._ipopo_instance def function[get_value, parameter[self, name]]: constant[ Retrieves the controller value, from the iPOPO dictionaries :param name: The property name :return: The property value ] return[call[name[stored_instance].get_controller_state, parameter[name[name]]]] def function[set_value, parameter[self, name, new_value]]: constant[ Sets the property value and trigger an update event :param name: The property name :param new_value: The new property value ] variable[old_value] assign[=] call[name[stored_instance].get_controller_state, parameter[name[name]]] if compare[name[new_value] not_equal[!=] name[old_value]] begin[:] call[name[stored_instance].set_controller_state, parameter[name[name], name[new_value]]] return[name[new_value]] return[tuple[[<ast.Name object at 0x7da1b04d72b0>, <ast.Name object at 0x7da1b04d57e0>]]]
keyword[def] identifier[_field_controller_generator] ( identifier[self] ): literal[string] identifier[stored_instance] = identifier[self] . identifier[_ipopo_instance] keyword[def] identifier[get_value] ( identifier[self] , identifier[name] ): literal[string] keyword[return] identifier[stored_instance] . identifier[get_controller_state] ( identifier[name] ) keyword[def] identifier[set_value] ( identifier[self] , identifier[name] , identifier[new_value] ): literal[string] identifier[old_value] = identifier[stored_instance] . identifier[get_controller_state] ( identifier[name] ) keyword[if] identifier[new_value] != identifier[old_value] : identifier[stored_instance] . identifier[set_controller_state] ( identifier[name] , identifier[new_value] ) keyword[return] identifier[new_value] keyword[return] identifier[get_value] , identifier[set_value]
def _field_controller_generator(self): """ Generates the methods called by the injected controller """ # Local variable, to avoid messing with "self" stored_instance = self._ipopo_instance def get_value(self, name): # pylint: disable=W0613 '\n Retrieves the controller value, from the iPOPO dictionaries\n\n :param name: The property name\n :return: The property value\n ' return stored_instance.get_controller_state(name) def set_value(self, name, new_value): # pylint: disable=W0613 '\n Sets the property value and trigger an update event\n\n :param name: The property name\n :param new_value: The new property value\n ' # Get the previous value old_value = stored_instance.get_controller_state(name) if new_value != old_value: # Update the controller state stored_instance.set_controller_state(name, new_value) # depends on [control=['if'], data=['new_value']] return new_value return (get_value, set_value)
def addfield(self, pkt, s, val):
    """Add an internal value to a string.

    Appends ``val`` to ``s`` packed as an unsigned short (2 bytes) or an
    unsigned long long (8 bytes), depending on the adjusted length of the
    referenced field.  Any other size leaves ``s`` unchanged, matching the
    original behavior.

    :param pkt: the packet being assembled (forwarded to ``self.adjust``)
    :param s: the bytes accumulated so far
    :param val: the integer value to append
    :return: ``s`` with ``val`` appended, or ``s`` unchanged for
             unsupported sizes
    """
    # Evaluate adjust() once (the original called it twice) and dispatch
    # on the resulting byte size.
    size = self.adjust(pkt, self.length_of)
    # Map byte size -> struct format character; self.fmt[0] supplies the
    # endianness/order prefix.
    fmt_by_size = {2: "H", 8: "Q"}
    code = fmt_by_size.get(size)
    if code is None:
        return s
    return s + struct.pack(self.fmt[0] + code, val)
def function[addfield, parameter[self, pkt, s, val]]: constant[Add an internal value to a string] if compare[call[name[self].adjust, parameter[name[pkt], name[self].length_of]] equal[==] constant[2]] begin[:] return[binary_operation[name[s] + call[name[struct].pack, parameter[binary_operation[call[name[self].fmt][constant[0]] + constant[H]], name[val]]]]]
keyword[def] identifier[addfield] ( identifier[self] , identifier[pkt] , identifier[s] , identifier[val] ): literal[string] keyword[if] identifier[self] . identifier[adjust] ( identifier[pkt] , identifier[self] . identifier[length_of] )== literal[int] : keyword[return] identifier[s] + identifier[struct] . identifier[pack] ( identifier[self] . identifier[fmt] [ literal[int] ]+ literal[string] , identifier[val] ) keyword[elif] identifier[self] . identifier[adjust] ( identifier[pkt] , identifier[self] . identifier[length_of] )== literal[int] : keyword[return] identifier[s] + identifier[struct] . identifier[pack] ( identifier[self] . identifier[fmt] [ literal[int] ]+ literal[string] , identifier[val] ) keyword[else] : keyword[return] identifier[s]
def addfield(self, pkt, s, val): """Add an internal value to a string""" if self.adjust(pkt, self.length_of) == 2: return s + struct.pack(self.fmt[0] + 'H', val) # depends on [control=['if'], data=[]] elif self.adjust(pkt, self.length_of) == 8: return s + struct.pack(self.fmt[0] + 'Q', val) # depends on [control=['if'], data=[]] else: return s
def _read_config(correlation_id, path, parameters):
    """
    Reads configuration from a file, parameterize it with given values
    and returns a new ConfigParams object.

    :param correlation_id: (optional) transaction id to trace execution
                           through call chain.
    :param path: a path to configuration file.
    :param parameters: values to parameters the configuration.
    :return: ConfigParams configuration.
    """
    reader = YamlConfigReader(path)
    raw = reader._read_object(correlation_id, parameters)
    return ConfigParams.from_value(raw)
def function[_read_config, parameter[correlation_id, path, parameters]]: constant[ Reads configuration from a file, parameterize it with given values and returns a new ConfigParams object. :param correlation_id: (optional) transaction id to trace execution through call chain. :param path: a path to configuration file. :param parameters: values to parameters the configuration. :return: ConfigParams configuration. ] variable[value] assign[=] call[call[name[YamlConfigReader], parameter[name[path]]]._read_object, parameter[name[correlation_id], name[parameters]]] return[call[name[ConfigParams].from_value, parameter[name[value]]]]
keyword[def] identifier[_read_config] ( identifier[correlation_id] , identifier[path] , identifier[parameters] ): literal[string] identifier[value] = identifier[YamlConfigReader] ( identifier[path] ). identifier[_read_object] ( identifier[correlation_id] , identifier[parameters] ) keyword[return] identifier[ConfigParams] . identifier[from_value] ( identifier[value] )
def _read_config(correlation_id, path, parameters): """ Reads configuration from a file, parameterize it with given values and returns a new ConfigParams object. :param correlation_id: (optional) transaction id to trace execution through call chain. :param path: a path to configuration file. :param parameters: values to parameters the configuration. :return: ConfigParams configuration. """ value = YamlConfigReader(path)._read_object(correlation_id, parameters) return ConfigParams.from_value(value)
def add_and_shuffle(self, peer):
    """Push a new peer into the heap and shuffle the heap"""
    self.push_peer(peer)
    # Swap the freshly pushed peer with a uniformly random slot so the
    # heap ordering stays randomized.
    target = random.randint(0, self.size() - 1)
    self.swap_order(peer.index, target)
def function[add_and_shuffle, parameter[self, peer]]: constant[Push a new peer into the heap and shuffle the heap] call[name[self].push_peer, parameter[name[peer]]] variable[r] assign[=] call[name[random].randint, parameter[constant[0], binary_operation[call[name[self].size, parameter[]] - constant[1]]]] call[name[self].swap_order, parameter[name[peer].index, name[r]]]
keyword[def] identifier[add_and_shuffle] ( identifier[self] , identifier[peer] ): literal[string] identifier[self] . identifier[push_peer] ( identifier[peer] ) identifier[r] = identifier[random] . identifier[randint] ( literal[int] , identifier[self] . identifier[size] ()- literal[int] ) identifier[self] . identifier[swap_order] ( identifier[peer] . identifier[index] , identifier[r] )
def add_and_shuffle(self, peer): """Push a new peer into the heap and shuffle the heap""" self.push_peer(peer) r = random.randint(0, self.size() - 1) self.swap_order(peer.index, r)
def printAggregateJobStats(self, properties, childNumber): """Prints a job's ID, log file, remaining tries, and other properties.""" for job in self.jobsToReport: lf = lambda x: "%s:%s" % (x, str(x in properties)) print("\t".join(("JOB:%s" % job, "LOG_FILE:%s" % job.logJobStoreFileID, "TRYS_REMAINING:%i" % job.remainingRetryCount, "CHILD_NUMBER:%s" % childNumber, lf("READY_TO_RUN"), lf("IS_ZOMBIE"), lf("HAS_SERVICES"), lf("IS_SERVICE"))))
def function[printAggregateJobStats, parameter[self, properties, childNumber]]: constant[Prints a job's ID, log file, remaining tries, and other properties.] for taget[name[job]] in starred[name[self].jobsToReport] begin[:] variable[lf] assign[=] <ast.Lambda object at 0x7da18fe90af0> call[name[print], parameter[call[constant[ ].join, parameter[tuple[[<ast.BinOp object at 0x7da20c992560>, <ast.BinOp object at 0x7da20c990670>, <ast.BinOp object at 0x7da20c9921a0>, <ast.BinOp object at 0x7da20c9937f0>, <ast.Call object at 0x7da20c993460>, <ast.Call object at 0x7da20c992b60>, <ast.Call object at 0x7da20c992920>, <ast.Call object at 0x7da20c9901f0>]]]]]]
keyword[def] identifier[printAggregateJobStats] ( identifier[self] , identifier[properties] , identifier[childNumber] ): literal[string] keyword[for] identifier[job] keyword[in] identifier[self] . identifier[jobsToReport] : identifier[lf] = keyword[lambda] identifier[x] : literal[string] %( identifier[x] , identifier[str] ( identifier[x] keyword[in] identifier[properties] )) identifier[print] ( literal[string] . identifier[join] (( literal[string] % identifier[job] , literal[string] % identifier[job] . identifier[logJobStoreFileID] , literal[string] % identifier[job] . identifier[remainingRetryCount] , literal[string] % identifier[childNumber] , identifier[lf] ( literal[string] ), identifier[lf] ( literal[string] ), identifier[lf] ( literal[string] ), identifier[lf] ( literal[string] ))))
def printAggregateJobStats(self, properties, childNumber): """Prints a job's ID, log file, remaining tries, and other properties.""" for job in self.jobsToReport: lf = lambda x: '%s:%s' % (x, str(x in properties)) print('\t'.join(('JOB:%s' % job, 'LOG_FILE:%s' % job.logJobStoreFileID, 'TRYS_REMAINING:%i' % job.remainingRetryCount, 'CHILD_NUMBER:%s' % childNumber, lf('READY_TO_RUN'), lf('IS_ZOMBIE'), lf('HAS_SERVICES'), lf('IS_SERVICE')))) # depends on [control=['for'], data=['job']]
def fit(self, X, y=None, **kwargs): """ Fits the corpus to the appropriate tag map. Text documents must be tokenized & tagged before passing to fit. Parameters ---------- X : list or generator Should be provided as a list of documents or a generator that yields a list of documents that contain a list of sentences that contain (token, tag) tuples. y : ndarray or Series of length n An optional array of target values that are ignored by the visualizer. kwargs : dict Pass generic arguments to the drawing method Returns ------- self : instance Returns the instance of the transformer/visualizer """ # TODO: add support for other tagsets? if self.tagset == "penn_treebank": self.pos_tag_counts_ = self._penn_tag_map() self._handle_treebank(X) elif self.tagset == "universal": self.pos_tag_counts_ = self._uni_tag_map() self._handle_universal(X) self.draw() return self
def function[fit, parameter[self, X, y]]: constant[ Fits the corpus to the appropriate tag map. Text documents must be tokenized & tagged before passing to fit. Parameters ---------- X : list or generator Should be provided as a list of documents or a generator that yields a list of documents that contain a list of sentences that contain (token, tag) tuples. y : ndarray or Series of length n An optional array of target values that are ignored by the visualizer. kwargs : dict Pass generic arguments to the drawing method Returns ------- self : instance Returns the instance of the transformer/visualizer ] if compare[name[self].tagset equal[==] constant[penn_treebank]] begin[:] name[self].pos_tag_counts_ assign[=] call[name[self]._penn_tag_map, parameter[]] call[name[self]._handle_treebank, parameter[name[X]]] call[name[self].draw, parameter[]] return[name[self]]
keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[self] . identifier[tagset] == literal[string] : identifier[self] . identifier[pos_tag_counts_] = identifier[self] . identifier[_penn_tag_map] () identifier[self] . identifier[_handle_treebank] ( identifier[X] ) keyword[elif] identifier[self] . identifier[tagset] == literal[string] : identifier[self] . identifier[pos_tag_counts_] = identifier[self] . identifier[_uni_tag_map] () identifier[self] . identifier[_handle_universal] ( identifier[X] ) identifier[self] . identifier[draw] () keyword[return] identifier[self]
def fit(self, X, y=None, **kwargs): """ Fits the corpus to the appropriate tag map. Text documents must be tokenized & tagged before passing to fit. Parameters ---------- X : list or generator Should be provided as a list of documents or a generator that yields a list of documents that contain a list of sentences that contain (token, tag) tuples. y : ndarray or Series of length n An optional array of target values that are ignored by the visualizer. kwargs : dict Pass generic arguments to the drawing method Returns ------- self : instance Returns the instance of the transformer/visualizer """ # TODO: add support for other tagsets? if self.tagset == 'penn_treebank': self.pos_tag_counts_ = self._penn_tag_map() self._handle_treebank(X) # depends on [control=['if'], data=[]] elif self.tagset == 'universal': self.pos_tag_counts_ = self._uni_tag_map() self._handle_universal(X) # depends on [control=['if'], data=[]] self.draw() return self
def pairwise(iterable): "s -> (s0,s1), (s1,s2), (s2, s3), ..." now, nxt = tee(iterable) next(nxt, None) return izip(now, nxt)
def function[pairwise, parameter[iterable]]: constant[s -> (s0,s1), (s1,s2), (s2, s3), ...] <ast.Tuple object at 0x7da1b05f0880> assign[=] call[name[tee], parameter[name[iterable]]] call[name[next], parameter[name[nxt], constant[None]]] return[call[name[izip], parameter[name[now], name[nxt]]]]
keyword[def] identifier[pairwise] ( identifier[iterable] ): literal[string] identifier[now] , identifier[nxt] = identifier[tee] ( identifier[iterable] ) identifier[next] ( identifier[nxt] , keyword[None] ) keyword[return] identifier[izip] ( identifier[now] , identifier[nxt] )
def pairwise(iterable): """s -> (s0,s1), (s1,s2), (s2, s3), ...""" (now, nxt) = tee(iterable) next(nxt, None) return izip(now, nxt)
def weld_str_upper(array): """Convert values to uppercase. Parameters ---------- array : numpy.ndarray or WeldObject Input data. Returns ------- WeldObject Representation of this computation. """ obj_id, weld_obj = create_weld_object(array) weld_template = """map( {array}, |e: vec[i8]| result( for(e, appender[i8], |c: appender[i8], j: i64, f: i8| if(f > 96c && f < 123c, merge(c, f - 32c), merge(c, f)) ) ) )""" weld_obj.weld_code = weld_template.format(array=obj_id) return weld_obj
def function[weld_str_upper, parameter[array]]: constant[Convert values to uppercase. Parameters ---------- array : numpy.ndarray or WeldObject Input data. Returns ------- WeldObject Representation of this computation. ] <ast.Tuple object at 0x7da1b0a71510> assign[=] call[name[create_weld_object], parameter[name[array]]] variable[weld_template] assign[=] constant[map( {array}, |e: vec[i8]| result( for(e, appender[i8], |c: appender[i8], j: i64, f: i8| if(f > 96c && f < 123c, merge(c, f - 32c), merge(c, f)) ) ) )] name[weld_obj].weld_code assign[=] call[name[weld_template].format, parameter[]] return[name[weld_obj]]
keyword[def] identifier[weld_str_upper] ( identifier[array] ): literal[string] identifier[obj_id] , identifier[weld_obj] = identifier[create_weld_object] ( identifier[array] ) identifier[weld_template] = literal[string] identifier[weld_obj] . identifier[weld_code] = identifier[weld_template] . identifier[format] ( identifier[array] = identifier[obj_id] ) keyword[return] identifier[weld_obj]
def weld_str_upper(array): """Convert values to uppercase. Parameters ---------- array : numpy.ndarray or WeldObject Input data. Returns ------- WeldObject Representation of this computation. """ (obj_id, weld_obj) = create_weld_object(array) weld_template = 'map(\n {array},\n |e: vec[i8]|\n result(\n for(e,\n appender[i8],\n |c: appender[i8], j: i64, f: i8|\n if(f > 96c && f < 123c,\n merge(c, f - 32c),\n merge(c, f))\n )\n )\n)' weld_obj.weld_code = weld_template.format(array=obj_id) return weld_obj
def delimit_words(string: str) -> Generator[str, None, None]: """ Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53'] """ # TODO: Reimplement this wordlike_characters = ("<", ">", "!") current_word = "" for i, character in enumerate(string): if ( not character.isalpha() and not character.isdigit() and character not in wordlike_characters ): if current_word: yield current_word current_word = "" elif not current_word: current_word += character elif character.isupper(): if current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character.islower(): if current_word[-1].isalpha(): current_word += character else: yield current_word current_word = character elif character.isdigit(): if current_word[-1].isdigit() or current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character in wordlike_characters: if current_word[-1] in wordlike_characters: current_word += character else: yield current_word current_word = character if current_word: yield current_word
def function[delimit_words, parameter[string]]: constant[ Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53'] ] variable[wordlike_characters] assign[=] tuple[[<ast.Constant object at 0x7da204566e60>, <ast.Constant object at 0x7da204564220>, <ast.Constant object at 0x7da204566650>]] variable[current_word] assign[=] constant[] for taget[tuple[[<ast.Name object at 0x7da2045649a0>, <ast.Name object at 0x7da204565ba0>]]] in starred[call[name[enumerate], parameter[name[string]]]] begin[:] if <ast.BoolOp object at 0x7da204564370> begin[:] if name[current_word] begin[:] <ast.Yield object at 0x7da2045663b0> variable[current_word] assign[=] constant[] if name[current_word] begin[:] <ast.Yield object at 0x7da2045650f0>
keyword[def] identifier[delimit_words] ( identifier[string] : identifier[str] )-> identifier[Generator] [ identifier[str] , keyword[None] , keyword[None] ]: literal[string] identifier[wordlike_characters] =( literal[string] , literal[string] , literal[string] ) identifier[current_word] = literal[string] keyword[for] identifier[i] , identifier[character] keyword[in] identifier[enumerate] ( identifier[string] ): keyword[if] ( keyword[not] identifier[character] . identifier[isalpha] () keyword[and] keyword[not] identifier[character] . identifier[isdigit] () keyword[and] identifier[character] keyword[not] keyword[in] identifier[wordlike_characters] ): keyword[if] identifier[current_word] : keyword[yield] identifier[current_word] identifier[current_word] = literal[string] keyword[elif] keyword[not] identifier[current_word] : identifier[current_word] += identifier[character] keyword[elif] identifier[character] . identifier[isupper] (): keyword[if] identifier[current_word] [- literal[int] ]. identifier[isupper] (): identifier[current_word] += identifier[character] keyword[else] : keyword[yield] identifier[current_word] identifier[current_word] = identifier[character] keyword[elif] identifier[character] . identifier[islower] (): keyword[if] identifier[current_word] [- literal[int] ]. identifier[isalpha] (): identifier[current_word] += identifier[character] keyword[else] : keyword[yield] identifier[current_word] identifier[current_word] = identifier[character] keyword[elif] identifier[character] . identifier[isdigit] (): keyword[if] identifier[current_word] [- literal[int] ]. identifier[isdigit] () keyword[or] identifier[current_word] [- literal[int] ]. 
identifier[isupper] (): identifier[current_word] += identifier[character] keyword[else] : keyword[yield] identifier[current_word] identifier[current_word] = identifier[character] keyword[elif] identifier[character] keyword[in] identifier[wordlike_characters] : keyword[if] identifier[current_word] [- literal[int] ] keyword[in] identifier[wordlike_characters] : identifier[current_word] += identifier[character] keyword[else] : keyword[yield] identifier[current_word] identifier[current_word] = identifier[character] keyword[if] identifier[current_word] : keyword[yield] identifier[current_word]
def delimit_words(string: str) -> Generator[str, None, None]: """ Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53'] """ # TODO: Reimplement this wordlike_characters = ('<', '>', '!') current_word = '' for (i, character) in enumerate(string): if not character.isalpha() and (not character.isdigit()) and (character not in wordlike_characters): if current_word: yield current_word current_word = '' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif not current_word: current_word += character # depends on [control=['if'], data=[]] elif character.isupper(): if current_word[-1].isupper(): current_word += character # depends on [control=['if'], data=[]] else: yield current_word current_word = character # depends on [control=['if'], data=[]] elif character.islower(): if current_word[-1].isalpha(): current_word += character # depends on [control=['if'], data=[]] else: yield current_word current_word = character # depends on [control=['if'], data=[]] elif character.isdigit(): if current_word[-1].isdigit() or current_word[-1].isupper(): current_word += character # depends on [control=['if'], data=[]] else: yield current_word current_word = character # depends on [control=['if'], data=[]] elif character in wordlike_characters: if current_word[-1] in wordlike_characters: current_word += character # depends on [control=['if'], data=[]] else: yield current_word current_word = character # depends on [control=['if'], data=['character', 'wordlike_characters']] # depends on [control=['for'], data=[]] if current_word: yield current_word # depends on [control=['if'], data=[]]
def generate_password_hash(password, digestmod='sha256', salt_length=8): """ Hash a password with given method and salt length. """ salt = ''.join(random.sample(SALT_CHARS, salt_length)) signature = create_signature(salt, password, digestmod=digestmod) return '$'.join((digestmod, salt, signature))
def function[generate_password_hash, parameter[password, digestmod, salt_length]]: constant[ Hash a password with given method and salt length. ] variable[salt] assign[=] call[constant[].join, parameter[call[name[random].sample, parameter[name[SALT_CHARS], name[salt_length]]]]] variable[signature] assign[=] call[name[create_signature], parameter[name[salt], name[password]]] return[call[constant[$].join, parameter[tuple[[<ast.Name object at 0x7da18ede7dc0>, <ast.Name object at 0x7da18ede40a0>, <ast.Name object at 0x7da18ede5390>]]]]]
keyword[def] identifier[generate_password_hash] ( identifier[password] , identifier[digestmod] = literal[string] , identifier[salt_length] = literal[int] ): literal[string] identifier[salt] = literal[string] . identifier[join] ( identifier[random] . identifier[sample] ( identifier[SALT_CHARS] , identifier[salt_length] )) identifier[signature] = identifier[create_signature] ( identifier[salt] , identifier[password] , identifier[digestmod] = identifier[digestmod] ) keyword[return] literal[string] . identifier[join] (( identifier[digestmod] , identifier[salt] , identifier[signature] ))
def generate_password_hash(password, digestmod='sha256', salt_length=8): """ Hash a password with given method and salt length. """ salt = ''.join(random.sample(SALT_CHARS, salt_length)) signature = create_signature(salt, password, digestmod=digestmod) return '$'.join((digestmod, salt, signature))
def visit_copy_command(element, compiler, **kw): """ Returns the actual sql query for the CopyCommand class. """ qs = """COPY {table}{columns} FROM :data_location WITH CREDENTIALS AS :credentials {format} {parameters}""" parameters = [] bindparams = [ sa.bindparam( 'data_location', value=element.data_location, type_=sa.String, ), sa.bindparam( 'credentials', value=element.credentials, type_=sa.String, ), ] if element.format == Format.csv: format_ = 'FORMAT AS CSV' if element.quote is not None: format_ += ' QUOTE AS :quote_character' bindparams.append(sa.bindparam( 'quote_character', value=element.quote, type_=sa.String, )) elif element.format == Format.json: format_ = 'FORMAT AS JSON AS :json_option' bindparams.append(sa.bindparam( 'json_option', value=element.path_file, type_=sa.String, )) elif element.format == Format.avro: format_ = 'FORMAT AS AVRO AS :avro_option' bindparams.append(sa.bindparam( 'avro_option', value=element.path_file, type_=sa.String, )) elif element.format == Format.orc: format_ = 'FORMAT AS ORC' elif element.format == Format.parquet: format_ = 'FORMAT AS PARQUET' elif element.format == Format.fixed_width and element.fixed_width is None: raise sa_exc.CompileError( "'fixed_width' argument required for format 'FIXEDWIDTH'.") else: format_ = '' if element.delimiter is not None: parameters.append('DELIMITER AS :delimiter_char') bindparams.append(sa.bindparam( 'delimiter_char', value=element.delimiter, type_=sa.String, )) if element.fixed_width is not None: parameters.append('FIXEDWIDTH AS :fixedwidth_spec') bindparams.append(sa.bindparam( 'fixedwidth_spec', value=_process_fixed_width(element.fixed_width), type_=sa.String, )) if element.compression is not None: parameters.append(Compression(element.compression).value) if element.manifest: parameters.append('MANIFEST') if element.accept_any_date: parameters.append('ACCEPTANYDATE') if element.accept_inv_chars is not None: parameters.append('ACCEPTINVCHARS AS :replacement_char') 
bindparams.append(sa.bindparam( 'replacement_char', value=element.accept_inv_chars, type_=sa.String )) if element.blanks_as_null: parameters.append('BLANKSASNULL') if element.date_format is not None: parameters.append('DATEFORMAT AS :dateformat_string') bindparams.append(sa.bindparam( 'dateformat_string', value=element.date_format, type_=sa.String, )) if element.empty_as_null: parameters.append('EMPTYASNULL') if element.encoding is not None: parameters.append('ENCODING AS ' + Encoding(element.encoding).value) if element.escape: parameters.append('ESCAPE') if element.explicit_ids: parameters.append('EXPLICIT_IDS') if element.fill_record: parameters.append('FILLRECORD') if element.ignore_blank_lines: parameters.append('IGNOREBLANKLINES') if element.ignore_header is not None: parameters.append('IGNOREHEADER AS :number_rows') bindparams.append(sa.bindparam( 'number_rows', value=element.ignore_header, type_=sa.Integer, )) if element.dangerous_null_delimiter is not None: parameters.append("NULL AS '%s'" % element.dangerous_null_delimiter) if element.remove_quotes: parameters.append('REMOVEQUOTES') if element.roundec: parameters.append('ROUNDEC') if element.time_format is not None: parameters.append('TIMEFORMAT AS :timeformat_string') bindparams.append(sa.bindparam( 'timeformat_string', value=element.time_format, type_=sa.String, )) if element.trim_blanks: parameters.append('TRIMBLANKS') if element.truncate_columns: parameters.append('TRUNCATECOLUMNS') if element.comp_rows: parameters.append('COMPROWS :numrows') bindparams.append(sa.bindparam( 'numrows', value=element.comp_rows, type_=sa.Integer, )) if element.comp_update: parameters.append('COMPUPDATE ON') elif element.comp_update is not None: parameters.append('COMPUPDATE OFF') if element.max_error is not None: parameters.append('MAXERROR AS :error_count') bindparams.append(sa.bindparam( 'error_count', value=element.max_error, type_=sa.Integer, )) if element.no_load: parameters.append('NOLOAD') if element.stat_update: 
parameters.append('STATUPDATE ON') elif element.stat_update is not None: parameters.append('STATUPDATE OFF') if element.region is not None: parameters.append('REGION :region') bindparams.append(sa.bindparam( 'region', value=element.region, type_=sa.String )) columns = ' (%s)' % ', '.join( compiler.preparer.format_column(column) for column in element.columns ) if element.columns else '' qs = qs.format( table=compiler.preparer.format_table(element.table), columns=columns, format=format_, parameters='\n'.join(parameters) ) return compiler.process(sa.text(qs).bindparams(*bindparams), **kw)
def function[visit_copy_command, parameter[element, compiler]]: constant[ Returns the actual sql query for the CopyCommand class. ] variable[qs] assign[=] constant[COPY {table}{columns} FROM :data_location WITH CREDENTIALS AS :credentials {format} {parameters}] variable[parameters] assign[=] list[[]] variable[bindparams] assign[=] list[[<ast.Call object at 0x7da18c4ce860>, <ast.Call object at 0x7da18c4cfe20>]] if compare[name[element].format equal[==] name[Format].csv] begin[:] variable[format_] assign[=] constant[FORMAT AS CSV] if compare[name[element].quote is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da18fe92050> call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[quote_character]]]]] if compare[name[element].delimiter is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[DELIMITER AS :delimiter_char]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[delimiter_char]]]]] if compare[name[element].fixed_width is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[FIXEDWIDTH AS :fixedwidth_spec]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[fixedwidth_spec]]]]] if compare[name[element].compression is_not constant[None]] begin[:] call[name[parameters].append, parameter[call[name[Compression], parameter[name[element].compression]].value]] if name[element].manifest begin[:] call[name[parameters].append, parameter[constant[MANIFEST]]] if name[element].accept_any_date begin[:] call[name[parameters].append, parameter[constant[ACCEPTANYDATE]]] if compare[name[element].accept_inv_chars is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[ACCEPTINVCHARS AS :replacement_char]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[replacement_char]]]]] if name[element].blanks_as_null begin[:] call[name[parameters].append, 
parameter[constant[BLANKSASNULL]]] if compare[name[element].date_format is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[DATEFORMAT AS :dateformat_string]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[dateformat_string]]]]] if name[element].empty_as_null begin[:] call[name[parameters].append, parameter[constant[EMPTYASNULL]]] if compare[name[element].encoding is_not constant[None]] begin[:] call[name[parameters].append, parameter[binary_operation[constant[ENCODING AS ] + call[name[Encoding], parameter[name[element].encoding]].value]]] if name[element].escape begin[:] call[name[parameters].append, parameter[constant[ESCAPE]]] if name[element].explicit_ids begin[:] call[name[parameters].append, parameter[constant[EXPLICIT_IDS]]] if name[element].fill_record begin[:] call[name[parameters].append, parameter[constant[FILLRECORD]]] if name[element].ignore_blank_lines begin[:] call[name[parameters].append, parameter[constant[IGNOREBLANKLINES]]] if compare[name[element].ignore_header is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[IGNOREHEADER AS :number_rows]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[number_rows]]]]] if compare[name[element].dangerous_null_delimiter is_not constant[None]] begin[:] call[name[parameters].append, parameter[binary_operation[constant[NULL AS '%s'] <ast.Mod object at 0x7da2590d6920> name[element].dangerous_null_delimiter]]] if name[element].remove_quotes begin[:] call[name[parameters].append, parameter[constant[REMOVEQUOTES]]] if name[element].roundec begin[:] call[name[parameters].append, parameter[constant[ROUNDEC]]] if compare[name[element].time_format is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[TIMEFORMAT AS :timeformat_string]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[timeformat_string]]]]] if 
name[element].trim_blanks begin[:] call[name[parameters].append, parameter[constant[TRIMBLANKS]]] if name[element].truncate_columns begin[:] call[name[parameters].append, parameter[constant[TRUNCATECOLUMNS]]] if name[element].comp_rows begin[:] call[name[parameters].append, parameter[constant[COMPROWS :numrows]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[numrows]]]]] if name[element].comp_update begin[:] call[name[parameters].append, parameter[constant[COMPUPDATE ON]]] if compare[name[element].max_error is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[MAXERROR AS :error_count]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[error_count]]]]] if name[element].no_load begin[:] call[name[parameters].append, parameter[constant[NOLOAD]]] if name[element].stat_update begin[:] call[name[parameters].append, parameter[constant[STATUPDATE ON]]] if compare[name[element].region is_not constant[None]] begin[:] call[name[parameters].append, parameter[constant[REGION :region]]] call[name[bindparams].append, parameter[call[name[sa].bindparam, parameter[constant[region]]]]] variable[columns] assign[=] <ast.IfExp object at 0x7da2041d87c0> variable[qs] assign[=] call[name[qs].format, parameter[]] return[call[name[compiler].process, parameter[call[call[name[sa].text, parameter[name[qs]]].bindparams, parameter[<ast.Starred object at 0x7da2041d9d20>]]]]]
keyword[def] identifier[visit_copy_command] ( identifier[element] , identifier[compiler] ,** identifier[kw] ): literal[string] identifier[qs] = literal[string] identifier[parameters] =[] identifier[bindparams] =[ identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[data_location] , identifier[type_] = identifier[sa] . identifier[String] , ), identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[credentials] , identifier[type_] = identifier[sa] . identifier[String] , ), ] keyword[if] identifier[element] . identifier[format] == identifier[Format] . identifier[csv] : identifier[format_] = literal[string] keyword[if] identifier[element] . identifier[quote] keyword[is] keyword[not] keyword[None] : identifier[format_] += literal[string] identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[quote] , identifier[type_] = identifier[sa] . identifier[String] , )) keyword[elif] identifier[element] . identifier[format] == identifier[Format] . identifier[json] : identifier[format_] = literal[string] identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[path_file] , identifier[type_] = identifier[sa] . identifier[String] , )) keyword[elif] identifier[element] . identifier[format] == identifier[Format] . identifier[avro] : identifier[format_] = literal[string] identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[path_file] , identifier[type_] = identifier[sa] . identifier[String] , )) keyword[elif] identifier[element] . identifier[format] == identifier[Format] . identifier[orc] : identifier[format_] = literal[string] keyword[elif] identifier[element] . 
identifier[format] == identifier[Format] . identifier[parquet] : identifier[format_] = literal[string] keyword[elif] identifier[element] . identifier[format] == identifier[Format] . identifier[fixed_width] keyword[and] identifier[element] . identifier[fixed_width] keyword[is] keyword[None] : keyword[raise] identifier[sa_exc] . identifier[CompileError] ( literal[string] ) keyword[else] : identifier[format_] = literal[string] keyword[if] identifier[element] . identifier[delimiter] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[delimiter] , identifier[type_] = identifier[sa] . identifier[String] , )) keyword[if] identifier[element] . identifier[fixed_width] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[_process_fixed_width] ( identifier[element] . identifier[fixed_width] ), identifier[type_] = identifier[sa] . identifier[String] , )) keyword[if] identifier[element] . identifier[compression] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( identifier[Compression] ( identifier[element] . identifier[compression] ). identifier[value] ) keyword[if] identifier[element] . identifier[manifest] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[accept_any_date] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[accept_inv_chars] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . 
identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[accept_inv_chars] , identifier[type_] = identifier[sa] . identifier[String] )) keyword[if] identifier[element] . identifier[blanks_as_null] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[date_format] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[date_format] , identifier[type_] = identifier[sa] . identifier[String] , )) keyword[if] identifier[element] . identifier[empty_as_null] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[encoding] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] + identifier[Encoding] ( identifier[element] . identifier[encoding] ). identifier[value] ) keyword[if] identifier[element] . identifier[escape] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[explicit_ids] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[fill_record] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[ignore_blank_lines] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[ignore_header] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[ignore_header] , identifier[type_] = identifier[sa] . 
identifier[Integer] , )) keyword[if] identifier[element] . identifier[dangerous_null_delimiter] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] % identifier[element] . identifier[dangerous_null_delimiter] ) keyword[if] identifier[element] . identifier[remove_quotes] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[roundec] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[time_format] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[time_format] , identifier[type_] = identifier[sa] . identifier[String] , )) keyword[if] identifier[element] . identifier[trim_blanks] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[truncate_columns] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[comp_rows] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[comp_rows] , identifier[type_] = identifier[sa] . identifier[Integer] , )) keyword[if] identifier[element] . identifier[comp_update] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[elif] identifier[element] . identifier[comp_update] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[max_error] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . 
identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[max_error] , identifier[type_] = identifier[sa] . identifier[Integer] , )) keyword[if] identifier[element] . identifier[no_load] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[stat_update] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[elif] identifier[element] . identifier[stat_update] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) keyword[if] identifier[element] . identifier[region] keyword[is] keyword[not] keyword[None] : identifier[parameters] . identifier[append] ( literal[string] ) identifier[bindparams] . identifier[append] ( identifier[sa] . identifier[bindparam] ( literal[string] , identifier[value] = identifier[element] . identifier[region] , identifier[type_] = identifier[sa] . identifier[String] )) identifier[columns] = literal[string] % literal[string] . identifier[join] ( identifier[compiler] . identifier[preparer] . identifier[format_column] ( identifier[column] ) keyword[for] identifier[column] keyword[in] identifier[element] . identifier[columns] ) keyword[if] identifier[element] . identifier[columns] keyword[else] literal[string] identifier[qs] = identifier[qs] . identifier[format] ( identifier[table] = identifier[compiler] . identifier[preparer] . identifier[format_table] ( identifier[element] . identifier[table] ), identifier[columns] = identifier[columns] , identifier[format] = identifier[format_] , identifier[parameters] = literal[string] . identifier[join] ( identifier[parameters] ) ) keyword[return] identifier[compiler] . identifier[process] ( identifier[sa] . identifier[text] ( identifier[qs] ). identifier[bindparams] (* identifier[bindparams] ),** identifier[kw] )
def visit_copy_command(element, compiler, **kw): """ Returns the actual sql query for the CopyCommand class. """ qs = 'COPY {table}{columns} FROM :data_location\n WITH CREDENTIALS AS :credentials\n {format}\n {parameters}' parameters = [] bindparams = [sa.bindparam('data_location', value=element.data_location, type_=sa.String), sa.bindparam('credentials', value=element.credentials, type_=sa.String)] if element.format == Format.csv: format_ = 'FORMAT AS CSV' if element.quote is not None: format_ += ' QUOTE AS :quote_character' bindparams.append(sa.bindparam('quote_character', value=element.quote, type_=sa.String)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif element.format == Format.json: format_ = 'FORMAT AS JSON AS :json_option' bindparams.append(sa.bindparam('json_option', value=element.path_file, type_=sa.String)) # depends on [control=['if'], data=[]] elif element.format == Format.avro: format_ = 'FORMAT AS AVRO AS :avro_option' bindparams.append(sa.bindparam('avro_option', value=element.path_file, type_=sa.String)) # depends on [control=['if'], data=[]] elif element.format == Format.orc: format_ = 'FORMAT AS ORC' # depends on [control=['if'], data=[]] elif element.format == Format.parquet: format_ = 'FORMAT AS PARQUET' # depends on [control=['if'], data=[]] elif element.format == Format.fixed_width and element.fixed_width is None: raise sa_exc.CompileError("'fixed_width' argument required for format 'FIXEDWIDTH'.") # depends on [control=['if'], data=[]] else: format_ = '' if element.delimiter is not None: parameters.append('DELIMITER AS :delimiter_char') bindparams.append(sa.bindparam('delimiter_char', value=element.delimiter, type_=sa.String)) # depends on [control=['if'], data=[]] if element.fixed_width is not None: parameters.append('FIXEDWIDTH AS :fixedwidth_spec') bindparams.append(sa.bindparam('fixedwidth_spec', value=_process_fixed_width(element.fixed_width), type_=sa.String)) # depends on [control=['if'], data=[]] 
if element.compression is not None: parameters.append(Compression(element.compression).value) # depends on [control=['if'], data=[]] if element.manifest: parameters.append('MANIFEST') # depends on [control=['if'], data=[]] if element.accept_any_date: parameters.append('ACCEPTANYDATE') # depends on [control=['if'], data=[]] if element.accept_inv_chars is not None: parameters.append('ACCEPTINVCHARS AS :replacement_char') bindparams.append(sa.bindparam('replacement_char', value=element.accept_inv_chars, type_=sa.String)) # depends on [control=['if'], data=[]] if element.blanks_as_null: parameters.append('BLANKSASNULL') # depends on [control=['if'], data=[]] if element.date_format is not None: parameters.append('DATEFORMAT AS :dateformat_string') bindparams.append(sa.bindparam('dateformat_string', value=element.date_format, type_=sa.String)) # depends on [control=['if'], data=[]] if element.empty_as_null: parameters.append('EMPTYASNULL') # depends on [control=['if'], data=[]] if element.encoding is not None: parameters.append('ENCODING AS ' + Encoding(element.encoding).value) # depends on [control=['if'], data=[]] if element.escape: parameters.append('ESCAPE') # depends on [control=['if'], data=[]] if element.explicit_ids: parameters.append('EXPLICIT_IDS') # depends on [control=['if'], data=[]] if element.fill_record: parameters.append('FILLRECORD') # depends on [control=['if'], data=[]] if element.ignore_blank_lines: parameters.append('IGNOREBLANKLINES') # depends on [control=['if'], data=[]] if element.ignore_header is not None: parameters.append('IGNOREHEADER AS :number_rows') bindparams.append(sa.bindparam('number_rows', value=element.ignore_header, type_=sa.Integer)) # depends on [control=['if'], data=[]] if element.dangerous_null_delimiter is not None: parameters.append("NULL AS '%s'" % element.dangerous_null_delimiter) # depends on [control=['if'], data=[]] if element.remove_quotes: parameters.append('REMOVEQUOTES') # depends on [control=['if'], data=[]] if 
element.roundec: parameters.append('ROUNDEC') # depends on [control=['if'], data=[]] if element.time_format is not None: parameters.append('TIMEFORMAT AS :timeformat_string') bindparams.append(sa.bindparam('timeformat_string', value=element.time_format, type_=sa.String)) # depends on [control=['if'], data=[]] if element.trim_blanks: parameters.append('TRIMBLANKS') # depends on [control=['if'], data=[]] if element.truncate_columns: parameters.append('TRUNCATECOLUMNS') # depends on [control=['if'], data=[]] if element.comp_rows: parameters.append('COMPROWS :numrows') bindparams.append(sa.bindparam('numrows', value=element.comp_rows, type_=sa.Integer)) # depends on [control=['if'], data=[]] if element.comp_update: parameters.append('COMPUPDATE ON') # depends on [control=['if'], data=[]] elif element.comp_update is not None: parameters.append('COMPUPDATE OFF') # depends on [control=['if'], data=[]] if element.max_error is not None: parameters.append('MAXERROR AS :error_count') bindparams.append(sa.bindparam('error_count', value=element.max_error, type_=sa.Integer)) # depends on [control=['if'], data=[]] if element.no_load: parameters.append('NOLOAD') # depends on [control=['if'], data=[]] if element.stat_update: parameters.append('STATUPDATE ON') # depends on [control=['if'], data=[]] elif element.stat_update is not None: parameters.append('STATUPDATE OFF') # depends on [control=['if'], data=[]] if element.region is not None: parameters.append('REGION :region') bindparams.append(sa.bindparam('region', value=element.region, type_=sa.String)) # depends on [control=['if'], data=[]] columns = ' (%s)' % ', '.join((compiler.preparer.format_column(column) for column in element.columns)) if element.columns else '' qs = qs.format(table=compiler.preparer.format_table(element.table), columns=columns, format=format_, parameters='\n'.join(parameters)) return compiler.process(sa.text(qs).bindparams(*bindparams), **kw)
def _process_with_joblib(self, X: Union[pd.DataFrame, np.ndarray], n_refs: int, cluster_array: np.ndarray):
    """
    Yield ``(gap_value, n_clusters)`` pairs, computing each gap statistic
    in parallel with the joblib backend.

    Raises:
        EnvironmentError: when joblib is not installed (``Parallel`` is None).
    """
    if Parallel is None:
        raise EnvironmentError('joblib is not installed; cannot use joblib as the parallel backend!')
    with Parallel(n_jobs=self.n_jobs) as parallel:
        # One delayed gap calculation per candidate cluster count.
        jobs = (delayed(self._calculate_gap)(X, n_refs, k) for k in cluster_array)
        for gap_value, k in parallel(jobs):
            yield (gap_value, k)
def function[_process_with_joblib, parameter[self, X, n_refs, cluster_array]]: constant[ Process calling of .calculate_gap() method using the joblib backend ] if compare[name[Parallel] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1034640> with call[name[Parallel], parameter[]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b10e4460>, <ast.Name object at 0x7da1b10e6ce0>]]] in starred[call[name[parallel], parameter[<ast.GeneratorExp object at 0x7da1b10e5810>]]] begin[:] <ast.Yield object at 0x7da1b10e7a90>
keyword[def] identifier[_process_with_joblib] ( identifier[self] , identifier[X] : identifier[Union] [ identifier[pd] . identifier[DataFrame] , identifier[np] . identifier[ndarray] ], identifier[n_refs] : identifier[int] , identifier[cluster_array] : identifier[np] . identifier[ndarray] ): literal[string] keyword[if] identifier[Parallel] keyword[is] keyword[None] : keyword[raise] identifier[EnvironmentError] ( literal[string] ) keyword[with] identifier[Parallel] ( identifier[n_jobs] = identifier[self] . identifier[n_jobs] ) keyword[as] identifier[parallel] : keyword[for] identifier[gap_value] , identifier[n_clusters] keyword[in] identifier[parallel] ( identifier[delayed] ( identifier[self] . identifier[_calculate_gap] )( identifier[X] , identifier[n_refs] , identifier[n_clusters] ) keyword[for] identifier[n_clusters] keyword[in] identifier[cluster_array] ): keyword[yield] ( identifier[gap_value] , identifier[n_clusters] )
def _process_with_joblib(self, X: Union[pd.DataFrame, np.ndarray], n_refs: int, cluster_array: np.ndarray): """ Process calling of .calculate_gap() method using the joblib backend """ if Parallel is None: raise EnvironmentError('joblib is not installed; cannot use joblib as the parallel backend!') # depends on [control=['if'], data=[]] with Parallel(n_jobs=self.n_jobs) as parallel: for (gap_value, n_clusters) in parallel((delayed(self._calculate_gap)(X, n_refs, n_clusters) for n_clusters in cluster_array)): yield (gap_value, n_clusters) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['parallel']]
def show_firmware_version_output_show_firmware_version_node_info_firmware_version_info_secondary_version(self, **kwargs):
    """Auto Generated Code

    Build the XML payload
    ``show_firmware_version/output/show-firmware-version/node-info/
    firmware-version-info/secondary-version`` with the text taken from the
    ``secondary_version`` keyword argument, then dispatch it through the
    ``callback`` keyword argument (falling back to ``self._callback``).
    """
    root = ET.Element("show_firmware_version")
    node = root
    # Descend the fixed element chain, creating one child per tag.
    for tag in ("output", "show-firmware-version", "node-info",
                "firmware-version-info", "secondary-version"):
        node = ET.SubElement(node, tag)
    node.text = kwargs.pop('secondary_version')
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
def function[show_firmware_version_output_show_firmware_version_node_info_firmware_version_info_secondary_version, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[show_firmware_version] assign[=] call[name[ET].Element, parameter[constant[show_firmware_version]]] variable[config] assign[=] name[show_firmware_version] variable[output] assign[=] call[name[ET].SubElement, parameter[name[show_firmware_version], constant[output]]] variable[show_firmware_version] assign[=] call[name[ET].SubElement, parameter[name[output], constant[show-firmware-version]]] variable[node_info] assign[=] call[name[ET].SubElement, parameter[name[show_firmware_version], constant[node-info]]] variable[firmware_version_info] assign[=] call[name[ET].SubElement, parameter[name[node_info], constant[firmware-version-info]]] variable[secondary_version] assign[=] call[name[ET].SubElement, parameter[name[firmware_version_info], constant[secondary-version]]] name[secondary_version].text assign[=] call[name[kwargs].pop, parameter[constant[secondary_version]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[show_firmware_version_output_show_firmware_version_node_info_firmware_version_info_secondary_version] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[show_firmware_version] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[show_firmware_version] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[show_firmware_version] , literal[string] ) identifier[show_firmware_version] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[node_info] = identifier[ET] . identifier[SubElement] ( identifier[show_firmware_version] , literal[string] ) identifier[firmware_version_info] = identifier[ET] . identifier[SubElement] ( identifier[node_info] , literal[string] ) identifier[secondary_version] = identifier[ET] . identifier[SubElement] ( identifier[firmware_version_info] , literal[string] ) identifier[secondary_version] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def show_firmware_version_output_show_firmware_version_node_info_firmware_version_info_secondary_version(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') show_firmware_version = ET.Element('show_firmware_version') config = show_firmware_version output = ET.SubElement(show_firmware_version, 'output') show_firmware_version = ET.SubElement(output, 'show-firmware-version') node_info = ET.SubElement(show_firmware_version, 'node-info') firmware_version_info = ET.SubElement(node_info, 'firmware-version-info') secondary_version = ET.SubElement(firmware_version_info, 'secondary-version') secondary_version.text = kwargs.pop('secondary_version') callback = kwargs.pop('callback', self._callback) return callback(config)
def add_constraints(self):
    """
    Set the base constraints of the relation query.

    No-op while constraints are disabled; otherwise applies the parent
    constraints and additionally filters on the morph type column.
    """
    if not self._constraints:
        return
    super(MorphOneOrMany, self).add_constraints()
    self._query.where(self._morph_type, self._morph_name)
def function[add_constraints, parameter[self]]: constant[ Set the base constraints of the relation query ] if name[self]._constraints begin[:] call[call[name[super], parameter[name[MorphOneOrMany], name[self]]].add_constraints, parameter[]] call[name[self]._query.where, parameter[name[self]._morph_type, name[self]._morph_name]]
keyword[def] identifier[add_constraints] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_constraints] : identifier[super] ( identifier[MorphOneOrMany] , identifier[self] ). identifier[add_constraints] () identifier[self] . identifier[_query] . identifier[where] ( identifier[self] . identifier[_morph_type] , identifier[self] . identifier[_morph_name] )
def add_constraints(self): """ Set the base constraints of the relation query """ if self._constraints: super(MorphOneOrMany, self).add_constraints() self._query.where(self._morph_type, self._morph_name) # depends on [control=['if'], data=[]]
def print_env_info(key, out=sys.stderr):
    """Print ``key = repr(value)`` to *out* when the environment variable *key* is set."""
    value = os.getenv(key)
    if value is None:
        return
    print(key, "=", repr(value), file=out)
def function[print_env_info, parameter[key, out]]: constant[If given environment key is defined, print it out.] variable[value] assign[=] call[name[os].getenv, parameter[name[key]]] if compare[name[value] is_not constant[None]] begin[:] call[name[print], parameter[name[key], constant[=], call[name[repr], parameter[name[value]]]]]
keyword[def] identifier[print_env_info] ( identifier[key] , identifier[out] = identifier[sys] . identifier[stderr] ): literal[string] identifier[value] = identifier[os] . identifier[getenv] ( identifier[key] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[print] ( identifier[key] , literal[string] , identifier[repr] ( identifier[value] ), identifier[file] = identifier[out] )
def print_env_info(key, out=sys.stderr): """If given environment key is defined, print it out.""" value = os.getenv(key) if value is not None: print(key, '=', repr(value), file=out) # depends on [control=['if'], data=['value']]
def set_vim_style(theme):
    """Add style and compatibility with the vim notebook extension.

    Prepends an ``@import`` for the chosen theme to the vim less source,
    compiles it with lesscpy, and installs the result as
    ``.../nbextensions/vim_binding/vim_binding.css``.
    """
    target_dir = os.path.join(jupyter_nbext, 'vim_binding')
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    # Theme import line first, then the packaged vim stylesheet.
    less_source = '@import "styles{}";\n'.format(os.sep + theme)
    with open(vim_style, 'r') as src:
        less_source += src.read() + '\n'
    with open(vimtemp, 'w') as tmp:
        tmp.write(less_source)
    # lesscpy resolves relative @import paths from the package directory.
    os.chdir(package_dir)
    compiled_css = lesscpy.compile(vimtemp) + '\n\n'
    # install vim_custom_css to ...nbextensions/vim_binding/vim_binding.css
    with open(os.path.join(target_dir, 'vim_binding.css'), 'w') as dest:
        dest.write(compiled_css)
def function[set_vim_style, parameter[theme]]: constant[Add style and compatibility with vim notebook extension] variable[vim_jupyter_nbext] assign[=] call[name[os].path.join, parameter[name[jupyter_nbext], constant[vim_binding]]] if <ast.UnaryOp object at 0x7da20c6c57e0> begin[:] call[name[os].makedirs, parameter[name[vim_jupyter_nbext]]] variable[vim_less] assign[=] call[constant[@import "styles{}"; ].format, parameter[call[constant[].join, parameter[list[[<ast.Attribute object at 0x7da20c6c4490>, <ast.Name object at 0x7da20c6c4670>]]]]]] with call[name[open], parameter[name[vim_style], constant[r]]] begin[:] <ast.AugAssign object at 0x7da20c6c64d0> with call[name[open], parameter[name[vimtemp], constant[w]]] begin[:] call[name[vtemp].write, parameter[name[vim_less]]] call[name[os].chdir, parameter[name[package_dir]]] variable[vim_css] assign[=] call[name[lesscpy].compile, parameter[name[vimtemp]]] <ast.AugAssign object at 0x7da20c6c52a0> variable[vim_custom_css] assign[=] call[name[os].path.join, parameter[name[vim_jupyter_nbext], constant[vim_binding.css]]] with call[name[open], parameter[name[vim_custom_css], constant[w]]] begin[:] call[name[vim_custom].write, parameter[name[vim_css]]]
keyword[def] identifier[set_vim_style] ( identifier[theme] ): literal[string] identifier[vim_jupyter_nbext] = identifier[os] . identifier[path] . identifier[join] ( identifier[jupyter_nbext] , literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[vim_jupyter_nbext] ): identifier[os] . identifier[makedirs] ( identifier[vim_jupyter_nbext] ) identifier[vim_less] = literal[string] . identifier[format] ( literal[string] . identifier[join] ([ identifier[os] . identifier[sep] , identifier[theme] ])) keyword[with] identifier[open] ( identifier[vim_style] , literal[string] ) keyword[as] identifier[vimstyle] : identifier[vim_less] += identifier[vimstyle] . identifier[read] ()+ literal[string] keyword[with] identifier[open] ( identifier[vimtemp] , literal[string] ) keyword[as] identifier[vtemp] : identifier[vtemp] . identifier[write] ( identifier[vim_less] ) identifier[os] . identifier[chdir] ( identifier[package_dir] ) identifier[vim_css] = identifier[lesscpy] . identifier[compile] ( identifier[vimtemp] ) identifier[vim_css] += literal[string] identifier[vim_custom_css] = identifier[os] . identifier[path] . identifier[join] ( identifier[vim_jupyter_nbext] , literal[string] ) keyword[with] identifier[open] ( identifier[vim_custom_css] , literal[string] ) keyword[as] identifier[vim_custom] : identifier[vim_custom] . identifier[write] ( identifier[vim_css] )
def set_vim_style(theme): """Add style and compatibility with vim notebook extension""" vim_jupyter_nbext = os.path.join(jupyter_nbext, 'vim_binding') if not os.path.isdir(vim_jupyter_nbext): os.makedirs(vim_jupyter_nbext) # depends on [control=['if'], data=[]] vim_less = '@import "styles{}";\n'.format(''.join([os.sep, theme])) with open(vim_style, 'r') as vimstyle: vim_less += vimstyle.read() + '\n' # depends on [control=['with'], data=['vimstyle']] with open(vimtemp, 'w') as vtemp: vtemp.write(vim_less) # depends on [control=['with'], data=['vtemp']] os.chdir(package_dir) vim_css = lesscpy.compile(vimtemp) vim_css += '\n\n' # install vim_custom_css to ...nbextensions/vim_binding/vim_binding.css vim_custom_css = os.path.join(vim_jupyter_nbext, 'vim_binding.css') with open(vim_custom_css, 'w') as vim_custom: vim_custom.write(vim_css) # depends on [control=['with'], data=['vim_custom']]
def round(self, decimals=0, *args, **kwargs):
    """Round each element in the DataFrame.

    Args:
        decimals: The number of decimals to round to.

    Returns:
        A new DataFrame.
    """
    rounded = self._query_compiler.round(decimals=decimals, **kwargs)
    return self.__constructor__(query_compiler=rounded)
def function[round, parameter[self, decimals]]: constant[Round each element in the DataFrame. Args: decimals: The number of decimals to round to. Returns: A new DataFrame. ] return[call[name[self].__constructor__, parameter[]]]
keyword[def] identifier[round] ( identifier[self] , identifier[decimals] = literal[int] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[__constructor__] ( identifier[query_compiler] = identifier[self] . identifier[_query_compiler] . identifier[round] ( identifier[decimals] = identifier[decimals] ,** identifier[kwargs] ) )
def round(self, decimals=0, *args, **kwargs): """Round each element in the DataFrame. Args: decimals: The number of decimals to round to. Returns: A new DataFrame. """ return self.__constructor__(query_compiler=self._query_compiler.round(decimals=decimals, **kwargs))
def queue_declare(self, queue, durable, exclusive, auto_delete,
                  warn_if_exists=False, arguments=None):
    """Declare a named queue on the underlying channel.

    ``warn_if_exists`` is accepted for interface compatibility but is not
    forwarded to the channel.
    """
    return self.channel.queue_declare(
        queue=queue,
        durable=durable,
        exclusive=exclusive,
        auto_delete=auto_delete,
        arguments=arguments,
    )
def function[queue_declare, parameter[self, queue, durable, exclusive, auto_delete, warn_if_exists, arguments]]: constant[Declare a named queue.] return[call[name[self].channel.queue_declare, parameter[]]]
keyword[def] identifier[queue_declare] ( identifier[self] , identifier[queue] , identifier[durable] , identifier[exclusive] , identifier[auto_delete] , identifier[warn_if_exists] = keyword[False] , identifier[arguments] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[channel] . identifier[queue_declare] ( identifier[queue] = identifier[queue] , identifier[durable] = identifier[durable] , identifier[exclusive] = identifier[exclusive] , identifier[auto_delete] = identifier[auto_delete] , identifier[arguments] = identifier[arguments] )
def queue_declare(self, queue, durable, exclusive, auto_delete, warn_if_exists=False, arguments=None): """Declare a named queue.""" return self.channel.queue_declare(queue=queue, durable=durable, exclusive=exclusive, auto_delete=auto_delete, arguments=arguments)
def delete_queue(name, region, opts=None, user=None):
    '''
    Deletes a queue in the region.

    name
        Name of the SQS queue to delete

    region
        Name of the region to delete the queue from

    opts : None
        Any additional options to add to the command line

    user : None
        Run hg as a user other than what the minion runs as

    CLI Example:

        salt '*' aws_sqs.delete_queue <sqs queue> <region>
    '''
    url_map = _parse_queue_list(list_queues(region, opts, user))
    log.debug('map %s', url_map)
    if name not in url_map:
        # Unknown queue name: report failure without calling AWS.
        return {
            'retcode': 1,
            'stdout': '',
            'stderr': 'Delete failed',
        }
    _run_aws('delete-queue', region=region, opts=opts, user=user,
             **{'queue-url': url_map[name]})
    return {
        'retcode': 0,
        'stdout': '{0} deleted'.format(name),
        'stderr': '',
    }
def function[delete_queue, parameter[name, region, opts, user]]: constant[ Deletes a queue in the region. name Name of the SQS queue to deletes region Name of the region to delete the queue from opts : None Any additional options to add to the command line user : None Run hg as a user other than what the minion runs as CLI Example: salt '*' aws_sqs.delete_queue <sqs queue> <region> ] variable[queues] assign[=] call[name[list_queues], parameter[name[region], name[opts], name[user]]] variable[url_map] assign[=] call[name[_parse_queue_list], parameter[name[queues]]] call[name[log].debug, parameter[constant[map %s], name[url_map]]] if compare[name[name] in name[url_map]] begin[:] variable[delete] assign[=] dictionary[[<ast.Constant object at 0x7da1b2044940>], [<ast.Subscript object at 0x7da1b2046140>]] variable[rtn] assign[=] call[name[_run_aws], parameter[constant[delete-queue]]] variable[success] assign[=] constant[True] variable[err] assign[=] constant[] variable[out] assign[=] call[constant[{0} deleted].format, parameter[name[name]]] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21a86d0>, <ast.Constant object at 0x7da1b21a8880>, <ast.Constant object at 0x7da1b21aba90>], [<ast.IfExp object at 0x7da1b21ab310>, <ast.Name object at 0x7da1b21a8760>, <ast.Name object at 0x7da1b21a8610>]] return[name[ret]]
keyword[def] identifier[delete_queue] ( identifier[name] , identifier[region] , identifier[opts] = keyword[None] , identifier[user] = keyword[None] ): literal[string] identifier[queues] = identifier[list_queues] ( identifier[region] , identifier[opts] , identifier[user] ) identifier[url_map] = identifier[_parse_queue_list] ( identifier[queues] ) identifier[log] . identifier[debug] ( literal[string] , identifier[url_map] ) keyword[if] identifier[name] keyword[in] identifier[url_map] : identifier[delete] ={ literal[string] : identifier[url_map] [ identifier[name] ]} identifier[rtn] = identifier[_run_aws] ( literal[string] , identifier[region] = identifier[region] , identifier[opts] = identifier[opts] , identifier[user] = identifier[user] , ** identifier[delete] ) identifier[success] = keyword[True] identifier[err] = literal[string] identifier[out] = literal[string] . identifier[format] ( identifier[name] ) keyword[else] : identifier[out] = literal[string] identifier[err] = literal[string] identifier[success] = keyword[False] identifier[ret] ={ literal[string] : literal[int] keyword[if] identifier[success] keyword[else] literal[int] , literal[string] : identifier[out] , literal[string] : identifier[err] , } keyword[return] identifier[ret]
def delete_queue(name, region, opts=None, user=None): """ Deletes a queue in the region. name Name of the SQS queue to deletes region Name of the region to delete the queue from opts : None Any additional options to add to the command line user : None Run hg as a user other than what the minion runs as CLI Example: salt '*' aws_sqs.delete_queue <sqs queue> <region> """ queues = list_queues(region, opts, user) url_map = _parse_queue_list(queues) log.debug('map %s', url_map) if name in url_map: delete = {'queue-url': url_map[name]} rtn = _run_aws('delete-queue', region=region, opts=opts, user=user, **delete) success = True err = '' out = '{0} deleted'.format(name) # depends on [control=['if'], data=['name', 'url_map']] else: out = '' err = 'Delete failed' success = False ret = {'retcode': 0 if success else 1, 'stdout': out, 'stderr': err} return ret
def generate_proxy(
        prefix, base_url='', verify_ssl=True, middleware=None,
        append_middleware=None, cert=None, timeout=None):
    """Generate a ProxyClass based view that uses the passed base_url."""
    # Start from the explicit middleware chain (or HttpProxy's default),
    # then append any extras.
    chain = list(middleware) if middleware else list(HttpProxy.proxy_middleware)
    if append_middleware:
        chain.extend(append_middleware)
    attrs = {
        'base_url': base_url,
        'reverse_urls': [(prefix, base_url)],
        'verify_ssl': verify_ssl,
        'proxy_middleware': chain,
        'cert': cert,
        'timeout': timeout,
    }
    return type('ProxyClass', (HttpProxy,), attrs)
def function[generate_proxy, parameter[prefix, base_url, verify_ssl, middleware, append_middleware, cert, timeout]]: constant[Generate a ProxyClass based view that uses the passed base_url.] variable[middleware] assign[=] call[name[list], parameter[<ast.BoolOp object at 0x7da18f58cbe0>]] <ast.AugAssign object at 0x7da18f58e7a0> return[call[name[type], parameter[constant[ProxyClass], tuple[[<ast.Name object at 0x7da18f58ebf0>]], dictionary[[<ast.Constant object at 0x7da18f58e0b0>, <ast.Constant object at 0x7da18f58ca90>, <ast.Constant object at 0x7da18f58e3e0>, <ast.Constant object at 0x7da18f58f3d0>, <ast.Constant object at 0x7da18f58f3a0>, <ast.Constant object at 0x7da18f58fa30>], [<ast.Name object at 0x7da18f58f1c0>, <ast.List object at 0x7da18f58fca0>, <ast.Name object at 0x7da18f58f220>, <ast.Name object at 0x7da18f58c610>, <ast.Name object at 0x7da18f58c7f0>, <ast.Name object at 0x7da18f58fe20>]]]]]
keyword[def] identifier[generate_proxy] ( identifier[prefix] , identifier[base_url] = literal[string] , identifier[verify_ssl] = keyword[True] , identifier[middleware] = keyword[None] , identifier[append_middleware] = keyword[None] , identifier[cert] = keyword[None] , identifier[timeout] = keyword[None] ): literal[string] identifier[middleware] = identifier[list] ( identifier[middleware] keyword[or] identifier[HttpProxy] . identifier[proxy_middleware] ) identifier[middleware] += identifier[list] ( identifier[append_middleware] keyword[or] []) keyword[return] identifier[type] ( literal[string] ,( identifier[HttpProxy] ,),{ literal[string] : identifier[base_url] , literal[string] :[( identifier[prefix] , identifier[base_url] )], literal[string] : identifier[verify_ssl] , literal[string] : identifier[middleware] , literal[string] : identifier[cert] , literal[string] : identifier[timeout] })
def generate_proxy(prefix, base_url='', verify_ssl=True, middleware=None, append_middleware=None, cert=None, timeout=None): """Generate a ProxyClass based view that uses the passed base_url.""" middleware = list(middleware or HttpProxy.proxy_middleware) middleware += list(append_middleware or []) return type('ProxyClass', (HttpProxy,), {'base_url': base_url, 'reverse_urls': [(prefix, base_url)], 'verify_ssl': verify_ssl, 'proxy_middleware': middleware, 'cert': cert, 'timeout': timeout})
def main(): """Connect to a McDevice""" args = setup_parser().parse_args() host = getattr(args, "host") port = getattr(args, "port") ipv4 = socket.gethostbyname(host) interval = getattr(args, "interval") receiver = McDevice(ipv4, udp_port=port, mc_interval=interval) receiver.handle_status() # wait for UDP messages while True: time.sleep(0.2)
def function[main, parameter[]]: constant[Connect to a McDevice] variable[args] assign[=] call[call[name[setup_parser], parameter[]].parse_args, parameter[]] variable[host] assign[=] call[name[getattr], parameter[name[args], constant[host]]] variable[port] assign[=] call[name[getattr], parameter[name[args], constant[port]]] variable[ipv4] assign[=] call[name[socket].gethostbyname, parameter[name[host]]] variable[interval] assign[=] call[name[getattr], parameter[name[args], constant[interval]]] variable[receiver] assign[=] call[name[McDevice], parameter[name[ipv4]]] call[name[receiver].handle_status, parameter[]] while constant[True] begin[:] call[name[time].sleep, parameter[constant[0.2]]]
keyword[def] identifier[main] (): literal[string] identifier[args] = identifier[setup_parser] (). identifier[parse_args] () identifier[host] = identifier[getattr] ( identifier[args] , literal[string] ) identifier[port] = identifier[getattr] ( identifier[args] , literal[string] ) identifier[ipv4] = identifier[socket] . identifier[gethostbyname] ( identifier[host] ) identifier[interval] = identifier[getattr] ( identifier[args] , literal[string] ) identifier[receiver] = identifier[McDevice] ( identifier[ipv4] , identifier[udp_port] = identifier[port] , identifier[mc_interval] = identifier[interval] ) identifier[receiver] . identifier[handle_status] () keyword[while] keyword[True] : identifier[time] . identifier[sleep] ( literal[int] )
def main(): """Connect to a McDevice""" args = setup_parser().parse_args() host = getattr(args, 'host') port = getattr(args, 'port') ipv4 = socket.gethostbyname(host) interval = getattr(args, 'interval') receiver = McDevice(ipv4, udp_port=port, mc_interval=interval) receiver.handle_status() # wait for UDP messages while True: time.sleep(0.2) # depends on [control=['while'], data=[]]
def delete(self, workflow_id, email_id): """ Removes an individual Automation workflow email. :param workflow_id: The unique id for the Automation workflow. :type workflow_id: :py:class:`str` :param email_id: The unique id for the Automation workflow email. :type email_id: :py:class:`str` """ self.workflow_id = workflow_id self.email_id = email_id return self._mc_client._delete(url=self._build_path(workflow_id, 'emails', email_id))
def function[delete, parameter[self, workflow_id, email_id]]: constant[ Removes an individual Automation workflow email. :param workflow_id: The unique id for the Automation workflow. :type workflow_id: :py:class:`str` :param email_id: The unique id for the Automation workflow email. :type email_id: :py:class:`str` ] name[self].workflow_id assign[=] name[workflow_id] name[self].email_id assign[=] name[email_id] return[call[name[self]._mc_client._delete, parameter[]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[workflow_id] , identifier[email_id] ): literal[string] identifier[self] . identifier[workflow_id] = identifier[workflow_id] identifier[self] . identifier[email_id] = identifier[email_id] keyword[return] identifier[self] . identifier[_mc_client] . identifier[_delete] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[workflow_id] , literal[string] , identifier[email_id] ))
def delete(self, workflow_id, email_id): """ Removes an individual Automation workflow email. :param workflow_id: The unique id for the Automation workflow. :type workflow_id: :py:class:`str` :param email_id: The unique id for the Automation workflow email. :type email_id: :py:class:`str` """ self.workflow_id = workflow_id self.email_id = email_id return self._mc_client._delete(url=self._build_path(workflow_id, 'emails', email_id))
def register_event(self): """ 注册事件 """ event_bus = Environment.get_instance().event_bus event_bus.prepend_listener(EVENT.PRE_BEFORE_TRADING, self._pre_before_trading) event_bus.prepend_listener(EVENT.POST_SETTLEMENT, self._post_settlement)
def function[register_event, parameter[self]]: constant[ 注册事件 ] variable[event_bus] assign[=] call[name[Environment].get_instance, parameter[]].event_bus call[name[event_bus].prepend_listener, parameter[name[EVENT].PRE_BEFORE_TRADING, name[self]._pre_before_trading]] call[name[event_bus].prepend_listener, parameter[name[EVENT].POST_SETTLEMENT, name[self]._post_settlement]]
keyword[def] identifier[register_event] ( identifier[self] ): literal[string] identifier[event_bus] = identifier[Environment] . identifier[get_instance] (). identifier[event_bus] identifier[event_bus] . identifier[prepend_listener] ( identifier[EVENT] . identifier[PRE_BEFORE_TRADING] , identifier[self] . identifier[_pre_before_trading] ) identifier[event_bus] . identifier[prepend_listener] ( identifier[EVENT] . identifier[POST_SETTLEMENT] , identifier[self] . identifier[_post_settlement] )
def register_event(self): """ 注册事件 """ event_bus = Environment.get_instance().event_bus event_bus.prepend_listener(EVENT.PRE_BEFORE_TRADING, self._pre_before_trading) event_bus.prepend_listener(EVENT.POST_SETTLEMENT, self._post_settlement)
def handle_get_token(self, req): """Handles the various `request for token and service end point(s)` calls. There are various formats to support the various auth servers in the past. Examples:: GET <auth-prefix>/v1/<act>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/v1.0 X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> Values should be url encoded, "act%3Ausr" instead of "act:usr" for example; however, for backwards compatibility the colon may be included unencoded. On successful authentication, the response will have X-Auth-Token and X-Storage-Token set to the token to use with Swift and X-Storage-URL set to the URL to the default Swift cluster to use. The response body will be set to the account's services JSON object as described here:: {"storage": { # Represents the Swift storage service end points "default": "cluster1", # Indicates which cluster is the default "cluster1": "<URL to use with Swift>", # A Swift cluster that can be used with this account, # "cluster1" is the name of the cluster which is usually a # location indicator (like "dfw" for a datacenter region). "cluster2": "<URL to use with Swift>" # Another Swift cluster that can be used with this account, # there will always be at least one Swift cluster to use or # this whole "storage" dict won't be included at all. }, "servers": { # Represents the Nova server service end points # Expected to be similar to the "storage" dict, but not # implemented yet. }, # Possibly other service dicts, not implemented yet. } One can also include an "X-Auth-New-Token: true" header to force issuing a new token and revoking any old token, even if it hasn't expired yet. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with data set as explained above. 
""" # Validate the request info try: pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, rest_with_last=True) except ValueError: return HTTPNotFound(request=req) if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': account = pathsegs[1] user = req.headers.get('x-storage-user') if not user: user = unquote(req.headers.get('x-auth-user', '')) if not user or ':' not in user: return HTTPUnauthorized(request=req) account2, user = user.split(':', 1) if account != account2: return HTTPUnauthorized(request=req) key = req.headers.get('x-storage-pass') if not key: key = unquote(req.headers.get('x-auth-key', '')) elif pathsegs[0] in ('auth', 'v1.0'): user = unquote(req.headers.get('x-auth-user', '')) if not user: user = req.headers.get('x-storage-user') if not user or ':' not in user: return HTTPUnauthorized(request=req) account, user = user.split(':', 1) key = unquote(req.headers.get('x-auth-key', '')) if not key: key = req.headers.get('x-storage-pass') else: return HTTPBadRequest(request=req) if not all((account, user, key)): return HTTPUnauthorized(request=req) if user == '.super_admin' and self.super_admin_key and \ key == self.super_admin_key: token = self.get_itoken(req.environ) url = '%s/%s.auth' % (self.dsc_url, self.reseller_prefix) return Response( request=req, content_type=CONTENT_TYPE_JSON, body=json.dumps({'storage': {'default': 'local', 'local': url}}), headers={'x-auth-token': token, 'x-storage-token': token, 'x-storage-url': url}) # Authenticate user path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPUnauthorized(request=req) if resp.status_int // 100 != 2: raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) user_detail = json.loads(resp.body) if not self.credentials_match(user_detail, key): return HTTPUnauthorized(request=req) # See if a token already exists and hasn't expired token = None 
expires = None candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: object_name = self._get_concealed_token(candidate_token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) delete_token = False try: if req.headers.get('x-auth-new-token', 'false').lower() in \ TRUE_VALUES: delete_token = True else: resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 == 2: token_detail = json.loads(resp.body) if token_detail['expires'] > time(): token = candidate_token expires = token_detail['expires'] else: delete_token = True elif resp.status_int != 404: raise Exception( 'Could not detect whether a token already exists: ' '%s %s' % (path, resp.status)) finally: if delete_token: self.make_pre_authed_request( req.environ, 'DELETE', path).get_response(self.app) memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, candidate_token) memcache_client.delete(memcache_key) # Create a new token if one didn't exist if not token: # Retrieve account id, we'll save this in the token path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: ' '%s %s' % (path, resp.status)) account_id = \ resp.headers['x-container-meta-account-id'] # Generate new token token = '%stk%s' % (self.reseller_prefix, uuid4().hex) # Save token info object_name = self._get_concealed_token(token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) try: token_life = min( int(req.headers.get('x-auth-token-lifetime', self.token_life)), self.max_token_life) except ValueError: token_life = self.token_life expires = int(time() + token_life) resp = self.make_pre_authed_request( req.environ, 'PUT', path, json.dumps({'account': account, 
'user': user, 'account_id': account_id, 'groups': user_detail['groups'], 'expires': expires})).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create new token: %s %s' % (path, resp.status)) # Record the token with the user info for future use. path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request( req.environ, 'POST', path, headers={'X-Object-Meta-Auth-Token': token} ).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save new token: %s %s' % (path, resp.status)) # Get the services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request( req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) detail = json.loads(resp.body) url = detail['storage'][detail['storage']['default']] return Response( request=req, body=resp.body, content_type=CONTENT_TYPE_JSON, headers={'x-auth-token': token, 'x-storage-token': token, 'x-auth-token-expires': str(int(expires - time())), 'x-storage-url': url})
def function[handle_get_token, parameter[self, req]]: constant[Handles the various `request for token and service end point(s)` calls. There are various formats to support the various auth servers in the past. Examples:: GET <auth-prefix>/v1/<act>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/v1.0 X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> Values should be url encoded, "act%3Ausr" instead of "act:usr" for example; however, for backwards compatibility the colon may be included unencoded. On successful authentication, the response will have X-Auth-Token and X-Storage-Token set to the token to use with Swift and X-Storage-URL set to the URL to the default Swift cluster to use. The response body will be set to the account's services JSON object as described here:: {"storage": { # Represents the Swift storage service end points "default": "cluster1", # Indicates which cluster is the default "cluster1": "<URL to use with Swift>", # A Swift cluster that can be used with this account, # "cluster1" is the name of the cluster which is usually a # location indicator (like "dfw" for a datacenter region). "cluster2": "<URL to use with Swift>" # Another Swift cluster that can be used with this account, # there will always be at least one Swift cluster to use or # this whole "storage" dict won't be included at all. }, "servers": { # Represents the Nova server service end points # Expected to be similar to the "storage" dict, but not # implemented yet. }, # Possibly other service dicts, not implemented yet. } One can also include an "X-Auth-New-Token: true" header to force issuing a new token and revoking any old token, even if it hasn't expired yet. :param req: The swob.Request to process. 
:returns: swob.Response, 2xx on success with data set as explained above. ] <ast.Try object at 0x7da1b04e2860> if <ast.BoolOp object at 0x7da1b04e2d40> begin[:] variable[account] assign[=] call[name[pathsegs]][constant[1]] variable[user] assign[=] call[name[req].headers.get, parameter[constant[x-storage-user]]] if <ast.UnaryOp object at 0x7da1b04e2bc0> begin[:] variable[user] assign[=] call[name[unquote], parameter[call[name[req].headers.get, parameter[constant[x-auth-user], constant[]]]]] if <ast.BoolOp object at 0x7da1b04e17b0> begin[:] return[call[name[HTTPUnauthorized], parameter[]]] <ast.Tuple object at 0x7da18f721cc0> assign[=] call[name[user].split, parameter[constant[:], constant[1]]] if compare[name[account] not_equal[!=] name[account2]] begin[:] return[call[name[HTTPUnauthorized], parameter[]]] variable[key] assign[=] call[name[req].headers.get, parameter[constant[x-storage-pass]]] if <ast.UnaryOp object at 0x7da18f721030> begin[:] variable[key] assign[=] call[name[unquote], parameter[call[name[req].headers.get, parameter[constant[x-auth-key], constant[]]]]] if <ast.UnaryOp object at 0x7da1b0491d80> begin[:] return[call[name[HTTPUnauthorized], parameter[]]] if <ast.BoolOp object at 0x7da1b0493eb0> begin[:] variable[token] assign[=] call[name[self].get_itoken, parameter[name[req].environ]] variable[url] assign[=] binary_operation[constant[%s/%s.auth] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0490eb0>, <ast.Attribute object at 0x7da1b0490d60>]]] return[call[name[Response], parameter[]]] variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0558f10>, <ast.Name object at 0x7da1b05593c0>, <ast.Name object at 0x7da1b05587f0>]]]]] variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[GET], name[path]]].get_response, parameter[name[self].app]] if 
compare[name[resp].status_int equal[==] constant[404]] begin[:] return[call[name[HTTPUnauthorized], parameter[]]] if compare[binary_operation[name[resp].status_int <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da1b055ab60> variable[user_detail] assign[=] call[name[json].loads, parameter[name[resp].body]] if <ast.UnaryOp object at 0x7da1b0559a50> begin[:] return[call[name[HTTPUnauthorized], parameter[]]] variable[token] assign[=] constant[None] variable[expires] assign[=] constant[None] variable[candidate_token] assign[=] call[name[resp].headers.get, parameter[constant[x-object-meta-auth-token]]] if name[candidate_token] begin[:] variable[object_name] assign[=] call[name[self]._get_concealed_token, parameter[name[candidate_token]]] variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/.token_%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0559d20>, <ast.Subscript object at 0x7da1b055ad10>, <ast.Name object at 0x7da1b055add0>]]]]] variable[delete_token] assign[=] constant[False] <ast.Try object at 0x7da1b0559a20> if <ast.UnaryOp object at 0x7da1b0559300> begin[:] variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b055b7f0>, <ast.Name object at 0x7da1b055be50>]]]]] variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[HEAD], name[path]]].get_response, parameter[name[self].app]] if compare[binary_operation[name[resp].status_int <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da20c6aafb0> variable[account_id] assign[=] call[name[resp].headers][constant[x-container-meta-account-id]] variable[token] assign[=] binary_operation[constant[%stk%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 
0x7da20c6a8340>, <ast.Attribute object at 0x7da20c6abd90>]]] variable[object_name] assign[=] call[name[self]._get_concealed_token, parameter[name[token]]] variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/.token_%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6a93c0>, <ast.Subscript object at 0x7da20c6abf10>, <ast.Name object at 0x7da20c6a92a0>]]]]] <ast.Try object at 0x7da20c6a9cf0> variable[expires] assign[=] call[name[int], parameter[binary_operation[call[name[time], parameter[]] + name[token_life]]]] variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[PUT], name[path], call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da20c6ab3a0>, <ast.Constant object at 0x7da20c6aa770>, <ast.Constant object at 0x7da20c6ab250>, <ast.Constant object at 0x7da20c6a9e70>, <ast.Constant object at 0x7da20c6aad10>], [<ast.Name object at 0x7da20c6aa920>, <ast.Name object at 0x7da20c6ab9a0>, <ast.Name object at 0x7da20c6ab490>, <ast.Subscript object at 0x7da20c6a9480>, <ast.Name object at 0x7da20c6ab070>]]]]]].get_response, parameter[name[self].app]] if compare[binary_operation[name[resp].status_int <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da20c6a8310> variable[path] assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6ab8e0>, <ast.Name object at 0x7da20c6aa020>, <ast.Name object at 0x7da20c6aa530>]]]]] variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[POST], name[path]]].get_response, parameter[name[self].app]] if compare[binary_operation[name[resp].status_int <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da1b0529f00> variable[path] 
assign[=] call[name[quote], parameter[binary_operation[constant[/v1/%s/%s/.services] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b052a6b0>, <ast.Name object at 0x7da1b0529e40>]]]]] variable[resp] assign[=] call[call[name[self].make_pre_authed_request, parameter[name[req].environ, constant[GET], name[path]]].get_response, parameter[name[self].app]] if compare[binary_operation[name[resp].status_int <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da1b052a470> variable[detail] assign[=] call[name[json].loads, parameter[name[resp].body]] variable[url] assign[=] call[call[name[detail]][constant[storage]]][call[call[name[detail]][constant[storage]]][constant[default]]] return[call[name[Response], parameter[]]]
keyword[def] identifier[handle_get_token] ( identifier[self] , identifier[req] ): literal[string] keyword[try] : identifier[pathsegs] = identifier[split_path] ( identifier[req] . identifier[path_info] , identifier[minsegs] = literal[int] , identifier[maxsegs] = literal[int] , identifier[rest_with_last] = keyword[True] ) keyword[except] identifier[ValueError] : keyword[return] identifier[HTTPNotFound] ( identifier[request] = identifier[req] ) keyword[if] identifier[pathsegs] [ literal[int] ]== literal[string] keyword[and] identifier[pathsegs] [ literal[int] ]== literal[string] : identifier[account] = identifier[pathsegs] [ literal[int] ] identifier[user] = identifier[req] . identifier[headers] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[user] : identifier[user] = identifier[unquote] ( identifier[req] . identifier[headers] . identifier[get] ( literal[string] , literal[string] )) keyword[if] keyword[not] identifier[user] keyword[or] literal[string] keyword[not] keyword[in] identifier[user] : keyword[return] identifier[HTTPUnauthorized] ( identifier[request] = identifier[req] ) identifier[account2] , identifier[user] = identifier[user] . identifier[split] ( literal[string] , literal[int] ) keyword[if] identifier[account] != identifier[account2] : keyword[return] identifier[HTTPUnauthorized] ( identifier[request] = identifier[req] ) identifier[key] = identifier[req] . identifier[headers] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[key] : identifier[key] = identifier[unquote] ( identifier[req] . identifier[headers] . identifier[get] ( literal[string] , literal[string] )) keyword[elif] identifier[pathsegs] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ): identifier[user] = identifier[unquote] ( identifier[req] . identifier[headers] . identifier[get] ( literal[string] , literal[string] )) keyword[if] keyword[not] identifier[user] : identifier[user] = identifier[req] . identifier[headers] . 
identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[user] keyword[or] literal[string] keyword[not] keyword[in] identifier[user] : keyword[return] identifier[HTTPUnauthorized] ( identifier[request] = identifier[req] ) identifier[account] , identifier[user] = identifier[user] . identifier[split] ( literal[string] , literal[int] ) identifier[key] = identifier[unquote] ( identifier[req] . identifier[headers] . identifier[get] ( literal[string] , literal[string] )) keyword[if] keyword[not] identifier[key] : identifier[key] = identifier[req] . identifier[headers] . identifier[get] ( literal[string] ) keyword[else] : keyword[return] identifier[HTTPBadRequest] ( identifier[request] = identifier[req] ) keyword[if] keyword[not] identifier[all] (( identifier[account] , identifier[user] , identifier[key] )): keyword[return] identifier[HTTPUnauthorized] ( identifier[request] = identifier[req] ) keyword[if] identifier[user] == literal[string] keyword[and] identifier[self] . identifier[super_admin_key] keyword[and] identifier[key] == identifier[self] . identifier[super_admin_key] : identifier[token] = identifier[self] . identifier[get_itoken] ( identifier[req] . identifier[environ] ) identifier[url] = literal[string] %( identifier[self] . identifier[dsc_url] , identifier[self] . identifier[reseller_prefix] ) keyword[return] identifier[Response] ( identifier[request] = identifier[req] , identifier[content_type] = identifier[CONTENT_TYPE_JSON] , identifier[body] = identifier[json] . identifier[dumps] ({ literal[string] :{ literal[string] : literal[string] , literal[string] : identifier[url] }}), identifier[headers] ={ literal[string] : identifier[token] , literal[string] : identifier[token] , literal[string] : identifier[url] }) identifier[path] = identifier[quote] ( literal[string] %( identifier[self] . identifier[auth_account] , identifier[account] , identifier[user] )) identifier[resp] = identifier[self] . 
identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] ) keyword[if] identifier[resp] . identifier[status_int] == literal[int] : keyword[return] identifier[HTTPUnauthorized] ( identifier[request] = identifier[req] ) keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] % ( identifier[path] , identifier[resp] . identifier[status] )) identifier[user_detail] = identifier[json] . identifier[loads] ( identifier[resp] . identifier[body] ) keyword[if] keyword[not] identifier[self] . identifier[credentials_match] ( identifier[user_detail] , identifier[key] ): keyword[return] identifier[HTTPUnauthorized] ( identifier[request] = identifier[req] ) identifier[token] = keyword[None] identifier[expires] = keyword[None] identifier[candidate_token] = identifier[resp] . identifier[headers] . identifier[get] ( literal[string] ) keyword[if] identifier[candidate_token] : identifier[object_name] = identifier[self] . identifier[_get_concealed_token] ( identifier[candidate_token] ) identifier[path] = identifier[quote] ( literal[string] % ( identifier[self] . identifier[auth_account] , identifier[object_name] [- literal[int] ], identifier[object_name] )) identifier[delete_token] = keyword[False] keyword[try] : keyword[if] identifier[req] . identifier[headers] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] () keyword[in] identifier[TRUE_VALUES] : identifier[delete_token] = keyword[True] keyword[else] : identifier[resp] = identifier[self] . identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] ) keyword[if] identifier[resp] . identifier[status_int] // literal[int] == literal[int] : identifier[token_detail] = identifier[json] . 
identifier[loads] ( identifier[resp] . identifier[body] ) keyword[if] identifier[token_detail] [ literal[string] ]> identifier[time] (): identifier[token] = identifier[candidate_token] identifier[expires] = identifier[token_detail] [ literal[string] ] keyword[else] : identifier[delete_token] = keyword[True] keyword[elif] identifier[resp] . identifier[status_int] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] literal[string] %( identifier[path] , identifier[resp] . identifier[status] )) keyword[finally] : keyword[if] identifier[delete_token] : identifier[self] . identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] ) identifier[memcache_client] = identifier[cache_from_env] ( identifier[req] . identifier[environ] ) keyword[if] identifier[memcache_client] : identifier[memcache_key] = literal[string] %( identifier[self] . identifier[reseller_prefix] , identifier[candidate_token] ) identifier[memcache_client] . identifier[delete] ( identifier[memcache_key] ) keyword[if] keyword[not] identifier[token] : identifier[path] = identifier[quote] ( literal[string] %( identifier[self] . identifier[auth_account] , identifier[account] )) identifier[resp] = identifier[self] . identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] ) keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] literal[string] %( identifier[path] , identifier[resp] . identifier[status] )) identifier[account_id] = identifier[resp] . identifier[headers] [ literal[string] ] identifier[token] = literal[string] %( identifier[self] . identifier[reseller_prefix] , identifier[uuid4] (). identifier[hex] ) identifier[object_name] = identifier[self] . 
identifier[_get_concealed_token] ( identifier[token] ) identifier[path] = identifier[quote] ( literal[string] % ( identifier[self] . identifier[auth_account] , identifier[object_name] [- literal[int] ], identifier[object_name] )) keyword[try] : identifier[token_life] = identifier[min] ( identifier[int] ( identifier[req] . identifier[headers] . identifier[get] ( literal[string] , identifier[self] . identifier[token_life] )), identifier[self] . identifier[max_token_life] ) keyword[except] identifier[ValueError] : identifier[token_life] = identifier[self] . identifier[token_life] identifier[expires] = identifier[int] ( identifier[time] ()+ identifier[token_life] ) identifier[resp] = identifier[self] . identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] , identifier[json] . identifier[dumps] ({ literal[string] : identifier[account] , literal[string] : identifier[user] , literal[string] : identifier[account_id] , literal[string] : identifier[user_detail] [ literal[string] ], literal[string] : identifier[expires] })). identifier[get_response] ( identifier[self] . identifier[app] ) keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] % ( identifier[path] , identifier[resp] . identifier[status] )) identifier[path] = identifier[quote] ( literal[string] %( identifier[self] . identifier[auth_account] , identifier[account] , identifier[user] )) identifier[resp] = identifier[self] . identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] , identifier[headers] ={ literal[string] : identifier[token] } ). identifier[get_response] ( identifier[self] . identifier[app] ) keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] % ( identifier[path] , identifier[resp] . 
identifier[status] )) identifier[path] = identifier[quote] ( literal[string] %( identifier[self] . identifier[auth_account] , identifier[account] )) identifier[resp] = identifier[self] . identifier[make_pre_authed_request] ( identifier[req] . identifier[environ] , literal[string] , identifier[path] ). identifier[get_response] ( identifier[self] . identifier[app] ) keyword[if] identifier[resp] . identifier[status_int] // literal[int] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] % ( identifier[path] , identifier[resp] . identifier[status] )) identifier[detail] = identifier[json] . identifier[loads] ( identifier[resp] . identifier[body] ) identifier[url] = identifier[detail] [ literal[string] ][ identifier[detail] [ literal[string] ][ literal[string] ]] keyword[return] identifier[Response] ( identifier[request] = identifier[req] , identifier[body] = identifier[resp] . identifier[body] , identifier[content_type] = identifier[CONTENT_TYPE_JSON] , identifier[headers] ={ literal[string] : identifier[token] , literal[string] : identifier[token] , literal[string] : identifier[str] ( identifier[int] ( identifier[expires] - identifier[time] ())), literal[string] : identifier[url] })
def handle_get_token(self, req): """Handles the various `request for token and service end point(s)` calls. There are various formats to support the various auth servers in the past. Examples:: GET <auth-prefix>/v1/<act>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/auth X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> GET <auth-prefix>/v1.0 X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr> X-Auth-Key: <key> or X-Storage-Pass: <key> Values should be url encoded, "act%3Ausr" instead of "act:usr" for example; however, for backwards compatibility the colon may be included unencoded. On successful authentication, the response will have X-Auth-Token and X-Storage-Token set to the token to use with Swift and X-Storage-URL set to the URL to the default Swift cluster to use. The response body will be set to the account's services JSON object as described here:: {"storage": { # Represents the Swift storage service end points "default": "cluster1", # Indicates which cluster is the default "cluster1": "<URL to use with Swift>", # A Swift cluster that can be used with this account, # "cluster1" is the name of the cluster which is usually a # location indicator (like "dfw" for a datacenter region). "cluster2": "<URL to use with Swift>" # Another Swift cluster that can be used with this account, # there will always be at least one Swift cluster to use or # this whole "storage" dict won't be included at all. }, "servers": { # Represents the Nova server service end points # Expected to be similar to the "storage" dict, but not # implemented yet. }, # Possibly other service dicts, not implemented yet. } One can also include an "X-Auth-New-Token: true" header to force issuing a new token and revoking any old token, even if it hasn't expired yet. :param req: The swob.Request to process. :returns: swob.Response, 2xx on success with data set as explained above. 
""" # Validate the request info try: pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, rest_with_last=True) # depends on [control=['try'], data=[]] except ValueError: return HTTPNotFound(request=req) # depends on [control=['except'], data=[]] if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': account = pathsegs[1] user = req.headers.get('x-storage-user') if not user: user = unquote(req.headers.get('x-auth-user', '')) if not user or ':' not in user: return HTTPUnauthorized(request=req) # depends on [control=['if'], data=[]] (account2, user) = user.split(':', 1) if account != account2: return HTTPUnauthorized(request=req) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] key = req.headers.get('x-storage-pass') if not key: key = unquote(req.headers.get('x-auth-key', '')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif pathsegs[0] in ('auth', 'v1.0'): user = unquote(req.headers.get('x-auth-user', '')) if not user: user = req.headers.get('x-storage-user') # depends on [control=['if'], data=[]] if not user or ':' not in user: return HTTPUnauthorized(request=req) # depends on [control=['if'], data=[]] (account, user) = user.split(':', 1) key = unquote(req.headers.get('x-auth-key', '')) if not key: key = req.headers.get('x-storage-pass') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: return HTTPBadRequest(request=req) if not all((account, user, key)): return HTTPUnauthorized(request=req) # depends on [control=['if'], data=[]] if user == '.super_admin' and self.super_admin_key and (key == self.super_admin_key): token = self.get_itoken(req.environ) url = '%s/%s.auth' % (self.dsc_url, self.reseller_prefix) return Response(request=req, content_type=CONTENT_TYPE_JSON, body=json.dumps({'storage': {'default': 'local', 'local': url}}), headers={'x-auth-token': token, 'x-storage-token': token, 'x-storage-url': url}) # depends on [control=['if'], data=[]] # Authenticate user 
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPUnauthorized(request=req) # depends on [control=['if'], data=[]] if resp.status_int // 100 != 2: raise Exception('Could not obtain user details: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] user_detail = json.loads(resp.body) if not self.credentials_match(user_detail, key): return HTTPUnauthorized(request=req) # depends on [control=['if'], data=[]] # See if a token already exists and hasn't expired token = None expires = None candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: object_name = self._get_concealed_token(candidate_token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) delete_token = False try: if req.headers.get('x-auth-new-token', 'false').lower() in TRUE_VALUES: delete_token = True # depends on [control=['if'], data=[]] else: resp = self.make_pre_authed_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 == 2: token_detail = json.loads(resp.body) if token_detail['expires'] > time(): token = candidate_token expires = token_detail['expires'] # depends on [control=['if'], data=[]] else: delete_token = True # depends on [control=['if'], data=[]] elif resp.status_int != 404: raise Exception('Could not detect whether a token already exists: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: if delete_token: self.make_pre_authed_request(req.environ, 'DELETE', path).get_response(self.app) memcache_client = cache_from_env(req.environ) if memcache_client: memcache_key = '%s/auth/%s' % (self.reseller_prefix, candidate_token) memcache_client.delete(memcache_key) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Create a 
new token if one didn't exist if not token: # Retrieve account id, we'll save this in the token path = quote('/v1/%s/%s' % (self.auth_account, account)) resp = self.make_pre_authed_request(req.environ, 'HEAD', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not retrieve account id value: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] account_id = resp.headers['x-container-meta-account-id'] # Generate new token token = '%stk%s' % (self.reseller_prefix, uuid4().hex) # Save token info object_name = self._get_concealed_token(token) path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, object_name[-1], object_name)) try: token_life = min(int(req.headers.get('x-auth-token-lifetime', self.token_life)), self.max_token_life) # depends on [control=['try'], data=[]] except ValueError: token_life = self.token_life # depends on [control=['except'], data=[]] expires = int(time() + token_life) resp = self.make_pre_authed_request(req.environ, 'PUT', path, json.dumps({'account': account, 'user': user, 'account_id': account_id, 'groups': user_detail['groups'], 'expires': expires})).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create new token: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] # Record the token with the user info for future use. 
path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_pre_authed_request(req.environ, 'POST', path, headers={'X-Object-Meta-Auth-Token': token}).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save new token: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Get the services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_pre_authed_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) # depends on [control=['if'], data=[]] detail = json.loads(resp.body) url = detail['storage'][detail['storage']['default']] return Response(request=req, body=resp.body, content_type=CONTENT_TYPE_JSON, headers={'x-auth-token': token, 'x-storage-token': token, 'x-auth-token-expires': str(int(expires - time())), 'x-storage-url': url})
def aseg_on_mri(mri_spec, aseg_spec, alpha_mri=1.0, alpha_seg=1.0,
                num_rows=2, num_cols=6, rescale_method='global',
                aseg_cmap='freesurfer', sub_cortical=False, annot=None,
                padding=5, bkground_thresh=0.05, output_path=None,
                figsize=None, **kwargs):
    """Produces a collage of various slices from different orientations in the given 3D image"""

    num_rows, num_cols, padding = check_params(num_rows, num_cols, padding)

    # Load both volumes and trim them to the segmentation's extents (plus padding).
    mri = read_image(mri_spec, bkground_thresh=bkground_thresh)
    seg = read_image(aseg_spec, bkground_thresh=0)
    mri, seg = crop_to_seg_extents(mri, seg, padding)

    slices = pick_slices(seg, num_rows * num_cols)

    plt.style.use('dark_background')

    num_axes = 3  # one band of rows per anatomical viewing axis
    if figsize is None:
        figsize = [5 * num_axes * num_rows, 5 * num_cols]
    fig, ax = plt.subplots(num_axes * num_rows, num_cols, figsize=figsize)

    # displaying some annotation text if provided
    if annot is not None:
        fig.suptitle(annot, backgroundcolor='black', color='g')

    display_params_mri = dict(interpolation='none', aspect='equal', origin='lower',
                              cmap='gray', alpha=alpha_mri,
                              vmin=mri.min(), vmax=mri.max())
    display_params_seg = dict(interpolation='none', aspect='equal', origin='lower',
                              alpha=alpha_seg)

    # Map segmentation label values onto the FreeSurfer color table.
    normalize_labels = colors.Normalize(vmin=seg.min(), vmax=seg.max(), clip=True)
    label_mapper = cm.ScalarMappable(norm=normalize_labels,
                                     cmap=get_freesurfer_cmap(sub_cortical))

    flat_axes = ax.flatten()
    panel = 0
    for dim_index in range(3):
        for slice_num in slices[dim_index]:
            plt.sca(flat_axes[panel])
            panel += 1
            mri_slice = get_axis(mri, dim_index, slice_num)
            seg_slice = get_axis(seg, dim_index, slice_num)
            # Draw the colorized segmentation first, then blend the MRI on
            # top of it via its alpha channel.
            plt.imshow(label_mapper.to_rgba(seg_slice), **display_params_seg)
            plt.imshow(mri_slice, **display_params_mri)
            plt.axis('off')

    plt.subplots_adjust(left=0.01, right=0.99, bottom=0.01, top=0.99,
                        wspace=0.05, hspace=0.02)

    if output_path is not None:
        output_path = output_path.replace(' ', '_')
        fig.savefig(output_path + '.png', bbox_inches='tight')

    return fig
def function[aseg_on_mri, parameter[mri_spec, aseg_spec, alpha_mri, alpha_seg, num_rows, num_cols, rescale_method, aseg_cmap, sub_cortical, annot, padding, bkground_thresh, output_path, figsize]]: constant[Produces a collage of various slices from different orientations in the given 3D image] <ast.Tuple object at 0x7da20c794a60> assign[=] call[name[check_params], parameter[name[num_rows], name[num_cols], name[padding]]] variable[mri] assign[=] call[name[read_image], parameter[name[mri_spec]]] variable[seg] assign[=] call[name[read_image], parameter[name[aseg_spec]]] <ast.Tuple object at 0x7da20c795960> assign[=] call[name[crop_to_seg_extents], parameter[name[mri], name[seg], name[padding]]] variable[num_slices_per_view] assign[=] binary_operation[name[num_rows] * name[num_cols]] variable[slices] assign[=] call[name[pick_slices], parameter[name[seg], name[num_slices_per_view]]] call[name[plt].style.use, parameter[constant[dark_background]]] variable[num_axes] assign[=] constant[3] if compare[name[figsize] is constant[None]] begin[:] variable[figsize] assign[=] list[[<ast.BinOp object at 0x7da20c794670>, <ast.BinOp object at 0x7da20c796aa0>]] <ast.Tuple object at 0x7da20c795600> assign[=] call[name[plt].subplots, parameter[binary_operation[name[num_axes] * name[num_rows]], name[num_cols]]] if compare[name[annot] is_not constant[None]] begin[:] call[name[fig].suptitle, parameter[name[annot]]] variable[display_params_mri] assign[=] call[name[dict], parameter[]] variable[display_params_seg] assign[=] call[name[dict], parameter[]] variable[normalize_labels] assign[=] call[name[colors].Normalize, parameter[]] variable[fs_cmap] assign[=] call[name[get_freesurfer_cmap], parameter[name[sub_cortical]]] variable[label_mapper] assign[=] call[name[cm].ScalarMappable, parameter[]] variable[ax] assign[=] call[name[ax].flatten, parameter[]] variable[ax_counter] assign[=] constant[0] for taget[name[dim_index]] in starred[call[name[range], parameter[constant[3]]]] begin[:] for 
taget[name[slice_num]] in starred[call[name[slices]][name[dim_index]]] begin[:] call[name[plt].sca, parameter[call[name[ax]][name[ax_counter]]]] variable[ax_counter] assign[=] binary_operation[name[ax_counter] + constant[1]] variable[slice_mri] assign[=] call[name[get_axis], parameter[name[mri], name[dim_index], name[slice_num]]] variable[slice_seg] assign[=] call[name[get_axis], parameter[name[seg], name[dim_index], name[slice_num]]] variable[seg_rgb] assign[=] call[name[label_mapper].to_rgba, parameter[name[slice_seg]]] call[name[plt].imshow, parameter[name[seg_rgb]]] call[name[plt].imshow, parameter[name[slice_mri]]] call[name[plt].axis, parameter[constant[off]]] call[name[plt].subplots_adjust, parameter[]] if compare[name[output_path] is_not constant[None]] begin[:] variable[output_path] assign[=] call[name[output_path].replace, parameter[constant[ ], constant[_]]] call[name[fig].savefig, parameter[binary_operation[name[output_path] + constant[.png]]]] return[name[fig]]
keyword[def] identifier[aseg_on_mri] ( identifier[mri_spec] , identifier[aseg_spec] , identifier[alpha_mri] = literal[int] , identifier[alpha_seg] = literal[int] , identifier[num_rows] = literal[int] , identifier[num_cols] = literal[int] , identifier[rescale_method] = literal[string] , identifier[aseg_cmap] = literal[string] , identifier[sub_cortical] = keyword[False] , identifier[annot] = keyword[None] , identifier[padding] = literal[int] , identifier[bkground_thresh] = literal[int] , identifier[output_path] = keyword[None] , identifier[figsize] = keyword[None] , ** identifier[kwargs] ): literal[string] identifier[num_rows] , identifier[num_cols] , identifier[padding] = identifier[check_params] ( identifier[num_rows] , identifier[num_cols] , identifier[padding] ) identifier[mri] = identifier[read_image] ( identifier[mri_spec] , identifier[bkground_thresh] = identifier[bkground_thresh] ) identifier[seg] = identifier[read_image] ( identifier[aseg_spec] , identifier[bkground_thresh] = literal[int] ) identifier[mri] , identifier[seg] = identifier[crop_to_seg_extents] ( identifier[mri] , identifier[seg] , identifier[padding] ) identifier[num_slices_per_view] = identifier[num_rows] * identifier[num_cols] identifier[slices] = identifier[pick_slices] ( identifier[seg] , identifier[num_slices_per_view] ) identifier[plt] . identifier[style] . identifier[use] ( literal[string] ) identifier[num_axes] = literal[int] keyword[if] identifier[figsize] keyword[is] keyword[None] : identifier[figsize] =[ literal[int] * identifier[num_axes] * identifier[num_rows] , literal[int] * identifier[num_cols] ] identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[num_axes] * identifier[num_rows] , identifier[num_cols] , identifier[figsize] = identifier[figsize] ) keyword[if] identifier[annot] keyword[is] keyword[not] keyword[None] : identifier[fig] . 
identifier[suptitle] ( identifier[annot] , identifier[backgroundcolor] = literal[string] , identifier[color] = literal[string] ) identifier[display_params_mri] = identifier[dict] ( identifier[interpolation] = literal[string] , identifier[aspect] = literal[string] , identifier[origin] = literal[string] , identifier[cmap] = literal[string] , identifier[alpha] = identifier[alpha_mri] , identifier[vmin] = identifier[mri] . identifier[min] (), identifier[vmax] = identifier[mri] . identifier[max] ()) identifier[display_params_seg] = identifier[dict] ( identifier[interpolation] = literal[string] , identifier[aspect] = literal[string] , identifier[origin] = literal[string] , identifier[alpha] = identifier[alpha_seg] ) identifier[normalize_labels] = identifier[colors] . identifier[Normalize] ( identifier[vmin] = identifier[seg] . identifier[min] (), identifier[vmax] = identifier[seg] . identifier[max] (), identifier[clip] = keyword[True] ) identifier[fs_cmap] = identifier[get_freesurfer_cmap] ( identifier[sub_cortical] ) identifier[label_mapper] = identifier[cm] . identifier[ScalarMappable] ( identifier[norm] = identifier[normalize_labels] , identifier[cmap] = identifier[fs_cmap] ) identifier[ax] = identifier[ax] . identifier[flatten] () identifier[ax_counter] = literal[int] keyword[for] identifier[dim_index] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[slice_num] keyword[in] identifier[slices] [ identifier[dim_index] ]: identifier[plt] . identifier[sca] ( identifier[ax] [ identifier[ax_counter] ]) identifier[ax_counter] = identifier[ax_counter] + literal[int] identifier[slice_mri] = identifier[get_axis] ( identifier[mri] , identifier[dim_index] , identifier[slice_num] ) identifier[slice_seg] = identifier[get_axis] ( identifier[seg] , identifier[dim_index] , identifier[slice_num] ) identifier[seg_rgb] = identifier[label_mapper] . identifier[to_rgba] ( identifier[slice_seg] ) identifier[plt] . 
identifier[imshow] ( identifier[seg_rgb] ,** identifier[display_params_seg] ) identifier[plt] . identifier[imshow] ( identifier[slice_mri] ,** identifier[display_params_mri] ) identifier[plt] . identifier[axis] ( literal[string] ) identifier[plt] . identifier[subplots_adjust] ( identifier[left] = literal[int] , identifier[right] = literal[int] , identifier[bottom] = literal[int] , identifier[top] = literal[int] , identifier[wspace] = literal[int] , identifier[hspace] = literal[int] ) keyword[if] identifier[output_path] keyword[is] keyword[not] keyword[None] : identifier[output_path] = identifier[output_path] . identifier[replace] ( literal[string] , literal[string] ) identifier[fig] . identifier[savefig] ( identifier[output_path] + literal[string] , identifier[bbox_inches] = literal[string] ) keyword[return] identifier[fig]
def aseg_on_mri(mri_spec, aseg_spec, alpha_mri=1.0, alpha_seg=1.0, num_rows=2, num_cols=6, rescale_method='global', aseg_cmap='freesurfer', sub_cortical=False, annot=None, padding=5, bkground_thresh=0.05, output_path=None, figsize=None, **kwargs): """Produces a collage of various slices from different orientations in the given 3D image""" (num_rows, num_cols, padding) = check_params(num_rows, num_cols, padding) mri = read_image(mri_spec, bkground_thresh=bkground_thresh) seg = read_image(aseg_spec, bkground_thresh=0) (mri, seg) = crop_to_seg_extents(mri, seg, padding) num_slices_per_view = num_rows * num_cols slices = pick_slices(seg, num_slices_per_view) plt.style.use('dark_background') num_axes = 3 if figsize is None: figsize = [5 * num_axes * num_rows, 5 * num_cols] # depends on [control=['if'], data=['figsize']] (fig, ax) = plt.subplots(num_axes * num_rows, num_cols, figsize=figsize) # displaying some annotation text if provided if annot is not None: fig.suptitle(annot, backgroundcolor='black', color='g') # depends on [control=['if'], data=['annot']] display_params_mri = dict(interpolation='none', aspect='equal', origin='lower', cmap='gray', alpha=alpha_mri, vmin=mri.min(), vmax=mri.max()) display_params_seg = dict(interpolation='none', aspect='equal', origin='lower', alpha=alpha_seg) normalize_labels = colors.Normalize(vmin=seg.min(), vmax=seg.max(), clip=True) fs_cmap = get_freesurfer_cmap(sub_cortical) label_mapper = cm.ScalarMappable(norm=normalize_labels, cmap=fs_cmap) ax = ax.flatten() ax_counter = 0 for dim_index in range(3): for slice_num in slices[dim_index]: plt.sca(ax[ax_counter]) ax_counter = ax_counter + 1 slice_mri = get_axis(mri, dim_index, slice_num) slice_seg = get_axis(seg, dim_index, slice_num) # # masking data to set no-value pixels to transparent # seg_background = np.isclose(slice_seg, 0.0) # slice_seg = np.ma.masked_where(seg_background, slice_seg) # slice_mri = np.ma.masked_where(np.logical_not(seg_background), slice_mri) seg_rgb = 
label_mapper.to_rgba(slice_seg) plt.imshow(seg_rgb, **display_params_seg) plt.imshow(slice_mri, **display_params_mri) plt.axis('off') # depends on [control=['for'], data=['slice_num']] # depends on [control=['for'], data=['dim_index']] # plt.subplots_adjust(wspace=0.0, hspace=0.0) plt.subplots_adjust(left=0.01, right=0.99, bottom=0.01, top=0.99, wspace=0.05, hspace=0.02) # fig.tight_layout() if output_path is not None: output_path = output_path.replace(' ', '_') fig.savefig(output_path + '.png', bbox_inches='tight') # depends on [control=['if'], data=['output_path']] # plt.close() return fig
def kv(d):
    """Equivalent to dict.items().

    Usage::

        >>> for key, node in DictTree.kv(d):
        >>>     print(key, DictTree.getattr(node, "population"))
        MD 200000
        VA 100000
    """
    # Lazily yield every entry except the reserved metadata key.
    return ((name, node) for name, node in iteritems(d) if name != _meta)
def function[kv, parameter[d]]: constant[Equivalent to dict.items(). Usage:: >>> for key, node in DictTree.kv(d): >>> print(key, DictTree.getattr(node, "population")) MD 200000 VA 100000 ] return[<ast.GeneratorExp object at 0x7da1b1435cc0>]
keyword[def] identifier[kv] ( identifier[d] ): literal[string] keyword[return] (( identifier[key] , identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[d] ) keyword[if] identifier[key] != identifier[_meta] )
def kv(d): """Equivalent to dict.items(). Usage:: >>> for key, node in DictTree.kv(d): >>> print(key, DictTree.getattr(node, "population")) MD 200000 VA 100000 """ return ((key, value) for (key, value) in iteritems(d) if key != _meta)
def _set_affected_target_count_in_runtracker(self): """Sets the realized target count in the run tracker's daemon stats object.""" target_count = len(self.build_graph) self.run_tracker.pantsd_stats.set_affected_targets_size(target_count) return target_count
def function[_set_affected_target_count_in_runtracker, parameter[self]]: constant[Sets the realized target count in the run tracker's daemon stats object.] variable[target_count] assign[=] call[name[len], parameter[name[self].build_graph]] call[name[self].run_tracker.pantsd_stats.set_affected_targets_size, parameter[name[target_count]]] return[name[target_count]]
keyword[def] identifier[_set_affected_target_count_in_runtracker] ( identifier[self] ): literal[string] identifier[target_count] = identifier[len] ( identifier[self] . identifier[build_graph] ) identifier[self] . identifier[run_tracker] . identifier[pantsd_stats] . identifier[set_affected_targets_size] ( identifier[target_count] ) keyword[return] identifier[target_count]
def _set_affected_target_count_in_runtracker(self): """Sets the realized target count in the run tracker's daemon stats object.""" target_count = len(self.build_graph) self.run_tracker.pantsd_stats.set_affected_targets_size(target_count) return target_count
def dest(self):
    """Create link destination details."""
    # A link detached from its page cannot be resolved.
    if hasattr(self, "parent") and self.parent is None:
        raise ValueError("orphaned object: parent is None")
    doc = self.parent.parent
    if doc.isClosed or doc.isEncrypted:
        raise ValueError("operation illegal for closed / encrypted doc")
    # External targets and in-document '#' fragments carry no resolvable URI.
    points_elsewhere = self.isExternal or self.uri.startswith("#")
    uri = None if points_elsewhere else doc.resolveLink(self.uri)
    return linkDest(self, uri)
def function[dest, parameter[self]]: constant[Create link destination details.] if <ast.BoolOp object at 0x7da18c4cd9c0> begin[:] <ast.Raise object at 0x7da1b26acc10> if <ast.BoolOp object at 0x7da1b26ac6a0> begin[:] <ast.Raise object at 0x7da1b26ad3c0> variable[doc] assign[=] name[self].parent.parent if <ast.BoolOp object at 0x7da1b26ae920> begin[:] variable[uri] assign[=] constant[None] return[call[name[linkDest], parameter[name[self], name[uri]]]]
keyword[def] identifier[dest] ( identifier[self] ): literal[string] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[parent] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[self] . identifier[parent] . identifier[parent] . identifier[isClosed] keyword[or] identifier[self] . identifier[parent] . identifier[parent] . identifier[isEncrypted] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[doc] = identifier[self] . identifier[parent] . identifier[parent] keyword[if] identifier[self] . identifier[isExternal] keyword[or] identifier[self] . identifier[uri] . identifier[startswith] ( literal[string] ): identifier[uri] = keyword[None] keyword[else] : identifier[uri] = identifier[doc] . identifier[resolveLink] ( identifier[self] . identifier[uri] ) keyword[return] identifier[linkDest] ( identifier[self] , identifier[uri] )
def dest(self): """Create link destination details.""" if hasattr(self, 'parent') and self.parent is None: raise ValueError('orphaned object: parent is None') # depends on [control=['if'], data=[]] if self.parent.parent.isClosed or self.parent.parent.isEncrypted: raise ValueError('operation illegal for closed / encrypted doc') # depends on [control=['if'], data=[]] doc = self.parent.parent if self.isExternal or self.uri.startswith('#'): uri = None # depends on [control=['if'], data=[]] else: uri = doc.resolveLink(self.uri) return linkDest(self, uri)
def get_qr(self, filename=None):
    """Get pairing QR code from client"""
    # A stale session shows a reload prompt; refresh it before capturing.
    if "Click to reload QR code" in self.driver.page_source:
        self.reload_qr()
    qr_element = self.driver.find_element_by_css_selector(self._SELECTORS['qrCode'])
    if filename is None:
        # No destination given: capture into a fresh temp file named after the user.
        descriptor, png_path = tempfile.mkstemp(prefix=self.username, suffix='.png')
    else:
        # Ensure the requested file exists, keeping an absolute path to it.
        descriptor = os.open(filename, os.O_RDWR | os.O_CREAT)
        png_path = os.path.abspath(filename)
    self.logger.debug("QRcode image saved at %s" % png_path)
    qr_element.screenshot(png_path)
    os.close(descriptor)
    return png_path
def function[get_qr, parameter[self, filename]]: constant[Get pairing QR code from client] if compare[constant[Click to reload QR code] in name[self].driver.page_source] begin[:] call[name[self].reload_qr, parameter[]] variable[qr] assign[=] call[name[self].driver.find_element_by_css_selector, parameter[call[name[self]._SELECTORS][constant[qrCode]]]] if compare[name[filename] is constant[None]] begin[:] <ast.Tuple object at 0x7da1b21e2020> assign[=] call[name[tempfile].mkstemp, parameter[]] call[name[self].logger.debug, parameter[binary_operation[constant[QRcode image saved at %s] <ast.Mod object at 0x7da2590d6920> name[fn_png]]]] call[name[qr].screenshot, parameter[name[fn_png]]] call[name[os].close, parameter[name[fd]]] return[name[fn_png]]
keyword[def] identifier[get_qr] ( identifier[self] , identifier[filename] = keyword[None] ): literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[driver] . identifier[page_source] : identifier[self] . identifier[reload_qr] () identifier[qr] = identifier[self] . identifier[driver] . identifier[find_element_by_css_selector] ( identifier[self] . identifier[_SELECTORS] [ literal[string] ]) keyword[if] identifier[filename] keyword[is] keyword[None] : identifier[fd] , identifier[fn_png] = identifier[tempfile] . identifier[mkstemp] ( identifier[prefix] = identifier[self] . identifier[username] , identifier[suffix] = literal[string] ) keyword[else] : identifier[fd] = identifier[os] . identifier[open] ( identifier[filename] , identifier[os] . identifier[O_RDWR] | identifier[os] . identifier[O_CREAT] ) identifier[fn_png] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] % identifier[fn_png] ) identifier[qr] . identifier[screenshot] ( identifier[fn_png] ) identifier[os] . identifier[close] ( identifier[fd] ) keyword[return] identifier[fn_png]
def get_qr(self, filename=None): """Get pairing QR code from client""" if 'Click to reload QR code' in self.driver.page_source: self.reload_qr() # depends on [control=['if'], data=[]] qr = self.driver.find_element_by_css_selector(self._SELECTORS['qrCode']) if filename is None: (fd, fn_png) = tempfile.mkstemp(prefix=self.username, suffix='.png') # depends on [control=['if'], data=[]] else: fd = os.open(filename, os.O_RDWR | os.O_CREAT) fn_png = os.path.abspath(filename) self.logger.debug('QRcode image saved at %s' % fn_png) qr.screenshot(fn_png) os.close(fd) return fn_png
def add_aliases(self_or_cls, **kwargs):
    """
    Conveniently add new aliases as keyword arguments.

    For instance you can add a new alias with add_aliases(short='Longer string')
    """
    # Aliases are stored inverted: the long form is the key, the short name the value.
    for short_name, long_form in kwargs.items():
        self_or_cls.aliases[long_form] = short_name
def function[add_aliases, parameter[self_or_cls]]: constant[ Conveniently add new aliases as keyword arguments. For instance you can add a new alias with add_aliases(short='Longer string') ] call[name[self_or_cls].aliases.update, parameter[<ast.DictComp object at 0x7da18dc06a40>]]
keyword[def] identifier[add_aliases] ( identifier[self_or_cls] ,** identifier[kwargs] ): literal[string] identifier[self_or_cls] . identifier[aliases] . identifier[update] ({ identifier[v] : identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ()})
def add_aliases(self_or_cls, **kwargs): """ Conveniently add new aliases as keyword arguments. For instance you can add a new alias with add_aliases(short='Longer string') """ self_or_cls.aliases.update({v: k for (k, v) in kwargs.items()})
def std(self, ddof=1, *args, **kwargs):
    """
    Compute standard deviation of groups, excluding missing values.

    Parameters
    ----------
    ddof : integer, default 1
        Degrees of freedom.

    Returns
    -------
    The downsampled standard-deviation result, as produced by
    ``_downsample('std', ...)``.
    """
    # *args/**kwargs exist only for API compatibility; reject any that
    # were actually supplied before delegating.
    nv.validate_resampler_func('std', args, kwargs)
    # Delegate the grouped computation to the downsampling machinery.
    return self._downsample('std', ddof=ddof)
def function[std, parameter[self, ddof]]: constant[ Compute standard deviation of groups, excluding missing values. Parameters ---------- ddof : integer, default 1 Degrees of freedom. ] call[name[nv].validate_resampler_func, parameter[constant[std], name[args], name[kwargs]]] return[call[name[self]._downsample, parameter[constant[std]]]]
keyword[def] identifier[std] ( identifier[self] , identifier[ddof] = literal[int] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[nv] . identifier[validate_resampler_func] ( literal[string] , identifier[args] , identifier[kwargs] ) keyword[return] identifier[self] . identifier[_downsample] ( literal[string] , identifier[ddof] = identifier[ddof] )
def std(self, ddof=1, *args, **kwargs): """ Compute standard deviation of groups, excluding missing values. Parameters ---------- ddof : integer, default 1 Degrees of freedom. """ nv.validate_resampler_func('std', args, kwargs) return self._downsample('std', ddof=ddof)
def set_label(self, label, lang=None):
    """Sets the `label` metadata property on your Thing/Point.  Only one label is allowed
    per language, so any other labels in this language are removed before adding this one

    Raises `ValueError` containing an error message if the parameters fail validation

    `label` (mandatory) (string) the new text of the label

    `lang` (optional) (string) The two-character ISO 639-1 language code to use for your label.
        None means use the default language for your agent.
        See [Config](./Config.m.html#IoticAgent.IOT.Config.Config.__init__)
    """
    checked_label = Validation.label_check_convert(label)
    checked_lang = Validation.lang_check_convert(lang, default=self._default_lang)
    # Enforce one-label-per-language: drop any existing label in this
    # language before adding the new one.
    self.delete_label(checked_lang)
    # Attach the new label literal to this entity's node in the metadata graph.
    node = self._get_uuid_uriref()
    self._graph.add((node, self._labelPredicate, Literal(checked_label, checked_lang)))
def function[set_label, parameter[self, label, lang]]: constant[Sets the `label` metadata property on your Thing/Point. Only one label is allowed per language, so any other labels in this language are removed before adding this one Raises `ValueError` containing an error message if the parameters fail validation `label` (mandatory) (string) the new text of the label `lang` (optional) (string) The two-character ISO 639-1 language code to use for your label. None means use the default language for your agent. See [Config](./Config.m.html#IoticAgent.IOT.Config.Config.__init__) ] variable[label] assign[=] call[name[Validation].label_check_convert, parameter[name[label]]] variable[lang] assign[=] call[name[Validation].lang_check_convert, parameter[name[lang]]] call[name[self].delete_label, parameter[name[lang]]] variable[subj] assign[=] call[name[self]._get_uuid_uriref, parameter[]] call[name[self]._graph.add, parameter[tuple[[<ast.Name object at 0x7da1b1c62380>, <ast.Attribute object at 0x7da1b1c63070>, <ast.Call object at 0x7da1b1c61480>]]]]
keyword[def] identifier[set_label] ( identifier[self] , identifier[label] , identifier[lang] = keyword[None] ): literal[string] identifier[label] = identifier[Validation] . identifier[label_check_convert] ( identifier[label] ) identifier[lang] = identifier[Validation] . identifier[lang_check_convert] ( identifier[lang] , identifier[default] = identifier[self] . identifier[_default_lang] ) identifier[self] . identifier[delete_label] ( identifier[lang] ) identifier[subj] = identifier[self] . identifier[_get_uuid_uriref] () identifier[self] . identifier[_graph] . identifier[add] (( identifier[subj] , identifier[self] . identifier[_labelPredicate] , identifier[Literal] ( identifier[label] , identifier[lang] )))
def set_label(self, label, lang=None): """Sets the `label` metadata property on your Thing/Point. Only one label is allowed per language, so any other labels in this language are removed before adding this one Raises `ValueError` containing an error message if the parameters fail validation `label` (mandatory) (string) the new text of the label `lang` (optional) (string) The two-character ISO 639-1 language code to use for your label. None means use the default language for your agent. See [Config](./Config.m.html#IoticAgent.IOT.Config.Config.__init__) """ label = Validation.label_check_convert(label) lang = Validation.lang_check_convert(lang, default=self._default_lang) # remove any other labels with this language before adding self.delete_label(lang) subj = self._get_uuid_uriref() self._graph.add((subj, self._labelPredicate, Literal(label, lang)))
def search_media(self, mq={}, rq={}, limit=100, offset=0, sort=None, fields=None, fields_exclude=FIELDS_EXCLUDE_DEFAULT): """ mq Search Query in iDigBio Query Format, using Media Query Fields rq Search Query in iDigBio Query Format, using Record Query Fields sort field to sort on, pick from Media Query Fields fields a list of fields to return, specified using the fieldName parameter from Fields with type mediarecords fields_exclude a list of fields to exclude, specified using the fieldName parameter from Fields with type records limit max results offset skip results Returns idigbio record format (legacy api), plus additional top level keys with parsed index terms. Returns None on error. """ if fields is not None and fields_exclude is FIELDS_EXCLUDE_DEFAULT: fields_exclude = None return self._api_post("/v2/search/media", rq=rq, mq=mq, limit=limit, offset=offset, sort=sort, fields=fields, fields_exclude=fields_exclude)
def function[search_media, parameter[self, mq, rq, limit, offset, sort, fields, fields_exclude]]: constant[ mq Search Query in iDigBio Query Format, using Media Query Fields rq Search Query in iDigBio Query Format, using Record Query Fields sort field to sort on, pick from Media Query Fields fields a list of fields to return, specified using the fieldName parameter from Fields with type mediarecords fields_exclude a list of fields to exclude, specified using the fieldName parameter from Fields with type records limit max results offset skip results Returns idigbio record format (legacy api), plus additional top level keys with parsed index terms. Returns None on error. ] if <ast.BoolOp object at 0x7da1b0926ec0> begin[:] variable[fields_exclude] assign[=] constant[None] return[call[name[self]._api_post, parameter[constant[/v2/search/media]]]]
keyword[def] identifier[search_media] ( identifier[self] , identifier[mq] ={}, identifier[rq] ={}, identifier[limit] = literal[int] , identifier[offset] = literal[int] , identifier[sort] = keyword[None] , identifier[fields] = keyword[None] , identifier[fields_exclude] = identifier[FIELDS_EXCLUDE_DEFAULT] ): literal[string] keyword[if] identifier[fields] keyword[is] keyword[not] keyword[None] keyword[and] identifier[fields_exclude] keyword[is] identifier[FIELDS_EXCLUDE_DEFAULT] : identifier[fields_exclude] = keyword[None] keyword[return] identifier[self] . identifier[_api_post] ( literal[string] , identifier[rq] = identifier[rq] , identifier[mq] = identifier[mq] , identifier[limit] = identifier[limit] , identifier[offset] = identifier[offset] , identifier[sort] = identifier[sort] , identifier[fields] = identifier[fields] , identifier[fields_exclude] = identifier[fields_exclude] )
def search_media(self, mq={}, rq={}, limit=100, offset=0, sort=None, fields=None, fields_exclude=FIELDS_EXCLUDE_DEFAULT): """ mq Search Query in iDigBio Query Format, using Media Query Fields rq Search Query in iDigBio Query Format, using Record Query Fields sort field to sort on, pick from Media Query Fields fields a list of fields to return, specified using the fieldName parameter from Fields with type mediarecords fields_exclude a list of fields to exclude, specified using the fieldName parameter from Fields with type records limit max results offset skip results Returns idigbio record format (legacy api), plus additional top level keys with parsed index terms. Returns None on error. """ if fields is not None and fields_exclude is FIELDS_EXCLUDE_DEFAULT: fields_exclude = None # depends on [control=['if'], data=[]] return self._api_post('/v2/search/media', rq=rq, mq=mq, limit=limit, offset=offset, sort=sort, fields=fields, fields_exclude=fields_exclude)
def defer(self, timeout_seconds, callback): """This method allows to invoke the callback with specified arguments once the specified amount of time. :returns: EventHandle object. Call .remove() on it to cancel the event. """ priority = 1 event = self._scheduler.enter(timeout_seconds, priority, callback, ()) return self._make_sched_handle(lambda: event)
def function[defer, parameter[self, timeout_seconds, callback]]: constant[This method allows to invoke the callback with specified arguments once the specified amount of time. :returns: EventHandle object. Call .remove() on it to cancel the event. ] variable[priority] assign[=] constant[1] variable[event] assign[=] call[name[self]._scheduler.enter, parameter[name[timeout_seconds], name[priority], name[callback], tuple[[]]]] return[call[name[self]._make_sched_handle, parameter[<ast.Lambda object at 0x7da18bc73cd0>]]]
keyword[def] identifier[defer] ( identifier[self] , identifier[timeout_seconds] , identifier[callback] ): literal[string] identifier[priority] = literal[int] identifier[event] = identifier[self] . identifier[_scheduler] . identifier[enter] ( identifier[timeout_seconds] , identifier[priority] , identifier[callback] ,()) keyword[return] identifier[self] . identifier[_make_sched_handle] ( keyword[lambda] : identifier[event] )
def defer(self, timeout_seconds, callback): """This method allows to invoke the callback with specified arguments once the specified amount of time. :returns: EventHandle object. Call .remove() on it to cancel the event. """ priority = 1 event = self._scheduler.enter(timeout_seconds, priority, callback, ()) return self._make_sched_handle(lambda : event)
def startDrag(self, dropActions): """Reimplement Qt Method - handle drag event""" data = QMimeData() data.setUrls([QUrl(fname) for fname in self.get_selected_filenames()]) drag = QDrag(self) drag.setMimeData(data) drag.exec_()
def function[startDrag, parameter[self, dropActions]]: constant[Reimplement Qt Method - handle drag event] variable[data] assign[=] call[name[QMimeData], parameter[]] call[name[data].setUrls, parameter[<ast.ListComp object at 0x7da18f00c2b0>]] variable[drag] assign[=] call[name[QDrag], parameter[name[self]]] call[name[drag].setMimeData, parameter[name[data]]] call[name[drag].exec_, parameter[]]
keyword[def] identifier[startDrag] ( identifier[self] , identifier[dropActions] ): literal[string] identifier[data] = identifier[QMimeData] () identifier[data] . identifier[setUrls] ([ identifier[QUrl] ( identifier[fname] ) keyword[for] identifier[fname] keyword[in] identifier[self] . identifier[get_selected_filenames] ()]) identifier[drag] = identifier[QDrag] ( identifier[self] ) identifier[drag] . identifier[setMimeData] ( identifier[data] ) identifier[drag] . identifier[exec_] ()
def startDrag(self, dropActions): """Reimplement Qt Method - handle drag event""" data = QMimeData() data.setUrls([QUrl(fname) for fname in self.get_selected_filenames()]) drag = QDrag(self) drag.setMimeData(data) drag.exec_()
def paramiko_tunnel(lport, rport, server, remoteip='127.0.0.1', keyfile=None, password=None, timeout=60): """launch a tunner with paramiko in a subprocess. This should only be used when shell ssh is unavailable (e.g. Windows). This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. If you are familiar with ssh tunnels, this creates the tunnel: ssh server -L localhost:lport:remoteip:rport keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever. """ if paramiko is None: raise ImportError("Paramiko not available") if password is None: if not _try_passwordless_paramiko(server, keyfile): password = getpass("%s's password: "%(server)) p = Process(target=_paramiko_tunnel, args=(lport, rport, server, remoteip), kwargs=dict(keyfile=keyfile, password=password)) p.daemon=False p.start() atexit.register(_shutdown_process, p) return p
def function[paramiko_tunnel, parameter[lport, rport, server, remoteip, keyfile, password, timeout]]: constant[launch a tunner with paramiko in a subprocess. This should only be used when shell ssh is unavailable (e.g. Windows). This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. If you are familiar with ssh tunnels, this creates the tunnel: ssh server -L localhost:lport:remoteip:rport keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever. 
] if compare[name[paramiko] is constant[None]] begin[:] <ast.Raise object at 0x7da18ede68c0> if compare[name[password] is constant[None]] begin[:] if <ast.UnaryOp object at 0x7da18ede42b0> begin[:] variable[password] assign[=] call[name[getpass], parameter[binary_operation[constant[%s's password: ] <ast.Mod object at 0x7da2590d6920> name[server]]]] variable[p] assign[=] call[name[Process], parameter[]] name[p].daemon assign[=] constant[False] call[name[p].start, parameter[]] call[name[atexit].register, parameter[name[_shutdown_process], name[p]]] return[name[p]]
keyword[def] identifier[paramiko_tunnel] ( identifier[lport] , identifier[rport] , identifier[server] , identifier[remoteip] = literal[string] , identifier[keyfile] = keyword[None] , identifier[password] = keyword[None] , identifier[timeout] = literal[int] ): literal[string] keyword[if] identifier[paramiko] keyword[is] keyword[None] : keyword[raise] identifier[ImportError] ( literal[string] ) keyword[if] identifier[password] keyword[is] keyword[None] : keyword[if] keyword[not] identifier[_try_passwordless_paramiko] ( identifier[server] , identifier[keyfile] ): identifier[password] = identifier[getpass] ( literal[string] %( identifier[server] )) identifier[p] = identifier[Process] ( identifier[target] = identifier[_paramiko_tunnel] , identifier[args] =( identifier[lport] , identifier[rport] , identifier[server] , identifier[remoteip] ), identifier[kwargs] = identifier[dict] ( identifier[keyfile] = identifier[keyfile] , identifier[password] = identifier[password] )) identifier[p] . identifier[daemon] = keyword[False] identifier[p] . identifier[start] () identifier[atexit] . identifier[register] ( identifier[_shutdown_process] , identifier[p] ) keyword[return] identifier[p]
def paramiko_tunnel(lport, rport, server, remoteip='127.0.0.1', keyfile=None, password=None, timeout=60): """launch a tunner with paramiko in a subprocess. This should only be used when shell ssh is unavailable (e.g. Windows). This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. If you are familiar with ssh tunnels, this creates the tunnel: ssh server -L localhost:lport:remoteip:rport keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever. """ if paramiko is None: raise ImportError('Paramiko not available') # depends on [control=['if'], data=[]] if password is None: if not _try_passwordless_paramiko(server, keyfile): password = getpass("%s's password: " % server) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['password']] p = Process(target=_paramiko_tunnel, args=(lport, rport, server, remoteip), kwargs=dict(keyfile=keyfile, password=password)) p.daemon = False p.start() atexit.register(_shutdown_process, p) return p
def p_measure(self, program): """ measure : MEASURE primary ASSIGN primary """ program[0] = node.Measure([program[2], program[4]]) self.verify_reg(program[2], 'qreg') self.verify_reg(program[4], 'creg')
def function[p_measure, parameter[self, program]]: constant[ measure : MEASURE primary ASSIGN primary ] call[name[program]][constant[0]] assign[=] call[name[node].Measure, parameter[list[[<ast.Subscript object at 0x7da1b03821a0>, <ast.Subscript object at 0x7da1b0381ed0>]]]] call[name[self].verify_reg, parameter[call[name[program]][constant[2]], constant[qreg]]] call[name[self].verify_reg, parameter[call[name[program]][constant[4]], constant[creg]]]
keyword[def] identifier[p_measure] ( identifier[self] , identifier[program] ): literal[string] identifier[program] [ literal[int] ]= identifier[node] . identifier[Measure] ([ identifier[program] [ literal[int] ], identifier[program] [ literal[int] ]]) identifier[self] . identifier[verify_reg] ( identifier[program] [ literal[int] ], literal[string] ) identifier[self] . identifier[verify_reg] ( identifier[program] [ literal[int] ], literal[string] )
def p_measure(self, program): """ measure : MEASURE primary ASSIGN primary """ program[0] = node.Measure([program[2], program[4]]) self.verify_reg(program[2], 'qreg') self.verify_reg(program[4], 'creg')
def visit_and_update(self, visitor_fn): """Create an updated version (if needed) of BetweenClause via the visitor pattern.""" new_lower_bound = self.lower_bound.visit_and_update(visitor_fn) new_upper_bound = self.upper_bound.visit_and_update(visitor_fn) if new_lower_bound is not self.lower_bound or new_upper_bound is not self.upper_bound: return visitor_fn(BetweenClause(self.field, new_lower_bound, new_upper_bound)) else: return visitor_fn(self)
def function[visit_and_update, parameter[self, visitor_fn]]: constant[Create an updated version (if needed) of BetweenClause via the visitor pattern.] variable[new_lower_bound] assign[=] call[name[self].lower_bound.visit_and_update, parameter[name[visitor_fn]]] variable[new_upper_bound] assign[=] call[name[self].upper_bound.visit_and_update, parameter[name[visitor_fn]]] if <ast.BoolOp object at 0x7da1b170e680> begin[:] return[call[name[visitor_fn], parameter[call[name[BetweenClause], parameter[name[self].field, name[new_lower_bound], name[new_upper_bound]]]]]]
keyword[def] identifier[visit_and_update] ( identifier[self] , identifier[visitor_fn] ): literal[string] identifier[new_lower_bound] = identifier[self] . identifier[lower_bound] . identifier[visit_and_update] ( identifier[visitor_fn] ) identifier[new_upper_bound] = identifier[self] . identifier[upper_bound] . identifier[visit_and_update] ( identifier[visitor_fn] ) keyword[if] identifier[new_lower_bound] keyword[is] keyword[not] identifier[self] . identifier[lower_bound] keyword[or] identifier[new_upper_bound] keyword[is] keyword[not] identifier[self] . identifier[upper_bound] : keyword[return] identifier[visitor_fn] ( identifier[BetweenClause] ( identifier[self] . identifier[field] , identifier[new_lower_bound] , identifier[new_upper_bound] )) keyword[else] : keyword[return] identifier[visitor_fn] ( identifier[self] )
def visit_and_update(self, visitor_fn): """Create an updated version (if needed) of BetweenClause via the visitor pattern.""" new_lower_bound = self.lower_bound.visit_and_update(visitor_fn) new_upper_bound = self.upper_bound.visit_and_update(visitor_fn) if new_lower_bound is not self.lower_bound or new_upper_bound is not self.upper_bound: return visitor_fn(BetweenClause(self.field, new_lower_bound, new_upper_bound)) # depends on [control=['if'], data=[]] else: return visitor_fn(self)
def _cleanup_api(self): ''' Helper method to clean up resources and models if we detected a change in the swagger file for a stage ''' resources = __salt__['boto_apigateway.describe_api_resources'](restApiId=self.restApiId, **self._common_aws_args) if resources.get('resources'): res = resources.get('resources')[1:] res.reverse() for resource in res: delres = __salt__['boto_apigateway.delete_api_resources'](restApiId=self.restApiId, path=resource.get('path'), **self._common_aws_args) if not delres.get('deleted'): return delres models = __salt__['boto_apigateway.describe_api_models'](restApiId=self.restApiId, **self._common_aws_args) if models.get('models'): for model in models.get('models'): delres = __salt__['boto_apigateway.delete_api_model'](restApiId=self.restApiId, modelName=model.get('name'), **self._common_aws_args) if not delres.get('deleted'): return delres return {'deleted': True}
def function[_cleanup_api, parameter[self]]: constant[ Helper method to clean up resources and models if we detected a change in the swagger file for a stage ] variable[resources] assign[=] call[call[name[__salt__]][constant[boto_apigateway.describe_api_resources]], parameter[]] if call[name[resources].get, parameter[constant[resources]]] begin[:] variable[res] assign[=] call[call[name[resources].get, parameter[constant[resources]]]][<ast.Slice object at 0x7da1b217b9d0>] call[name[res].reverse, parameter[]] for taget[name[resource]] in starred[name[res]] begin[:] variable[delres] assign[=] call[call[name[__salt__]][constant[boto_apigateway.delete_api_resources]], parameter[]] if <ast.UnaryOp object at 0x7da1b1f48250> begin[:] return[name[delres]] variable[models] assign[=] call[call[name[__salt__]][constant[boto_apigateway.describe_api_models]], parameter[]] if call[name[models].get, parameter[constant[models]]] begin[:] for taget[name[model]] in starred[call[name[models].get, parameter[constant[models]]]] begin[:] variable[delres] assign[=] call[call[name[__salt__]][constant[boto_apigateway.delete_api_model]], parameter[]] if <ast.UnaryOp object at 0x7da1b1f48d90> begin[:] return[name[delres]] return[dictionary[[<ast.Constant object at 0x7da1b21a1c30>], [<ast.Constant object at 0x7da1b21a0130>]]]
keyword[def] identifier[_cleanup_api] ( identifier[self] ): literal[string] identifier[resources] = identifier[__salt__] [ literal[string] ]( identifier[restApiId] = identifier[self] . identifier[restApiId] , ** identifier[self] . identifier[_common_aws_args] ) keyword[if] identifier[resources] . identifier[get] ( literal[string] ): identifier[res] = identifier[resources] . identifier[get] ( literal[string] )[ literal[int] :] identifier[res] . identifier[reverse] () keyword[for] identifier[resource] keyword[in] identifier[res] : identifier[delres] = identifier[__salt__] [ literal[string] ]( identifier[restApiId] = identifier[self] . identifier[restApiId] , identifier[path] = identifier[resource] . identifier[get] ( literal[string] ), ** identifier[self] . identifier[_common_aws_args] ) keyword[if] keyword[not] identifier[delres] . identifier[get] ( literal[string] ): keyword[return] identifier[delres] identifier[models] = identifier[__salt__] [ literal[string] ]( identifier[restApiId] = identifier[self] . identifier[restApiId] ,** identifier[self] . identifier[_common_aws_args] ) keyword[if] identifier[models] . identifier[get] ( literal[string] ): keyword[for] identifier[model] keyword[in] identifier[models] . identifier[get] ( literal[string] ): identifier[delres] = identifier[__salt__] [ literal[string] ]( identifier[restApiId] = identifier[self] . identifier[restApiId] , identifier[modelName] = identifier[model] . identifier[get] ( literal[string] ), ** identifier[self] . identifier[_common_aws_args] ) keyword[if] keyword[not] identifier[delres] . identifier[get] ( literal[string] ): keyword[return] identifier[delres] keyword[return] { literal[string] : keyword[True] }
def _cleanup_api(self): """ Helper method to clean up resources and models if we detected a change in the swagger file for a stage """ resources = __salt__['boto_apigateway.describe_api_resources'](restApiId=self.restApiId, **self._common_aws_args) if resources.get('resources'): res = resources.get('resources')[1:] res.reverse() for resource in res: delres = __salt__['boto_apigateway.delete_api_resources'](restApiId=self.restApiId, path=resource.get('path'), **self._common_aws_args) if not delres.get('deleted'): return delres # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['resource']] # depends on [control=['if'], data=[]] models = __salt__['boto_apigateway.describe_api_models'](restApiId=self.restApiId, **self._common_aws_args) if models.get('models'): for model in models.get('models'): delres = __salt__['boto_apigateway.delete_api_model'](restApiId=self.restApiId, modelName=model.get('name'), **self._common_aws_args) if not delres.get('deleted'): return delres # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['model']] # depends on [control=['if'], data=[]] return {'deleted': True}
def subst_dict(target, source): """Create a dictionary for substitution of special construction variables. This translates the following special arguments: target - the target (object or array of objects), used to generate the TARGET and TARGETS construction variables source - the source (object or array of objects), used to generate the SOURCES and SOURCE construction variables """ dict = {} if target: def get_tgt_subst_proxy(thing): try: subst_proxy = thing.get_subst_proxy() except AttributeError: subst_proxy = thing # probably a string, just return it return subst_proxy tnl = NLWrapper(target, get_tgt_subst_proxy) dict['TARGETS'] = Targets_or_Sources(tnl) dict['TARGET'] = Target_or_Source(tnl) # This is a total cheat, but hopefully this dictionary goes # away soon anyway. We just let these expand to $TARGETS # because that's "good enough" for the use of ToolSurrogates # (see test/ToolSurrogate.py) to generate documentation. dict['CHANGED_TARGETS'] = '$TARGETS' dict['UNCHANGED_TARGETS'] = '$TARGETS' else: dict['TARGETS'] = NullNodesList dict['TARGET'] = NullNodesList if source: def get_src_subst_proxy(node): try: rfile = node.rfile except AttributeError: pass else: node = rfile() try: return node.get_subst_proxy() except AttributeError: return node # probably a String, just return it snl = NLWrapper(source, get_src_subst_proxy) dict['SOURCES'] = Targets_or_Sources(snl) dict['SOURCE'] = Target_or_Source(snl) # This is a total cheat, but hopefully this dictionary goes # away soon anyway. We just let these expand to $TARGETS # because that's "good enough" for the use of ToolSurrogates # (see test/ToolSurrogate.py) to generate documentation. dict['CHANGED_SOURCES'] = '$SOURCES' dict['UNCHANGED_SOURCES'] = '$SOURCES' else: dict['SOURCES'] = NullNodesList dict['SOURCE'] = NullNodesList return dict
def function[subst_dict, parameter[target, source]]: constant[Create a dictionary for substitution of special construction variables. This translates the following special arguments: target - the target (object or array of objects), used to generate the TARGET and TARGETS construction variables source - the source (object or array of objects), used to generate the SOURCES and SOURCE construction variables ] variable[dict] assign[=] dictionary[[], []] if name[target] begin[:] def function[get_tgt_subst_proxy, parameter[thing]]: <ast.Try object at 0x7da204961bd0> return[name[subst_proxy]] variable[tnl] assign[=] call[name[NLWrapper], parameter[name[target], name[get_tgt_subst_proxy]]] call[name[dict]][constant[TARGETS]] assign[=] call[name[Targets_or_Sources], parameter[name[tnl]]] call[name[dict]][constant[TARGET]] assign[=] call[name[Target_or_Source], parameter[name[tnl]]] call[name[dict]][constant[CHANGED_TARGETS]] assign[=] constant[$TARGETS] call[name[dict]][constant[UNCHANGED_TARGETS]] assign[=] constant[$TARGETS] if name[source] begin[:] def function[get_src_subst_proxy, parameter[node]]: <ast.Try object at 0x7da2041da710> <ast.Try object at 0x7da2041d8f70> variable[snl] assign[=] call[name[NLWrapper], parameter[name[source], name[get_src_subst_proxy]]] call[name[dict]][constant[SOURCES]] assign[=] call[name[Targets_or_Sources], parameter[name[snl]]] call[name[dict]][constant[SOURCE]] assign[=] call[name[Target_or_Source], parameter[name[snl]]] call[name[dict]][constant[CHANGED_SOURCES]] assign[=] constant[$SOURCES] call[name[dict]][constant[UNCHANGED_SOURCES]] assign[=] constant[$SOURCES] return[name[dict]]
keyword[def] identifier[subst_dict] ( identifier[target] , identifier[source] ): literal[string] identifier[dict] ={} keyword[if] identifier[target] : keyword[def] identifier[get_tgt_subst_proxy] ( identifier[thing] ): keyword[try] : identifier[subst_proxy] = identifier[thing] . identifier[get_subst_proxy] () keyword[except] identifier[AttributeError] : identifier[subst_proxy] = identifier[thing] keyword[return] identifier[subst_proxy] identifier[tnl] = identifier[NLWrapper] ( identifier[target] , identifier[get_tgt_subst_proxy] ) identifier[dict] [ literal[string] ]= identifier[Targets_or_Sources] ( identifier[tnl] ) identifier[dict] [ literal[string] ]= identifier[Target_or_Source] ( identifier[tnl] ) identifier[dict] [ literal[string] ]= literal[string] identifier[dict] [ literal[string] ]= literal[string] keyword[else] : identifier[dict] [ literal[string] ]= identifier[NullNodesList] identifier[dict] [ literal[string] ]= identifier[NullNodesList] keyword[if] identifier[source] : keyword[def] identifier[get_src_subst_proxy] ( identifier[node] ): keyword[try] : identifier[rfile] = identifier[node] . identifier[rfile] keyword[except] identifier[AttributeError] : keyword[pass] keyword[else] : identifier[node] = identifier[rfile] () keyword[try] : keyword[return] identifier[node] . identifier[get_subst_proxy] () keyword[except] identifier[AttributeError] : keyword[return] identifier[node] identifier[snl] = identifier[NLWrapper] ( identifier[source] , identifier[get_src_subst_proxy] ) identifier[dict] [ literal[string] ]= identifier[Targets_or_Sources] ( identifier[snl] ) identifier[dict] [ literal[string] ]= identifier[Target_or_Source] ( identifier[snl] ) identifier[dict] [ literal[string] ]= literal[string] identifier[dict] [ literal[string] ]= literal[string] keyword[else] : identifier[dict] [ literal[string] ]= identifier[NullNodesList] identifier[dict] [ literal[string] ]= identifier[NullNodesList] keyword[return] identifier[dict]
def subst_dict(target, source): """Create a dictionary for substitution of special construction variables. This translates the following special arguments: target - the target (object or array of objects), used to generate the TARGET and TARGETS construction variables source - the source (object or array of objects), used to generate the SOURCES and SOURCE construction variables """ dict = {} if target: def get_tgt_subst_proxy(thing): try: subst_proxy = thing.get_subst_proxy() # depends on [control=['try'], data=[]] except AttributeError: subst_proxy = thing # probably a string, just return it # depends on [control=['except'], data=[]] return subst_proxy tnl = NLWrapper(target, get_tgt_subst_proxy) dict['TARGETS'] = Targets_or_Sources(tnl) dict['TARGET'] = Target_or_Source(tnl) # This is a total cheat, but hopefully this dictionary goes # away soon anyway. We just let these expand to $TARGETS # because that's "good enough" for the use of ToolSurrogates # (see test/ToolSurrogate.py) to generate documentation. dict['CHANGED_TARGETS'] = '$TARGETS' dict['UNCHANGED_TARGETS'] = '$TARGETS' # depends on [control=['if'], data=[]] else: dict['TARGETS'] = NullNodesList dict['TARGET'] = NullNodesList if source: def get_src_subst_proxy(node): try: rfile = node.rfile # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] else: node = rfile() try: return node.get_subst_proxy() # depends on [control=['try'], data=[]] except AttributeError: return node # probably a String, just return it # depends on [control=['except'], data=[]] snl = NLWrapper(source, get_src_subst_proxy) dict['SOURCES'] = Targets_or_Sources(snl) dict['SOURCE'] = Target_or_Source(snl) # This is a total cheat, but hopefully this dictionary goes # away soon anyway. We just let these expand to $TARGETS # because that's "good enough" for the use of ToolSurrogates # (see test/ToolSurrogate.py) to generate documentation. 
dict['CHANGED_SOURCES'] = '$SOURCES' dict['UNCHANGED_SOURCES'] = '$SOURCES' # depends on [control=['if'], data=[]] else: dict['SOURCES'] = NullNodesList dict['SOURCE'] = NullNodesList return dict
def _get_upload_cmd(self, mirror=False): """Generate the S3 CLI upload command Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: str: The full CLI command to run. """ if mirror: dest_uri = self.s3_mirror_uri else: dest_uri = self.s3_version_uri cmd = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(self.artifact_path, dest_uri, self.env) return cmd
def function[_get_upload_cmd, parameter[self, mirror]]: constant[Generate the S3 CLI upload command Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: str: The full CLI command to run. ] if name[mirror] begin[:] variable[dest_uri] assign[=] name[self].s3_mirror_uri variable[cmd] assign[=] call[constant[aws s3 sync {} {} --delete --exact-timestamps --profile {}].format, parameter[name[self].artifact_path, name[dest_uri], name[self].env]] return[name[cmd]]
keyword[def] identifier[_get_upload_cmd] ( identifier[self] , identifier[mirror] = keyword[False] ): literal[string] keyword[if] identifier[mirror] : identifier[dest_uri] = identifier[self] . identifier[s3_mirror_uri] keyword[else] : identifier[dest_uri] = identifier[self] . identifier[s3_version_uri] identifier[cmd] = literal[string] . identifier[format] ( identifier[self] . identifier[artifact_path] , identifier[dest_uri] , identifier[self] . identifier[env] ) keyword[return] identifier[cmd]
def _get_upload_cmd(self, mirror=False): """Generate the S3 CLI upload command Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: str: The full CLI command to run. """ if mirror: dest_uri = self.s3_mirror_uri # depends on [control=['if'], data=[]] else: dest_uri = self.s3_version_uri cmd = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(self.artifact_path, dest_uri, self.env) return cmd
def main(commit_only=True): """ Run the configured code checks. Return system exit code. 1 - reject commit 0 - accept commit """ global TEMP_FOLDER exit_code = 0 hook_checks = HookConfig(get_config_file()) with files_to_check(commit_only) as files: for name, mod in checks(): default = getattr(mod, 'DEFAULT', 'off') if hook_checks.is_enabled(name, default=default): if hasattr(mod, 'REQUIRED_FILES'): for filename in mod.REQUIRED_FILES: if os.path.isfile(filename): try: shutil.copy(filename, TEMP_FOLDER) except shutil.Error: # Copied over by a previous check continue args = hook_checks.arguments(name) tmp_files = [os.path.join(TEMP_FOLDER, f) for f in files] if args: errors = mod.run(tmp_files, TEMP_FOLDER, args) else: errors = mod.run(tmp_files, TEMP_FOLDER) if errors: title_print("Checking {0}".format(name)) print((errors.replace(TEMP_FOLDER + "/", ''))) print("") exit_code = 1 if exit_code == 1: title_print("Rejecting commit") return exit_code
def function[main, parameter[commit_only]]: constant[ Run the configured code checks. Return system exit code. 1 - reject commit 0 - accept commit ] <ast.Global object at 0x7da1b0fa7e50> variable[exit_code] assign[=] constant[0] variable[hook_checks] assign[=] call[name[HookConfig], parameter[call[name[get_config_file], parameter[]]]] with call[name[files_to_check], parameter[name[commit_only]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b0fa7a90>, <ast.Name object at 0x7da1b0fa7a60>]]] in starred[call[name[checks], parameter[]]] begin[:] variable[default] assign[=] call[name[getattr], parameter[name[mod], constant[DEFAULT], constant[off]]] if call[name[hook_checks].is_enabled, parameter[name[name]]] begin[:] if call[name[hasattr], parameter[name[mod], constant[REQUIRED_FILES]]] begin[:] for taget[name[filename]] in starred[name[mod].REQUIRED_FILES] begin[:] if call[name[os].path.isfile, parameter[name[filename]]] begin[:] <ast.Try object at 0x7da1b0f334f0> variable[args] assign[=] call[name[hook_checks].arguments, parameter[name[name]]] variable[tmp_files] assign[=] <ast.ListComp object at 0x7da1b0f31ea0> if name[args] begin[:] variable[errors] assign[=] call[name[mod].run, parameter[name[tmp_files], name[TEMP_FOLDER], name[args]]] if name[errors] begin[:] call[name[title_print], parameter[call[constant[Checking {0}].format, parameter[name[name]]]]] call[name[print], parameter[call[name[errors].replace, parameter[binary_operation[name[TEMP_FOLDER] + constant[/]], constant[]]]]] call[name[print], parameter[constant[]]] variable[exit_code] assign[=] constant[1] if compare[name[exit_code] equal[==] constant[1]] begin[:] call[name[title_print], parameter[constant[Rejecting commit]]] return[name[exit_code]]
keyword[def] identifier[main] ( identifier[commit_only] = keyword[True] ): literal[string] keyword[global] identifier[TEMP_FOLDER] identifier[exit_code] = literal[int] identifier[hook_checks] = identifier[HookConfig] ( identifier[get_config_file] ()) keyword[with] identifier[files_to_check] ( identifier[commit_only] ) keyword[as] identifier[files] : keyword[for] identifier[name] , identifier[mod] keyword[in] identifier[checks] (): identifier[default] = identifier[getattr] ( identifier[mod] , literal[string] , literal[string] ) keyword[if] identifier[hook_checks] . identifier[is_enabled] ( identifier[name] , identifier[default] = identifier[default] ): keyword[if] identifier[hasattr] ( identifier[mod] , literal[string] ): keyword[for] identifier[filename] keyword[in] identifier[mod] . identifier[REQUIRED_FILES] : keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ): keyword[try] : identifier[shutil] . identifier[copy] ( identifier[filename] , identifier[TEMP_FOLDER] ) keyword[except] identifier[shutil] . identifier[Error] : keyword[continue] identifier[args] = identifier[hook_checks] . identifier[arguments] ( identifier[name] ) identifier[tmp_files] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[TEMP_FOLDER] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[files] ] keyword[if] identifier[args] : identifier[errors] = identifier[mod] . identifier[run] ( identifier[tmp_files] , identifier[TEMP_FOLDER] , identifier[args] ) keyword[else] : identifier[errors] = identifier[mod] . identifier[run] ( identifier[tmp_files] , identifier[TEMP_FOLDER] ) keyword[if] identifier[errors] : identifier[title_print] ( literal[string] . identifier[format] ( identifier[name] )) identifier[print] (( identifier[errors] . 
identifier[replace] ( identifier[TEMP_FOLDER] + literal[string] , literal[string] ))) identifier[print] ( literal[string] ) identifier[exit_code] = literal[int] keyword[if] identifier[exit_code] == literal[int] : identifier[title_print] ( literal[string] ) keyword[return] identifier[exit_code]
def main(commit_only=True): """ Run the configured code checks. Return system exit code. 1 - reject commit 0 - accept commit """ global TEMP_FOLDER exit_code = 0 hook_checks = HookConfig(get_config_file()) with files_to_check(commit_only) as files: for (name, mod) in checks(): default = getattr(mod, 'DEFAULT', 'off') if hook_checks.is_enabled(name, default=default): if hasattr(mod, 'REQUIRED_FILES'): for filename in mod.REQUIRED_FILES: if os.path.isfile(filename): try: shutil.copy(filename, TEMP_FOLDER) # depends on [control=['try'], data=[]] except shutil.Error: # Copied over by a previous check continue # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['if'], data=[]] args = hook_checks.arguments(name) tmp_files = [os.path.join(TEMP_FOLDER, f) for f in files] if args: errors = mod.run(tmp_files, TEMP_FOLDER, args) # depends on [control=['if'], data=[]] else: errors = mod.run(tmp_files, TEMP_FOLDER) if errors: title_print('Checking {0}'.format(name)) print(errors.replace(TEMP_FOLDER + '/', '')) print('') exit_code = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['files']] if exit_code == 1: title_print('Rejecting commit') # depends on [control=['if'], data=[]] return exit_code
def move_todo_item(self, item_id, to): """ Changes the position of an item within its parent list. It does not currently support reparenting an item. Position 1 is at the top of the list. Moving an item beyond the end of the list puts it at the bottom of the list. """ path = '/todos/move_item/%u' % item_id req = ET.Element('request') ET.SubElement(req, 'to').text = str(int(to)) return self._request(path, req)
def function[move_todo_item, parameter[self, item_id, to]]: constant[ Changes the position of an item within its parent list. It does not currently support reparenting an item. Position 1 is at the top of the list. Moving an item beyond the end of the list puts it at the bottom of the list. ] variable[path] assign[=] binary_operation[constant[/todos/move_item/%u] <ast.Mod object at 0x7da2590d6920> name[item_id]] variable[req] assign[=] call[name[ET].Element, parameter[constant[request]]] call[name[ET].SubElement, parameter[name[req], constant[to]]].text assign[=] call[name[str], parameter[call[name[int], parameter[name[to]]]]] return[call[name[self]._request, parameter[name[path], name[req]]]]
keyword[def] identifier[move_todo_item] ( identifier[self] , identifier[item_id] , identifier[to] ): literal[string] identifier[path] = literal[string] % identifier[item_id] identifier[req] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[ET] . identifier[SubElement] ( identifier[req] , literal[string] ). identifier[text] = identifier[str] ( identifier[int] ( identifier[to] )) keyword[return] identifier[self] . identifier[_request] ( identifier[path] , identifier[req] )
def move_todo_item(self, item_id, to): """ Changes the position of an item within its parent list. It does not currently support reparenting an item. Position 1 is at the top of the list. Moving an item beyond the end of the list puts it at the bottom of the list. """ path = '/todos/move_item/%u' % item_id req = ET.Element('request') ET.SubElement(req, 'to').text = str(int(to)) return self._request(path, req)
def log(level, msg, *args, **kwargs): """Logs 'msg % args' at absl logging level 'level'. If no args are given just print msg, ignoring any interpolation specifiers. Args: level: int, the absl logging level at which to log the message (logging.DEBUG|INFO|WARNING|ERROR|FATAL). While some C++ verbose logging level constants are also supported, callers should prefer explicit logging.vlog() calls for such purpose. msg: str, the message to be logged. *args: The args to be substitued into the msg. **kwargs: May contain exc_info to add exception traceback to message. """ if level > converter.ABSL_DEBUG: # Even though this function supports level that is greater than 1, users # should use logging.vlog instead for such cases. # Treat this as vlog, 1 is equivalent to DEBUG. standard_level = converter.STANDARD_DEBUG - (level - 1) else: if level < converter.ABSL_FATAL: level = converter.ABSL_FATAL standard_level = converter.absl_to_standard(level) _absl_logger.log(standard_level, msg, *args, **kwargs)
def function[log, parameter[level, msg]]: constant[Logs 'msg % args' at absl logging level 'level'. If no args are given just print msg, ignoring any interpolation specifiers. Args: level: int, the absl logging level at which to log the message (logging.DEBUG|INFO|WARNING|ERROR|FATAL). While some C++ verbose logging level constants are also supported, callers should prefer explicit logging.vlog() calls for such purpose. msg: str, the message to be logged. *args: The args to be substitued into the msg. **kwargs: May contain exc_info to add exception traceback to message. ] if compare[name[level] greater[>] name[converter].ABSL_DEBUG] begin[:] variable[standard_level] assign[=] binary_operation[name[converter].STANDARD_DEBUG - binary_operation[name[level] - constant[1]]] call[name[_absl_logger].log, parameter[name[standard_level], name[msg], <ast.Starred object at 0x7da1b18ee860>]]
keyword[def] identifier[log] ( identifier[level] , identifier[msg] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[level] > identifier[converter] . identifier[ABSL_DEBUG] : identifier[standard_level] = identifier[converter] . identifier[STANDARD_DEBUG] -( identifier[level] - literal[int] ) keyword[else] : keyword[if] identifier[level] < identifier[converter] . identifier[ABSL_FATAL] : identifier[level] = identifier[converter] . identifier[ABSL_FATAL] identifier[standard_level] = identifier[converter] . identifier[absl_to_standard] ( identifier[level] ) identifier[_absl_logger] . identifier[log] ( identifier[standard_level] , identifier[msg] ,* identifier[args] ,** identifier[kwargs] )
def log(level, msg, *args, **kwargs): """Logs 'msg % args' at absl logging level 'level'. If no args are given just print msg, ignoring any interpolation specifiers. Args: level: int, the absl logging level at which to log the message (logging.DEBUG|INFO|WARNING|ERROR|FATAL). While some C++ verbose logging level constants are also supported, callers should prefer explicit logging.vlog() calls for such purpose. msg: str, the message to be logged. *args: The args to be substitued into the msg. **kwargs: May contain exc_info to add exception traceback to message. """ if level > converter.ABSL_DEBUG: # Even though this function supports level that is greater than 1, users # should use logging.vlog instead for such cases. # Treat this as vlog, 1 is equivalent to DEBUG. standard_level = converter.STANDARD_DEBUG - (level - 1) # depends on [control=['if'], data=['level']] else: if level < converter.ABSL_FATAL: level = converter.ABSL_FATAL # depends on [control=['if'], data=['level']] standard_level = converter.absl_to_standard(level) _absl_logger.log(standard_level, msg, *args, **kwargs)
def DbGetAttributeAlias2(self, argin): """ Get the attribute alias from the attribute name. Returns one empty string if nothing found in database :param argin: The attribute name (dev_name/att_name) :type: tango.DevString :return: The attribute alias name (or empty string) :rtype: tango.DevString """ self._log.debug("In DbGetAttributeAlias2()") attr_name = argin[0] return self.db.get_attribute_alias2(attr_name)
def function[DbGetAttributeAlias2, parameter[self, argin]]: constant[ Get the attribute alias from the attribute name. Returns one empty string if nothing found in database :param argin: The attribute name (dev_name/att_name) :type: tango.DevString :return: The attribute alias name (or empty string) :rtype: tango.DevString ] call[name[self]._log.debug, parameter[constant[In DbGetAttributeAlias2()]]] variable[attr_name] assign[=] call[name[argin]][constant[0]] return[call[name[self].db.get_attribute_alias2, parameter[name[attr_name]]]]
keyword[def] identifier[DbGetAttributeAlias2] ( identifier[self] , identifier[argin] ): literal[string] identifier[self] . identifier[_log] . identifier[debug] ( literal[string] ) identifier[attr_name] = identifier[argin] [ literal[int] ] keyword[return] identifier[self] . identifier[db] . identifier[get_attribute_alias2] ( identifier[attr_name] )
def DbGetAttributeAlias2(self, argin): """ Get the attribute alias from the attribute name. Returns one empty string if nothing found in database :param argin: The attribute name (dev_name/att_name) :type: tango.DevString :return: The attribute alias name (or empty string) :rtype: tango.DevString """ self._log.debug('In DbGetAttributeAlias2()') attr_name = argin[0] return self.db.get_attribute_alias2(attr_name)
def generate(args=None, namespace=None, file=None): """ Genereate DDL from data sources named. :args: String or list of strings to be parsed for arguments :namespace: Namespace to extract arguments from :file: Write to this open file object (default stdout) """ if hasattr(args, 'split'): args = args.split() args = parser.parse_args(args, namespace) set_logging(args) logging.info(str(args)) if args.dialect in ('pg', 'pgsql', 'postgres'): args.dialect = 'postgresql' if args.dialect.startswith('dj'): args.dialect = 'django' elif args.dialect.startswith('sqla'): args.dialect = 'sqlalchemy' if args.dialect not in dialect_names: raise NotImplementedError('First arg must be one of: %s' % ", ".join(dialect_names)) if args.dialect == 'sqlalchemy': print(sqla_head, file=file) for datafile in args.datafile: if is_sqlalchemy_url.search(datafile): table_names_for_insert = [] for tbl in sqlalchemy_table_sources(datafile): t = generate_one(tbl, args, table_name=tbl.generator.name, file=file) if t.data: table_names_for_insert.append(tbl.generator.name) if args.inserts and args.dialect == 'sqlalchemy': print(sqla_inserter_call(table_names_for_insert), file=file) if t and args.inserts: for seq_update in emit_db_sequence_updates(t.source.db_engine): if args.dialect == 'sqlalchemy': print(' conn.execute("%s")' % seq_update, file=file) elif args.dialect == 'postgresql': print(seq_update, file=file) else: generate_one(datafile, args, file=file)
def function[generate, parameter[args, namespace, file]]: constant[ Genereate DDL from data sources named. :args: String or list of strings to be parsed for arguments :namespace: Namespace to extract arguments from :file: Write to this open file object (default stdout) ] if call[name[hasattr], parameter[name[args], constant[split]]] begin[:] variable[args] assign[=] call[name[args].split, parameter[]] variable[args] assign[=] call[name[parser].parse_args, parameter[name[args], name[namespace]]] call[name[set_logging], parameter[name[args]]] call[name[logging].info, parameter[call[name[str], parameter[name[args]]]]] if compare[name[args].dialect in tuple[[<ast.Constant object at 0x7da1b1970610>, <ast.Constant object at 0x7da1b1971300>, <ast.Constant object at 0x7da1b1970340>]]] begin[:] name[args].dialect assign[=] constant[postgresql] if call[name[args].dialect.startswith, parameter[constant[dj]]] begin[:] name[args].dialect assign[=] constant[django] if compare[name[args].dialect <ast.NotIn object at 0x7da2590d7190> name[dialect_names]] begin[:] <ast.Raise object at 0x7da1b19709d0> if compare[name[args].dialect equal[==] constant[sqlalchemy]] begin[:] call[name[print], parameter[name[sqla_head]]] for taget[name[datafile]] in starred[name[args].datafile] begin[:] if call[name[is_sqlalchemy_url].search, parameter[name[datafile]]] begin[:] variable[table_names_for_insert] assign[=] list[[]] for taget[name[tbl]] in starred[call[name[sqlalchemy_table_sources], parameter[name[datafile]]]] begin[:] variable[t] assign[=] call[name[generate_one], parameter[name[tbl], name[args]]] if name[t].data begin[:] call[name[table_names_for_insert].append, parameter[name[tbl].generator.name]] if <ast.BoolOp object at 0x7da1b1970700> begin[:] call[name[print], parameter[call[name[sqla_inserter_call], parameter[name[table_names_for_insert]]]]] if <ast.BoolOp object at 0x7da1b1972350> begin[:] for taget[name[seq_update]] in starred[call[name[emit_db_sequence_updates], 
parameter[name[t].source.db_engine]]] begin[:] if compare[name[args].dialect equal[==] constant[sqlalchemy]] begin[:] call[name[print], parameter[binary_operation[constant[ conn.execute("%s")] <ast.Mod object at 0x7da2590d6920> name[seq_update]]]]
keyword[def] identifier[generate] ( identifier[args] = keyword[None] , identifier[namespace] = keyword[None] , identifier[file] = keyword[None] ): literal[string] keyword[if] identifier[hasattr] ( identifier[args] , literal[string] ): identifier[args] = identifier[args] . identifier[split] () identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[args] , identifier[namespace] ) identifier[set_logging] ( identifier[args] ) identifier[logging] . identifier[info] ( identifier[str] ( identifier[args] )) keyword[if] identifier[args] . identifier[dialect] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[args] . identifier[dialect] = literal[string] keyword[if] identifier[args] . identifier[dialect] . identifier[startswith] ( literal[string] ): identifier[args] . identifier[dialect] = literal[string] keyword[elif] identifier[args] . identifier[dialect] . identifier[startswith] ( literal[string] ): identifier[args] . identifier[dialect] = literal[string] keyword[if] identifier[args] . identifier[dialect] keyword[not] keyword[in] identifier[dialect_names] : keyword[raise] identifier[NotImplementedError] ( literal[string] % literal[string] . identifier[join] ( identifier[dialect_names] )) keyword[if] identifier[args] . identifier[dialect] == literal[string] : identifier[print] ( identifier[sqla_head] , identifier[file] = identifier[file] ) keyword[for] identifier[datafile] keyword[in] identifier[args] . identifier[datafile] : keyword[if] identifier[is_sqlalchemy_url] . identifier[search] ( identifier[datafile] ): identifier[table_names_for_insert] =[] keyword[for] identifier[tbl] keyword[in] identifier[sqlalchemy_table_sources] ( identifier[datafile] ): identifier[t] = identifier[generate_one] ( identifier[tbl] , identifier[args] , identifier[table_name] = identifier[tbl] . identifier[generator] . identifier[name] , identifier[file] = identifier[file] ) keyword[if] identifier[t] . 
identifier[data] : identifier[table_names_for_insert] . identifier[append] ( identifier[tbl] . identifier[generator] . identifier[name] ) keyword[if] identifier[args] . identifier[inserts] keyword[and] identifier[args] . identifier[dialect] == literal[string] : identifier[print] ( identifier[sqla_inserter_call] ( identifier[table_names_for_insert] ), identifier[file] = identifier[file] ) keyword[if] identifier[t] keyword[and] identifier[args] . identifier[inserts] : keyword[for] identifier[seq_update] keyword[in] identifier[emit_db_sequence_updates] ( identifier[t] . identifier[source] . identifier[db_engine] ): keyword[if] identifier[args] . identifier[dialect] == literal[string] : identifier[print] ( literal[string] % identifier[seq_update] , identifier[file] = identifier[file] ) keyword[elif] identifier[args] . identifier[dialect] == literal[string] : identifier[print] ( identifier[seq_update] , identifier[file] = identifier[file] ) keyword[else] : identifier[generate_one] ( identifier[datafile] , identifier[args] , identifier[file] = identifier[file] )
def generate(args=None, namespace=None, file=None): """ Genereate DDL from data sources named. :args: String or list of strings to be parsed for arguments :namespace: Namespace to extract arguments from :file: Write to this open file object (default stdout) """ if hasattr(args, 'split'): args = args.split() # depends on [control=['if'], data=[]] args = parser.parse_args(args, namespace) set_logging(args) logging.info(str(args)) if args.dialect in ('pg', 'pgsql', 'postgres'): args.dialect = 'postgresql' # depends on [control=['if'], data=[]] if args.dialect.startswith('dj'): args.dialect = 'django' # depends on [control=['if'], data=[]] elif args.dialect.startswith('sqla'): args.dialect = 'sqlalchemy' # depends on [control=['if'], data=[]] if args.dialect not in dialect_names: raise NotImplementedError('First arg must be one of: %s' % ', '.join(dialect_names)) # depends on [control=['if'], data=['dialect_names']] if args.dialect == 'sqlalchemy': print(sqla_head, file=file) # depends on [control=['if'], data=[]] for datafile in args.datafile: if is_sqlalchemy_url.search(datafile): table_names_for_insert = [] for tbl in sqlalchemy_table_sources(datafile): t = generate_one(tbl, args, table_name=tbl.generator.name, file=file) if t.data: table_names_for_insert.append(tbl.generator.name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tbl']] if args.inserts and args.dialect == 'sqlalchemy': print(sqla_inserter_call(table_names_for_insert), file=file) # depends on [control=['if'], data=[]] if t and args.inserts: for seq_update in emit_db_sequence_updates(t.source.db_engine): if args.dialect == 'sqlalchemy': print(' conn.execute("%s")' % seq_update, file=file) # depends on [control=['if'], data=[]] elif args.dialect == 'postgresql': print(seq_update, file=file) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seq_update']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: 
generate_one(datafile, args, file=file) # depends on [control=['for'], data=['datafile']]
def create_type_variant(cls, type_name, type_converter): """ Create type variants for types with a cardinality field. The new type converters are based on the type converter with cardinality=1. .. code-block:: python import parse @parse.with_pattern(r'\d+') def parse_number(text): return int(text) new_type = CardinalityFieldTypeBuilder.create_type_variant( "Number+", parse_number) new_type = CardinalityFieldTypeBuilder.create_type_variant( "Number+", dict(Number=parse_number)) :param type_name: Type name with cardinality field suffix. :param type_converter: Type converter or type dictionary. :return: Type converter variant (function). :raises: ValueError, if type_name does not end with CardinalityField :raises: MissingTypeError, if type_converter is missing in type_dict """ assert isinstance(type_name, six.string_types) if not CardinalityField.matches_type(type_name): message = "type_name='%s' has no CardinalityField" % type_name raise ValueError(message) primary_name, cardinality = CardinalityField.split_type(type_name) if isinstance(type_converter, dict): type_dict = type_converter type_converter = type_dict.get(primary_name, None) if not type_converter: raise MissingTypeError(primary_name) assert callable(type_converter) type_variant = TypeBuilder.with_cardinality(cardinality, type_converter, listsep=cls.listsep) type_variant.name = type_name return type_variant
def function[create_type_variant, parameter[cls, type_name, type_converter]]: constant[ Create type variants for types with a cardinality field. The new type converters are based on the type converter with cardinality=1. .. code-block:: python import parse @parse.with_pattern(r'\d+') def parse_number(text): return int(text) new_type = CardinalityFieldTypeBuilder.create_type_variant( "Number+", parse_number) new_type = CardinalityFieldTypeBuilder.create_type_variant( "Number+", dict(Number=parse_number)) :param type_name: Type name with cardinality field suffix. :param type_converter: Type converter or type dictionary. :return: Type converter variant (function). :raises: ValueError, if type_name does not end with CardinalityField :raises: MissingTypeError, if type_converter is missing in type_dict ] assert[call[name[isinstance], parameter[name[type_name], name[six].string_types]]] if <ast.UnaryOp object at 0x7da1b247fac0> begin[:] variable[message] assign[=] binary_operation[constant[type_name='%s' has no CardinalityField] <ast.Mod object at 0x7da2590d6920> name[type_name]] <ast.Raise object at 0x7da1b247cbb0> <ast.Tuple object at 0x7da1b247fd90> assign[=] call[name[CardinalityField].split_type, parameter[name[type_name]]] if call[name[isinstance], parameter[name[type_converter], name[dict]]] begin[:] variable[type_dict] assign[=] name[type_converter] variable[type_converter] assign[=] call[name[type_dict].get, parameter[name[primary_name], constant[None]]] if <ast.UnaryOp object at 0x7da1b257d390> begin[:] <ast.Raise object at 0x7da1b257d900> assert[call[name[callable], parameter[name[type_converter]]]] variable[type_variant] assign[=] call[name[TypeBuilder].with_cardinality, parameter[name[cardinality], name[type_converter]]] name[type_variant].name assign[=] name[type_name] return[name[type_variant]]
keyword[def] identifier[create_type_variant] ( identifier[cls] , identifier[type_name] , identifier[type_converter] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[type_name] , identifier[six] . identifier[string_types] ) keyword[if] keyword[not] identifier[CardinalityField] . identifier[matches_type] ( identifier[type_name] ): identifier[message] = literal[string] % identifier[type_name] keyword[raise] identifier[ValueError] ( identifier[message] ) identifier[primary_name] , identifier[cardinality] = identifier[CardinalityField] . identifier[split_type] ( identifier[type_name] ) keyword[if] identifier[isinstance] ( identifier[type_converter] , identifier[dict] ): identifier[type_dict] = identifier[type_converter] identifier[type_converter] = identifier[type_dict] . identifier[get] ( identifier[primary_name] , keyword[None] ) keyword[if] keyword[not] identifier[type_converter] : keyword[raise] identifier[MissingTypeError] ( identifier[primary_name] ) keyword[assert] identifier[callable] ( identifier[type_converter] ) identifier[type_variant] = identifier[TypeBuilder] . identifier[with_cardinality] ( identifier[cardinality] , identifier[type_converter] , identifier[listsep] = identifier[cls] . identifier[listsep] ) identifier[type_variant] . identifier[name] = identifier[type_name] keyword[return] identifier[type_variant]
def create_type_variant(cls, type_name, type_converter): """ Create type variants for types with a cardinality field. The new type converters are based on the type converter with cardinality=1. .. code-block:: python import parse @parse.with_pattern(r'\\d+') def parse_number(text): return int(text) new_type = CardinalityFieldTypeBuilder.create_type_variant( "Number+", parse_number) new_type = CardinalityFieldTypeBuilder.create_type_variant( "Number+", dict(Number=parse_number)) :param type_name: Type name with cardinality field suffix. :param type_converter: Type converter or type dictionary. :return: Type converter variant (function). :raises: ValueError, if type_name does not end with CardinalityField :raises: MissingTypeError, if type_converter is missing in type_dict """ assert isinstance(type_name, six.string_types) if not CardinalityField.matches_type(type_name): message = "type_name='%s' has no CardinalityField" % type_name raise ValueError(message) # depends on [control=['if'], data=[]] (primary_name, cardinality) = CardinalityField.split_type(type_name) if isinstance(type_converter, dict): type_dict = type_converter type_converter = type_dict.get(primary_name, None) if not type_converter: raise MissingTypeError(primary_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] assert callable(type_converter) type_variant = TypeBuilder.with_cardinality(cardinality, type_converter, listsep=cls.listsep) type_variant.name = type_name return type_variant
def get_hotp(
    secret,
    intervals_no,
    as_string=False,
    casefold=True,
    digest_method=hashlib.sha1,
    token_length=6,
):
    """
    Get HMAC-based one-time password on the basis of given secret and
    interval number (RFC 4226).

    :param secret: the base32-encoded string acting as secret key
    :type secret: str or bytes
    :param intervals_no: interval number used for getting different tokens, it
        is incremented with each use
    :type intervals_no: int
    :param as_string: True if result should be padded string, False otherwise
    :type as_string: bool
    :param casefold: True (default), if should accept also lowercase alphabet
    :type casefold: bool
    :param digest_method: method of generating digest (hashlib.sha1 by default)
    :type digest_method: callable
    :param token_length: length of the token (6 by default)
    :type token_length: int
    :return: generated HOTP token
    :rtype: int or bytes
    :raises TypeError: if the secret is not valid base32

    >>> get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=1)
    765705
    >>> get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=2)
    816065
    >>> result = get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=2, as_string=True)
    >>> result == b'816065'
    True
    """
    if isinstance(secret, str):
        # Normalise text secrets to bytes before base32-decoding.
        secret = secret.encode('utf-8')

    # Get rid of all the spacing:
    secret = secret.replace(b' ', b'')
    try:
        key = base64.b32decode(secret, casefold=casefold)
    except (TypeError, ValueError):
        # BUGFIX: on Python 3, base64.b32decode reports bad input with
        # binascii.Error (a ValueError subclass), not TypeError, so the
        # old `except TypeError` let it escape.  Catch both and raise
        # the documented TypeError.
        raise TypeError('Incorrect secret')
    msg = struct.pack('>Q', intervals_no)
    hmac_digest = hmac.new(key, msg, digest_method).digest()
    # RFC 4226 dynamic truncation: the low nibble of the *last* digest
    # byte selects a 4-byte window.  Index -1 (instead of the sha1-only
    # hard-coded 19) keeps this correct for any digest size.
    offset = hmac_digest[-1] & 15
    token_base = struct.unpack('>I', hmac_digest[offset:offset + 4])[0] & 0x7fffffff
    token = token_base % (10 ** token_length)
    if as_string:
        # Zero-pad to token_length; return bytes for backward
        # compatibility with the original (six.b) return type.
        return '{:0{}d}'.format(token, token_length).encode('ascii')
    return token
def function[get_hotp, parameter[secret, intervals_no, as_string, casefold, digest_method, token_length]]: constant[ Get HMAC-based one-time password on the basis of given secret and interval number. :param secret: the base32-encoded string acting as secret key :type secret: str or unicode :param intervals_no: interval number used for getting different tokens, it is incremented with each use :type intervals_no: int :param as_string: True if result should be padded string, False otherwise :type as_string: bool :param casefold: True (default), if should accept also lowercase alphabet :type casefold: bool :param digest_method: method of generating digest (hashlib.sha1 by default) :type digest_method: callable :param token_length: length of the token (6 by default) :type token_length: int :return: generated HOTP token :rtype: int or str >>> get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=1) 765705 >>> get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=2) 816065 >>> result = get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=2, as_string=True) >>> result == b'816065' True ] if call[name[isinstance], parameter[name[secret], name[six].string_types]] begin[:] variable[secret] assign[=] call[name[secret].encode, parameter[constant[utf-8]]] variable[secret] assign[=] call[name[secret].replace, parameter[constant[b' '], constant[b'']]] <ast.Try object at 0x7da1b0be30d0> variable[msg] assign[=] call[name[struct].pack, parameter[constant[>Q], name[intervals_no]]] variable[hmac_digest] assign[=] call[call[name[hmac].new, parameter[name[key], name[msg], name[digest_method]]].digest, parameter[]] variable[ob] assign[=] <ast.IfExp object at 0x7da1b0be0220> variable[o] assign[=] binary_operation[name[ob] <ast.BitAnd object at 0x7da2590d6b60> constant[15]] variable[token_base] assign[=] binary_operation[call[call[name[struct].unpack, parameter[constant[>I], call[name[hmac_digest]][<ast.Slice object at 0x7da1b0be2080>]]]][constant[0]] <ast.BitAnd object at 0x7da2590d6b60> constant[2147483647]] 
variable[token] assign[=] binary_operation[name[token_base] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[10] ** name[token_length]]] if name[as_string] begin[:] return[call[name[six].b, parameter[call[call[constant[{{:0{}d}}].format, parameter[name[token_length]]].format, parameter[name[token]]]]]]
keyword[def] identifier[get_hotp] ( identifier[secret] , identifier[intervals_no] , identifier[as_string] = keyword[False] , identifier[casefold] = keyword[True] , identifier[digest_method] = identifier[hashlib] . identifier[sha1] , identifier[token_length] = literal[int] , ): literal[string] keyword[if] identifier[isinstance] ( identifier[secret] , identifier[six] . identifier[string_types] ): identifier[secret] = identifier[secret] . identifier[encode] ( literal[string] ) identifier[secret] = identifier[secret] . identifier[replace] ( literal[string] , literal[string] ) keyword[try] : identifier[key] = identifier[base64] . identifier[b32decode] ( identifier[secret] , identifier[casefold] = identifier[casefold] ) keyword[except] ( identifier[TypeError] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[msg] = identifier[struct] . identifier[pack] ( literal[string] , identifier[intervals_no] ) identifier[hmac_digest] = identifier[hmac] . identifier[new] ( identifier[key] , identifier[msg] , identifier[digest_method] ). identifier[digest] () identifier[ob] = identifier[hmac_digest] [ literal[int] ] keyword[if] identifier[six] . identifier[PY3] keyword[else] identifier[ord] ( identifier[hmac_digest] [ literal[int] ]) identifier[o] = identifier[ob] & literal[int] identifier[token_base] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[hmac_digest] [ identifier[o] : identifier[o] + literal[int] ])[ literal[int] ]& literal[int] identifier[token] = identifier[token_base] %( literal[int] ** identifier[token_length] ) keyword[if] identifier[as_string] : keyword[return] identifier[six] . identifier[b] ( literal[string] . identifier[format] ( identifier[token_length] ). identifier[format] ( identifier[token] )) keyword[else] : keyword[return] identifier[token]
def get_hotp(secret, intervals_no, as_string=False, casefold=True, digest_method=hashlib.sha1, token_length=6): """ Get HMAC-based one-time password on the basis of given secret and interval number. :param secret: the base32-encoded string acting as secret key :type secret: str or unicode :param intervals_no: interval number used for getting different tokens, it is incremented with each use :type intervals_no: int :param as_string: True if result should be padded string, False otherwise :type as_string: bool :param casefold: True (default), if should accept also lowercase alphabet :type casefold: bool :param digest_method: method of generating digest (hashlib.sha1 by default) :type digest_method: callable :param token_length: length of the token (6 by default) :type token_length: int :return: generated HOTP token :rtype: int or str >>> get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=1) 765705 >>> get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=2) 816065 >>> result = get_hotp(b'MFRGGZDFMZTWQ2LK', intervals_no=2, as_string=True) >>> result == b'816065' True """ if isinstance(secret, six.string_types): # It is unicode, convert it to bytes secret = secret.encode('utf-8') # depends on [control=['if'], data=[]] # Get rid of all the spacing: secret = secret.replace(b' ', b'') try: key = base64.b32decode(secret, casefold=casefold) # depends on [control=['try'], data=[]] except TypeError: raise TypeError('Incorrect secret') # depends on [control=['except'], data=[]] msg = struct.pack('>Q', intervals_no) hmac_digest = hmac.new(key, msg, digest_method).digest() ob = hmac_digest[19] if six.PY3 else ord(hmac_digest[19]) o = ob & 15 token_base = struct.unpack('>I', hmac_digest[o:o + 4])[0] & 2147483647 token = token_base % 10 ** token_length if as_string: # TODO: should as_string=True return unicode, not bytes? return six.b('{{:0{}d}}'.format(token_length).format(token)) # depends on [control=['if'], data=[]] else: return token
def global_fixes():
    """Yield multiple (code, function) tuples."""
    for candidate in list(globals().values()):
        # Only module-level functions whose first parameter is 'source'
        # qualify as fix functions.
        if not inspect.isfunction(candidate):
            continue
        params = _get_parameters(candidate)
        if params[:1] != ['source']:
            continue
        code = extract_code_from_function(candidate)
        if code:
            yield (code, candidate)
def function[global_fixes, parameter[]]: constant[Yield multiple (code, function) tuples.] for taget[name[function]] in starred[call[name[list], parameter[call[call[name[globals], parameter[]].values, parameter[]]]]] begin[:] if call[name[inspect].isfunction, parameter[name[function]]] begin[:] variable[arguments] assign[=] call[name[_get_parameters], parameter[name[function]]] if compare[call[name[arguments]][<ast.Slice object at 0x7da18fe92a10>] not_equal[!=] list[[<ast.Constant object at 0x7da18fe93d60>]]] begin[:] continue variable[code] assign[=] call[name[extract_code_from_function], parameter[name[function]]] if name[code] begin[:] <ast.Yield object at 0x7da18fe92f20>
keyword[def] identifier[global_fixes] (): literal[string] keyword[for] identifier[function] keyword[in] identifier[list] ( identifier[globals] (). identifier[values] ()): keyword[if] identifier[inspect] . identifier[isfunction] ( identifier[function] ): identifier[arguments] = identifier[_get_parameters] ( identifier[function] ) keyword[if] identifier[arguments] [: literal[int] ]!=[ literal[string] ]: keyword[continue] identifier[code] = identifier[extract_code_from_function] ( identifier[function] ) keyword[if] identifier[code] : keyword[yield] ( identifier[code] , identifier[function] )
def global_fixes(): """Yield multiple (code, function) tuples.""" for function in list(globals().values()): if inspect.isfunction(function): arguments = _get_parameters(function) if arguments[:1] != ['source']: continue # depends on [control=['if'], data=[]] code = extract_code_from_function(function) if code: yield (code, function) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['function']]
def add_aggregation_columns(
    df, *, group_cols: Union[str, List[str]], aggregations: Dict[str, Agg]
):
    """
    Add new columns containing aggregations values on existing columns

    ---

    ### Parameters

    *mandatory :*
    - `group_cols` (*str* or *list*): columns used to aggregate the data
    - `aggregations` (*dict*): keys are name of new columns and values are
      aggregation functions  Examples of aggregation functions : 'sum', 'max'
      Available aggregation functions are listed [here](
      https://pandas.pydata.org/pandas-docs/stable/user_guide/groupby.html#aggregation)

    Each value of `aggregations` must map exactly one source column to one
    aggregation function; otherwise a `ValueError` is raised.

    ---

    ### Example

    **Input**

    | ENTITY | YEAR | VALUE_1 | VALUE_2 |
    |:------:|:----:|:-------:|:-------:|
    | A      | 2017 |   10    |    3    |
    | A      | 2017 |   20    |    1    |
    | A      | 2018 |   10    |    5    |
    | A      | 2018 |   30    |    4    |
    | B      | 2017 |   60    |    4    |
    | B      | 2017 |   40    |    3    |
    | B      | 2018 |   50    |    7    |
    | B      | 2018 |   60    |    6    |

    ```cson
    add_aggregation_columns:
      group_cols: ['ENTITY', 'YEAR']
      aggregations:
        sum_value1:
          VALUE_1: 'sum'  # sum of `VALUE_1` put in `sum_value1` column
        max_value1:
          VALUE_1: 'max'  # max of `VALUE_1` put in `max_value1` column
        mean_value2:
          VALUE_2: 'mean'  # mean of `VALUE_2` put in `mean_value2` column
    ]
    ```

    **Output**

    | ENTITY | YEAR | VALUE_1 | VALUE_2 | sum_value1 | max_value1 | mean_value2 |
    |:------:|:----:|:-------:|:-------:|:----------:|:----------:|:-----------:|
    | A      | 2017 |   10    |    3    |     30     |     20     |     2.0     |
    | A      | 2017 |   20    |    1    |     30     |     20     |     2.0     |
    | A      | 2018 |   10    |    5    |     40     |     30     |     4.5     |
    | A      | 2018 |   30    |    4    |     40     |     30     |     4.5     |
    | B      | 2017 |   60    |    4    |    100     |     60     |     3.5     |
    | B      | 2017 |   40    |    3    |    100     |     60     |     3.5     |
    | B      | 2018 |   50    |    7    |    110     |     60     |     6.5     |
    | B      | 2018 |   60    |    6    |    110     |     60     |     6.5     |
    """
    group = df.groupby(group_cols)
    for new_col, aggs in aggregations.items():
        # BUGFIX: input validation used `assert`, which is silently
        # stripped under `python -O`; raise an explicit ValueError instead.
        if len(aggs) != 1:
            raise ValueError(
                "aggregation %r must map exactly one source column to one "
                "aggregation function, got: %r" % (new_col, aggs)
            )
        (col, agg), = aggs.items()
        # transform() broadcasts the per-group aggregate back onto every
        # row of the group, so df keeps its original shape.
        df[new_col] = group[col].transform(agg)
    return df
def function[add_aggregation_columns, parameter[df]]: constant[ Add new columns containing aggregations values on existing columns --- ### Parameters *mandatory :* - `group_cols` (*str* or *list*): columns used to aggregate the data - `aggregations` (*dict*): keys are name of new columns and values are aggregation functions Examples of aggregation functions : 'sum', 'max' Available aggregation functions are listed [here]( https://pandas.pydata.org/pandas-docs/stable/user_guide/groupby.html#aggregation) --- ### Example **Input** | ENTITY | YEAR | VALUE_1 | VALUE_2 | |:------:|:----:|:-------:|:-------:| | A | 2017 | 10 | 3 | | A | 2017 | 20 | 1 | | A | 2018 | 10 | 5 | | A | 2018 | 30 | 4 | | B | 2017 | 60 | 4 | | B | 2017 | 40 | 3 | | B | 2018 | 50 | 7 | | B | 2018 | 60 | 6 | ```cson add_aggregation_columns: group_cols: ['ENTITY', 'YEAR'] aggregations: sum_value1: VALUE_1: 'sum' # sum of `VALUE_1` put in `sum_value1` column max_value1: VALUE_1: 'max' # max of `VALUE_1` put in `max_value1` column mean_value2: VALUE_2: 'mean' # mean of `VALUE_2` put in `mean_value2` column ] ``` **Output** | ENTITY | YEAR | VALUE_1 | VALUE_2 | sum_value1 | max_value1 | mean_value2 | |:------:|:----:|:-------:|:-------:|:----------:|:----------:|:-----------:| | A | 2017 | 10 | 3 | 30 | 20 | 2.0 | | A | 2017 | 20 | 1 | 30 | 20 | 2.0 | | A | 2018 | 10 | 5 | 40 | 30 | 4.5 | | A | 2018 | 30 | 4 | 40 | 30 | 4.5 | | B | 2017 | 60 | 4 | 100 | 60 | 3.5 | | B | 2017 | 40 | 3 | 100 | 60 | 3.5 | | B | 2018 | 50 | 7 | 110 | 60 | 6.5 | | B | 2018 | 60 | 6 | 110 | 60 | 6.5 | ] variable[group] assign[=] call[name[df].groupby, parameter[name[group_cols]]] for taget[tuple[[<ast.Name object at 0x7da1b03bab00>, <ast.Name object at 0x7da1b03b8d90>]]] in starred[call[name[aggregations].items, parameter[]]] begin[:] assert[compare[call[name[len], parameter[name[aggs]]] equal[==] constant[1]]] <ast.Tuple object at 0x7da1b03b8b50> assign[=] call[name[aggs].items, parameter[]] call[name[df]][name[new_col]] 
assign[=] call[call[name[group]][name[col]].transform, parameter[name[agg]]] return[name[df]]
keyword[def] identifier[add_aggregation_columns] ( identifier[df] ,*, identifier[group_cols] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]], identifier[aggregations] : identifier[Dict] [ identifier[str] , identifier[Agg] ] ): literal[string] identifier[group] = identifier[df] . identifier[groupby] ( identifier[group_cols] ) keyword[for] identifier[new_col] , identifier[aggs] keyword[in] identifier[aggregations] . identifier[items] (): keyword[assert] identifier[len] ( identifier[aggs] )== literal[int] ( identifier[col] , identifier[agg] ),* identifier[_] = identifier[aggs] . identifier[items] () identifier[df] [ identifier[new_col] ]= identifier[group] [ identifier[col] ]. identifier[transform] ( identifier[agg] ) keyword[return] identifier[df]
def add_aggregation_columns(df, *, group_cols: Union[str, List[str]], aggregations: Dict[str, Agg]): """ Add new columns containing aggregations values on existing columns --- ### Parameters *mandatory :* - `group_cols` (*str* or *list*): columns used to aggregate the data - `aggregations` (*dict*): keys are name of new columns and values are aggregation functions Examples of aggregation functions : 'sum', 'max' Available aggregation functions are listed [here]( https://pandas.pydata.org/pandas-docs/stable/user_guide/groupby.html#aggregation) --- ### Example **Input** | ENTITY | YEAR | VALUE_1 | VALUE_2 | |:------:|:----:|:-------:|:-------:| | A | 2017 | 10 | 3 | | A | 2017 | 20 | 1 | | A | 2018 | 10 | 5 | | A | 2018 | 30 | 4 | | B | 2017 | 60 | 4 | | B | 2017 | 40 | 3 | | B | 2018 | 50 | 7 | | B | 2018 | 60 | 6 | ```cson add_aggregation_columns: group_cols: ['ENTITY', 'YEAR'] aggregations: sum_value1: VALUE_1: 'sum' # sum of `VALUE_1` put in `sum_value1` column max_value1: VALUE_1: 'max' # max of `VALUE_1` put in `max_value1` column mean_value2: VALUE_2: 'mean' # mean of `VALUE_2` put in `mean_value2` column ] ``` **Output** | ENTITY | YEAR | VALUE_1 | VALUE_2 | sum_value1 | max_value1 | mean_value2 | |:------:|:----:|:-------:|:-------:|:----------:|:----------:|:-----------:| | A | 2017 | 10 | 3 | 30 | 20 | 2.0 | | A | 2017 | 20 | 1 | 30 | 20 | 2.0 | | A | 2018 | 10 | 5 | 40 | 30 | 4.5 | | A | 2018 | 30 | 4 | 40 | 30 | 4.5 | | B | 2017 | 60 | 4 | 100 | 60 | 3.5 | | B | 2017 | 40 | 3 | 100 | 60 | 3.5 | | B | 2018 | 50 | 7 | 110 | 60 | 6.5 | | B | 2018 | 60 | 6 | 110 | 60 | 6.5 | """ group = df.groupby(group_cols) for (new_col, aggs) in aggregations.items(): assert len(aggs) == 1 ((col, agg), *_) = aggs.items() df[new_col] = group[col].transform(agg) # depends on [control=['for'], data=[]] return df
def is_prime(n, mr_rounds=25):
    """Test whether n is probably prime

    See <https://en.wikipedia.org/wiki/Primality_test#Probabilistic_tests>

    Arguments:
        n (int): the number to be tested
        mr_rounds (int, optional): number of Miller-Rabin iterations to run;
            defaults to 25 iterations, which is what the GMP library uses

    Returns:
        bool: when this function returns False, `n` is composite (not prime);
            when it returns True, `n` is prime with overwhelming probability
    """
    # Small candidates are resolved by direct lookup in the prime table.
    if n <= first_primes[-1]:
        return n in first_primes
    # Cheap trial division by the small primes catches most composites.
    if any(n % p == 0 for p in first_primes):
        return False
    # Probabilistic fallback; false-positive probability is 4**-mr_rounds.
    return miller_rabin(n, mr_rounds)
def function[is_prime, parameter[n, mr_rounds]]: constant[Test whether n is probably prime See <https://en.wikipedia.org/wiki/Primality_test#Probabilistic_tests> Arguments: n (int): the number to be tested mr_rounds (int, optional): number of Miller-Rabin iterations to run; defaults to 25 iterations, which is what the GMP library uses Returns: bool: when this function returns False, `n` is composite (not prime); when it returns True, `n` is prime with overwhelming probability ] if compare[name[n] less_or_equal[<=] call[name[first_primes]][<ast.UnaryOp object at 0x7da18dc05810>]] begin[:] return[compare[name[n] in name[first_primes]]] for taget[name[p]] in starred[name[first_primes]] begin[:] if compare[binary_operation[name[n] <ast.Mod object at 0x7da2590d6920> name[p]] equal[==] constant[0]] begin[:] return[constant[False]] return[call[name[miller_rabin], parameter[name[n], name[mr_rounds]]]]
keyword[def] identifier[is_prime] ( identifier[n] , identifier[mr_rounds] = literal[int] ): literal[string] keyword[if] identifier[n] <= identifier[first_primes] [- literal[int] ]: keyword[return] identifier[n] keyword[in] identifier[first_primes] keyword[for] identifier[p] keyword[in] identifier[first_primes] : keyword[if] identifier[n] % identifier[p] == literal[int] : keyword[return] keyword[False] keyword[return] identifier[miller_rabin] ( identifier[n] , identifier[mr_rounds] )
def is_prime(n, mr_rounds=25): """Test whether n is probably prime See <https://en.wikipedia.org/wiki/Primality_test#Probabilistic_tests> Arguments: n (int): the number to be tested mr_rounds (int, optional): number of Miller-Rabin iterations to run; defaults to 25 iterations, which is what the GMP library uses Returns: bool: when this function returns False, `n` is composite (not prime); when it returns True, `n` is prime with overwhelming probability """ # as an optimization we quickly detect small primes using the list above if n <= first_primes[-1]: return n in first_primes # depends on [control=['if'], data=['n']] # for small dividors (relatively frequent), euclidean division is best for p in first_primes: if n % p == 0: return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] # the actual generic test; give a false prime with probability 2⁻⁵⁰ return miller_rabin(n, mr_rounds)
def calculate_total_amt(self, items=None):
    """Return the total amount/cost of items in the current invoice.

    :param items: optional mapping of item-id -> item, where each item
        exposes a ``total_price`` attribute convertible to ``float``.
        When omitted or empty, ``self.items`` is used instead (matching
        the original ``items.items() or self.items.items()`` fallback).
    :return: the summed total as a ``float`` (``0`` for no items).
    """
    # BUGFIX: the previous signature used a mutable default (`items={}`),
    # a classic Python pitfall; a `None` sentinel preserves the exact
    # "use self.items when falsy" behaviour without sharing state.
    source = items if items else self.items
    return sum(float(item.total_price) for item in source.values())
def function[calculate_total_amt, parameter[self, items]]: constant[Returns the total amount/cost of items in the current invoice] variable[_items] assign[=] <ast.BoolOp object at 0x7da1b10d6d10> return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b10d6da0>]]]
keyword[def] identifier[calculate_total_amt] ( identifier[self] , identifier[items] ={}): literal[string] identifier[_items] = identifier[items] . identifier[items] () keyword[or] identifier[self] . identifier[items] . identifier[items] () keyword[return] identifier[sum] ( identifier[float] ( identifier[x] [ literal[int] ]. identifier[total_price] ) keyword[for] identifier[x] keyword[in] identifier[_items] )
def calculate_total_amt(self, items={}): """Returns the total amount/cost of items in the current invoice""" _items = items.items() or self.items.items() return sum((float(x[1].total_price) for x in _items))
def fromLink(self, link):
    """
    Factory Method. Fetches article data from given link and builds the object

    :param link: relative article path; the first path segment carries the
        issue identifier (e.g. ``issue-12``) and the last segment the
        article slug.  The absolute URL is rebuilt from it below.
    :return: a populated ``Article`` instance.
    """
    soup = get_article_soup(link)
    # The first <article> tag with an empty class attribute holds the header.
    head = soup.find_all('article',class_='')[0]
    parts = link.split('/')
    # Article id = "<issue segment>-<slug>"; issue = number after the dash.
    id = '%s-%s'%(parts[0],parts[-1])
    issue = parts[0].split('-')[-1]
    #fetching head
    # NOTE(review): these conditionals rely on BeautifulSoup tag truthiness;
    # an empty <h1>/<h2> tag may evaluate falsy — confirm the '' fallback
    # is intended in that case.
    title = head.find("h1").contents[0] if head.find("h1") else ''
    tagline = head.find("h2").contents[0] if head.find("h2") else ''
    body = ''
    #fetching body
    if len(soup.find_all('article',class_='main-body')) > 0:
        body = soup.find_all('article',class_='main-body')[0].find(class_='inner')
    author = ''
    #fetching author
    if len(soup.find_all('aside')) > 0:
        # NOTE(review): find_all('aside') is evaluated three times here and
        # the else-branch passes '' to Author.from_soup below — presumably
        # from_soup tolerates a non-tag argument; verify.
        aside = soup.find_all('aside')[0] if soup.find_all('aside')[0] else ''
        author = Author.from_soup(aside)
    return Article(id=id,title=title,tagline=tagline,body=body,issue=issue,link='http://thezine.biz/%s'%link,author=author)
def function[fromLink, parameter[self, link]]: constant[ Factory Method. Fetches article data from given link and builds the object ] variable[soup] assign[=] call[name[get_article_soup], parameter[name[link]]] variable[head] assign[=] call[call[name[soup].find_all, parameter[constant[article]]]][constant[0]] variable[parts] assign[=] call[name[link].split, parameter[constant[/]]] variable[id] assign[=] binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da204347520>, <ast.Subscript object at 0x7da204345480>]]] variable[issue] assign[=] call[call[call[name[parts]][constant[0]].split, parameter[constant[-]]]][<ast.UnaryOp object at 0x7da204345840>] variable[title] assign[=] <ast.IfExp object at 0x7da204344250> variable[tagline] assign[=] <ast.IfExp object at 0x7da204344b50> variable[body] assign[=] constant[] if compare[call[name[len], parameter[call[name[soup].find_all, parameter[constant[article]]]]] greater[>] constant[0]] begin[:] variable[body] assign[=] call[call[call[name[soup].find_all, parameter[constant[article]]]][constant[0]].find, parameter[]] variable[author] assign[=] constant[] if compare[call[name[len], parameter[call[name[soup].find_all, parameter[constant[aside]]]]] greater[>] constant[0]] begin[:] variable[aside] assign[=] <ast.IfExp object at 0x7da204346cb0> variable[author] assign[=] call[name[Author].from_soup, parameter[name[aside]]] return[call[name[Article], parameter[]]]
keyword[def] identifier[fromLink] ( identifier[self] , identifier[link] ): literal[string] identifier[soup] = identifier[get_article_soup] ( identifier[link] ) identifier[head] = identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] )[ literal[int] ] identifier[parts] = identifier[link] . identifier[split] ( literal[string] ) identifier[id] = literal[string] %( identifier[parts] [ literal[int] ], identifier[parts] [- literal[int] ]) identifier[issue] = identifier[parts] [ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ] identifier[title] = identifier[head] . identifier[find] ( literal[string] ). identifier[contents] [ literal[int] ] keyword[if] identifier[head] . identifier[find] ( literal[string] ) keyword[else] literal[string] identifier[tagline] = identifier[head] . identifier[find] ( literal[string] ). identifier[contents] [ literal[int] ] keyword[if] identifier[head] . identifier[find] ( literal[string] ) keyword[else] literal[string] identifier[body] = literal[string] keyword[if] identifier[len] ( identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] ))> literal[int] : identifier[body] = identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] )[ literal[int] ]. identifier[find] ( identifier[class_] = literal[string] ) identifier[author] = literal[string] keyword[if] identifier[len] ( identifier[soup] . identifier[find_all] ( literal[string] ))> literal[int] : identifier[aside] = identifier[soup] . identifier[find_all] ( literal[string] )[ literal[int] ] keyword[if] identifier[soup] . identifier[find_all] ( literal[string] )[ literal[int] ] keyword[else] literal[string] identifier[author] = identifier[Author] . 
identifier[from_soup] ( identifier[aside] ) keyword[return] identifier[Article] ( identifier[id] = identifier[id] , identifier[title] = identifier[title] , identifier[tagline] = identifier[tagline] , identifier[body] = identifier[body] , identifier[issue] = identifier[issue] , identifier[link] = literal[string] % identifier[link] , identifier[author] = identifier[author] )
def fromLink(self, link): """ Factory Method. Fetches article data from given link and builds the object """ soup = get_article_soup(link) head = soup.find_all('article', class_='')[0] parts = link.split('/') id = '%s-%s' % (parts[0], parts[-1]) issue = parts[0].split('-')[-1] #fetching head title = head.find('h1').contents[0] if head.find('h1') else '' tagline = head.find('h2').contents[0] if head.find('h2') else '' body = '' #fetching body if len(soup.find_all('article', class_='main-body')) > 0: body = soup.find_all('article', class_='main-body')[0].find(class_='inner') # depends on [control=['if'], data=[]] author = '' #fetching author if len(soup.find_all('aside')) > 0: aside = soup.find_all('aside')[0] if soup.find_all('aside')[0] else '' author = Author.from_soup(aside) # depends on [control=['if'], data=[]] return Article(id=id, title=title, tagline=tagline, body=body, issue=issue, link='http://thezine.biz/%s' % link, author=author)
def white_move(self):
    """
    Calls the white player's ``generate_move()`` method and updates the
    board with the move returned.
    """
    proposed = self.player_white.generate_move(self.position)
    # Coerce the proposal into a legal move before applying it.
    self.position.update(make_legal(proposed, self.position))
def function[white_move, parameter[self]]: constant[ Calls the white player's ``generate_move()`` method and updates the board with the move returned. ] variable[move] assign[=] call[name[self].player_white.generate_move, parameter[name[self].position]] variable[move] assign[=] call[name[make_legal], parameter[name[move], name[self].position]] call[name[self].position.update, parameter[name[move]]]
keyword[def] identifier[white_move] ( identifier[self] ): literal[string] identifier[move] = identifier[self] . identifier[player_white] . identifier[generate_move] ( identifier[self] . identifier[position] ) identifier[move] = identifier[make_legal] ( identifier[move] , identifier[self] . identifier[position] ) identifier[self] . identifier[position] . identifier[update] ( identifier[move] )
def white_move(self): """ Calls the white player's ``generate_move()`` method and updates the board with the move returned. """ move = self.player_white.generate_move(self.position) move = make_legal(move, self.position) self.position.update(move)
def load_mode_validator(obs_mode, node):
    """Load observing mode validator

    Reads the optional 'validator' entry from ``node``; when it is a
    dotted-path string, the referenced callable is imported and attached
    to ``obs_mode.validator``.  A missing/None entry leaves the mode
    untouched; any other type is rejected.
    """
    nval = node.get('validator')
    if isinstance(nval, str):
        # Resolve the dotted path to the actual validator callable.
        obs_mode.validator = import_object(nval)
    elif nval is not None:
        raise TypeError('validator must be None or a string')
    return obs_mode
def function[load_mode_validator, parameter[obs_mode, node]]: constant[Load observing mode validator] variable[nval] assign[=] call[name[node].get, parameter[constant[validator]]] if compare[name[nval] is constant[None]] begin[:] pass return[name[obs_mode]]
keyword[def] identifier[load_mode_validator] ( identifier[obs_mode] , identifier[node] ): literal[string] identifier[nval] = identifier[node] . identifier[get] ( literal[string] ) keyword[if] identifier[nval] keyword[is] keyword[None] : keyword[pass] keyword[elif] identifier[isinstance] ( identifier[nval] , identifier[str] ): identifier[obs_mode] . identifier[validator] = identifier[import_object] ( identifier[nval] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[obs_mode]
def load_mode_validator(obs_mode, node): """Load observing mode validator""" nval = node.get('validator') if nval is None: pass # depends on [control=['if'], data=[]] elif isinstance(nval, str): # load function obs_mode.validator = import_object(nval) # depends on [control=['if'], data=[]] else: raise TypeError('validator must be None or a string') return obs_mode
def nodeprep(string, allow_unassigned=False):
    """
    Process the given `string` using the Nodeprep (`RFC 6122`_) profile.

    In the error cases defined in `RFC 3454`_ (stringprep), a
    :class:`ValueError` is raised.
    """
    chars = list(string)
    _nodeprep_do_mapping(chars)
    do_normalization(chars)
    # Output tables prohibited by the Nodeprep profile, plus the extra
    # characters Nodeprep itself forbids.
    prohibited_tables = (
        stringprep.in_table_c11,
        stringprep.in_table_c12,
        stringprep.in_table_c21,
        stringprep.in_table_c22,
        stringprep.in_table_c3,
        stringprep.in_table_c4,
        stringprep.in_table_c5,
        stringprep.in_table_c6,
        stringprep.in_table_c7,
        stringprep.in_table_c8,
        stringprep.in_table_c9,
        _nodeprep_prohibited.__contains__,
    )
    check_prohibited_output(chars, prohibited_tables)
    check_bidi(chars)
    if not allow_unassigned:
        check_unassigned(chars, (stringprep.in_table_a1,))
    return "".join(chars)
def function[nodeprep, parameter[string, allow_unassigned]]: constant[ Process the given `string` using the Nodeprep (`RFC 6122`_) profile. In the error cases defined in `RFC 3454`_ (stringprep), a :class:`ValueError` is raised. ] variable[chars] assign[=] call[name[list], parameter[name[string]]] call[name[_nodeprep_do_mapping], parameter[name[chars]]] call[name[do_normalization], parameter[name[chars]]] call[name[check_prohibited_output], parameter[name[chars], tuple[[<ast.Attribute object at 0x7da20c6e7e50>, <ast.Attribute object at 0x7da20c6e6c20>, <ast.Attribute object at 0x7da20c6e62f0>, <ast.Attribute object at 0x7da20c6e66e0>, <ast.Attribute object at 0x7da20c6e5c90>, <ast.Attribute object at 0x7da20c6e6290>, <ast.Attribute object at 0x7da20c6e6ef0>, <ast.Attribute object at 0x7da20c6e6f80>, <ast.Attribute object at 0x7da20c6e61d0>, <ast.Attribute object at 0x7da20c6e54e0>, <ast.Attribute object at 0x7da20c6e7c40>, <ast.Lambda object at 0x7da20c6e66b0>]]]] call[name[check_bidi], parameter[name[chars]]] if <ast.UnaryOp object at 0x7da18f721b10> begin[:] call[name[check_unassigned], parameter[name[chars], tuple[[<ast.Attribute object at 0x7da18f721b70>]]]] return[call[constant[].join, parameter[name[chars]]]]
keyword[def] identifier[nodeprep] ( identifier[string] , identifier[allow_unassigned] = keyword[False] ): literal[string] identifier[chars] = identifier[list] ( identifier[string] ) identifier[_nodeprep_do_mapping] ( identifier[chars] ) identifier[do_normalization] ( identifier[chars] ) identifier[check_prohibited_output] ( identifier[chars] , ( identifier[stringprep] . identifier[in_table_c11] , identifier[stringprep] . identifier[in_table_c12] , identifier[stringprep] . identifier[in_table_c21] , identifier[stringprep] . identifier[in_table_c22] , identifier[stringprep] . identifier[in_table_c3] , identifier[stringprep] . identifier[in_table_c4] , identifier[stringprep] . identifier[in_table_c5] , identifier[stringprep] . identifier[in_table_c6] , identifier[stringprep] . identifier[in_table_c7] , identifier[stringprep] . identifier[in_table_c8] , identifier[stringprep] . identifier[in_table_c9] , keyword[lambda] identifier[x] : identifier[x] keyword[in] identifier[_nodeprep_prohibited] )) identifier[check_bidi] ( identifier[chars] ) keyword[if] keyword[not] identifier[allow_unassigned] : identifier[check_unassigned] ( identifier[chars] , ( identifier[stringprep] . identifier[in_table_a1] , ) ) keyword[return] literal[string] . identifier[join] ( identifier[chars] )
def nodeprep(string, allow_unassigned=False): """ Process the given `string` using the Nodeprep (`RFC 6122`_) profile. In the error cases defined in `RFC 3454`_ (stringprep), a :class:`ValueError` is raised. """ chars = list(string) _nodeprep_do_mapping(chars) do_normalization(chars) check_prohibited_output(chars, (stringprep.in_table_c11, stringprep.in_table_c12, stringprep.in_table_c21, stringprep.in_table_c22, stringprep.in_table_c3, stringprep.in_table_c4, stringprep.in_table_c5, stringprep.in_table_c6, stringprep.in_table_c7, stringprep.in_table_c8, stringprep.in_table_c9, lambda x: x in _nodeprep_prohibited)) check_bidi(chars) if not allow_unassigned: check_unassigned(chars, (stringprep.in_table_a1,)) # depends on [control=['if'], data=[]] return ''.join(chars)
def build_object(self, obj): """Override django-bakery to skip profiles that raise 404""" try: build_path = self.get_build_path(obj) self.request = self.create_request(build_path) self.request.user = AnonymousUser() self.set_kwargs(obj) self.build_file(build_path, self.get_content()) except Http404: # cleanup directory self.unbuild_object(obj)
def function[build_object, parameter[self, obj]]: constant[Override django-bakery to skip profiles that raise 404] <ast.Try object at 0x7da1b0eb9600>
keyword[def] identifier[build_object] ( identifier[self] , identifier[obj] ): literal[string] keyword[try] : identifier[build_path] = identifier[self] . identifier[get_build_path] ( identifier[obj] ) identifier[self] . identifier[request] = identifier[self] . identifier[create_request] ( identifier[build_path] ) identifier[self] . identifier[request] . identifier[user] = identifier[AnonymousUser] () identifier[self] . identifier[set_kwargs] ( identifier[obj] ) identifier[self] . identifier[build_file] ( identifier[build_path] , identifier[self] . identifier[get_content] ()) keyword[except] identifier[Http404] : identifier[self] . identifier[unbuild_object] ( identifier[obj] )
def build_object(self, obj): """Override django-bakery to skip profiles that raise 404""" try: build_path = self.get_build_path(obj) self.request = self.create_request(build_path) self.request.user = AnonymousUser() self.set_kwargs(obj) self.build_file(build_path, self.get_content()) # depends on [control=['try'], data=[]] except Http404: # cleanup directory self.unbuild_object(obj) # depends on [control=['except'], data=[]]
def hash(self): """ Returns a hash of this render configuration from the variable, renderer, and time_index parameters. Used for caching the full-extent, native projection render so that subsequent requests can be served by a warp operation only. """ renderer_str = "{}|{}|{}|{}".format( self.renderer.__class__.__name__, self.renderer.colormap, self.renderer.fill_value, self.renderer.background_color ) if isinstance(self.renderer, StretchedRenderer): renderer_str = "{}|{}|{}".format(renderer_str, self.renderer.method, self.renderer.colorspace) elif isinstance(self.renderer, UniqueValuesRenderer): renderer_str = "{}|{}".format(renderer_str, self.renderer.labels) return hash("{}/{}/{}".format(self.variable.pk, renderer_str, self.time_index))
def function[hash, parameter[self]]: constant[ Returns a hash of this render configuration from the variable, renderer, and time_index parameters. Used for caching the full-extent, native projection render so that subsequent requests can be served by a warp operation only. ] variable[renderer_str] assign[=] call[constant[{}|{}|{}|{}].format, parameter[name[self].renderer.__class__.__name__, name[self].renderer.colormap, name[self].renderer.fill_value, name[self].renderer.background_color]] if call[name[isinstance], parameter[name[self].renderer, name[StretchedRenderer]]] begin[:] variable[renderer_str] assign[=] call[constant[{}|{}|{}].format, parameter[name[renderer_str], name[self].renderer.method, name[self].renderer.colorspace]] return[call[name[hash], parameter[call[constant[{}/{}/{}].format, parameter[name[self].variable.pk, name[renderer_str], name[self].time_index]]]]]
keyword[def] identifier[hash] ( identifier[self] ): literal[string] identifier[renderer_str] = literal[string] . identifier[format] ( identifier[self] . identifier[renderer] . identifier[__class__] . identifier[__name__] , identifier[self] . identifier[renderer] . identifier[colormap] , identifier[self] . identifier[renderer] . identifier[fill_value] , identifier[self] . identifier[renderer] . identifier[background_color] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[renderer] , identifier[StretchedRenderer] ): identifier[renderer_str] = literal[string] . identifier[format] ( identifier[renderer_str] , identifier[self] . identifier[renderer] . identifier[method] , identifier[self] . identifier[renderer] . identifier[colorspace] ) keyword[elif] identifier[isinstance] ( identifier[self] . identifier[renderer] , identifier[UniqueValuesRenderer] ): identifier[renderer_str] = literal[string] . identifier[format] ( identifier[renderer_str] , identifier[self] . identifier[renderer] . identifier[labels] ) keyword[return] identifier[hash] ( literal[string] . identifier[format] ( identifier[self] . identifier[variable] . identifier[pk] , identifier[renderer_str] , identifier[self] . identifier[time_index] ))
def hash(self): """ Returns a hash of this render configuration from the variable, renderer, and time_index parameters. Used for caching the full-extent, native projection render so that subsequent requests can be served by a warp operation only. """ renderer_str = '{}|{}|{}|{}'.format(self.renderer.__class__.__name__, self.renderer.colormap, self.renderer.fill_value, self.renderer.background_color) if isinstance(self.renderer, StretchedRenderer): renderer_str = '{}|{}|{}'.format(renderer_str, self.renderer.method, self.renderer.colorspace) # depends on [control=['if'], data=[]] elif isinstance(self.renderer, UniqueValuesRenderer): renderer_str = '{}|{}'.format(renderer_str, self.renderer.labels) # depends on [control=['if'], data=[]] return hash('{}/{}/{}'.format(self.variable.pk, renderer_str, self.time_index))
def build_sample_smoother_problem_friedman82(N=200): """Sample problem from supersmoother publication.""" x = numpy.random.uniform(size=N) err = numpy.random.standard_normal(N) y = numpy.sin(2 * math.pi * (1 - x) ** 2) + x * err return x, y
def function[build_sample_smoother_problem_friedman82, parameter[N]]: constant[Sample problem from supersmoother publication.] variable[x] assign[=] call[name[numpy].random.uniform, parameter[]] variable[err] assign[=] call[name[numpy].random.standard_normal, parameter[name[N]]] variable[y] assign[=] binary_operation[call[name[numpy].sin, parameter[binary_operation[binary_operation[constant[2] * name[math].pi] * binary_operation[binary_operation[constant[1] - name[x]] ** constant[2]]]]] + binary_operation[name[x] * name[err]]] return[tuple[[<ast.Name object at 0x7da1b19a00a0>, <ast.Name object at 0x7da1b19a2e30>]]]
keyword[def] identifier[build_sample_smoother_problem_friedman82] ( identifier[N] = literal[int] ): literal[string] identifier[x] = identifier[numpy] . identifier[random] . identifier[uniform] ( identifier[size] = identifier[N] ) identifier[err] = identifier[numpy] . identifier[random] . identifier[standard_normal] ( identifier[N] ) identifier[y] = identifier[numpy] . identifier[sin] ( literal[int] * identifier[math] . identifier[pi] *( literal[int] - identifier[x] )** literal[int] )+ identifier[x] * identifier[err] keyword[return] identifier[x] , identifier[y]
def build_sample_smoother_problem_friedman82(N=200): """Sample problem from supersmoother publication.""" x = numpy.random.uniform(size=N) err = numpy.random.standard_normal(N) y = numpy.sin(2 * math.pi * (1 - x) ** 2) + x * err return (x, y)
def disconnect(self, id): # pylint: disable=invalid-name,redefined-builtin """Close proxy connection to a device's management interface. :param id: Device ID as an int. """ return self.service.post(self.base+str(id)+'/disconnect/')
def function[disconnect, parameter[self, id]]: constant[Close proxy connection to a device's management interface. :param id: Device ID as an int. ] return[call[name[self].service.post, parameter[binary_operation[binary_operation[name[self].base + call[name[str], parameter[name[id]]]] + constant[/disconnect/]]]]]
keyword[def] identifier[disconnect] ( identifier[self] , identifier[id] ): literal[string] keyword[return] identifier[self] . identifier[service] . identifier[post] ( identifier[self] . identifier[base] + identifier[str] ( identifier[id] )+ literal[string] )
def disconnect(self, id): # pylint: disable=invalid-name,redefined-builtin "Close proxy connection to a device's management interface.\n\n :param id: Device ID as an int.\n " return self.service.post(self.base + str(id) + '/disconnect/')
def write_extra_data(self, stream: WriteStream) -> None: """Writes the param container and string pointer arrays. Unlike other write_extra_data functions, this can be called before write().""" if self.params: stream.align(8) if self._params_offset_writer: self._params_offset_writer.write_current_offset(stream) else: self._params_offset = stream.tell() self.params.write(stream) if self.actions: stream.align(8) if self._actions_offset_writer: self._actions_offset_writer.write_current_offset(stream) else: self._actions_offset = stream.tell() for s in self.actions: stream.write_string_ref(s.v) if self.queries: stream.align(8) if self._queries_offset_writer: self._queries_offset_writer.write_current_offset(stream) else: self._queries_offset = stream.tell() for s in self.queries: stream.write_string_ref(s.v)
def function[write_extra_data, parameter[self, stream]]: constant[Writes the param container and string pointer arrays. Unlike other write_extra_data functions, this can be called before write().] if name[self].params begin[:] call[name[stream].align, parameter[constant[8]]] if name[self]._params_offset_writer begin[:] call[name[self]._params_offset_writer.write_current_offset, parameter[name[stream]]] call[name[self].params.write, parameter[name[stream]]] if name[self].actions begin[:] call[name[stream].align, parameter[constant[8]]] if name[self]._actions_offset_writer begin[:] call[name[self]._actions_offset_writer.write_current_offset, parameter[name[stream]]] for taget[name[s]] in starred[name[self].actions] begin[:] call[name[stream].write_string_ref, parameter[name[s].v]] if name[self].queries begin[:] call[name[stream].align, parameter[constant[8]]] if name[self]._queries_offset_writer begin[:] call[name[self]._queries_offset_writer.write_current_offset, parameter[name[stream]]] for taget[name[s]] in starred[name[self].queries] begin[:] call[name[stream].write_string_ref, parameter[name[s].v]]
keyword[def] identifier[write_extra_data] ( identifier[self] , identifier[stream] : identifier[WriteStream] )-> keyword[None] : literal[string] keyword[if] identifier[self] . identifier[params] : identifier[stream] . identifier[align] ( literal[int] ) keyword[if] identifier[self] . identifier[_params_offset_writer] : identifier[self] . identifier[_params_offset_writer] . identifier[write_current_offset] ( identifier[stream] ) keyword[else] : identifier[self] . identifier[_params_offset] = identifier[stream] . identifier[tell] () identifier[self] . identifier[params] . identifier[write] ( identifier[stream] ) keyword[if] identifier[self] . identifier[actions] : identifier[stream] . identifier[align] ( literal[int] ) keyword[if] identifier[self] . identifier[_actions_offset_writer] : identifier[self] . identifier[_actions_offset_writer] . identifier[write_current_offset] ( identifier[stream] ) keyword[else] : identifier[self] . identifier[_actions_offset] = identifier[stream] . identifier[tell] () keyword[for] identifier[s] keyword[in] identifier[self] . identifier[actions] : identifier[stream] . identifier[write_string_ref] ( identifier[s] . identifier[v] ) keyword[if] identifier[self] . identifier[queries] : identifier[stream] . identifier[align] ( literal[int] ) keyword[if] identifier[self] . identifier[_queries_offset_writer] : identifier[self] . identifier[_queries_offset_writer] . identifier[write_current_offset] ( identifier[stream] ) keyword[else] : identifier[self] . identifier[_queries_offset] = identifier[stream] . identifier[tell] () keyword[for] identifier[s] keyword[in] identifier[self] . identifier[queries] : identifier[stream] . identifier[write_string_ref] ( identifier[s] . identifier[v] )
def write_extra_data(self, stream: WriteStream) -> None: """Writes the param container and string pointer arrays. Unlike other write_extra_data functions, this can be called before write().""" if self.params: stream.align(8) if self._params_offset_writer: self._params_offset_writer.write_current_offset(stream) # depends on [control=['if'], data=[]] else: self._params_offset = stream.tell() self.params.write(stream) # depends on [control=['if'], data=[]] if self.actions: stream.align(8) if self._actions_offset_writer: self._actions_offset_writer.write_current_offset(stream) # depends on [control=['if'], data=[]] else: self._actions_offset = stream.tell() for s in self.actions: stream.write_string_ref(s.v) # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]] if self.queries: stream.align(8) if self._queries_offset_writer: self._queries_offset_writer.write_current_offset(stream) # depends on [control=['if'], data=[]] else: self._queries_offset = stream.tell() for s in self.queries: stream.write_string_ref(s.v) # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]]
def serialize_operator_equal(self, op): """ Serializer for :meth:`SpiffWorkflow.operators.Equal`. Example:: <equals> <value>text</value> <value><attribute>foobar</attribute></value> <value><path>foobar</path></value> </equals> """ elem = etree.Element('equals') return self.serialize_value_list(elem, op.args)
def function[serialize_operator_equal, parameter[self, op]]: constant[ Serializer for :meth:`SpiffWorkflow.operators.Equal`. Example:: <equals> <value>text</value> <value><attribute>foobar</attribute></value> <value><path>foobar</path></value> </equals> ] variable[elem] assign[=] call[name[etree].Element, parameter[constant[equals]]] return[call[name[self].serialize_value_list, parameter[name[elem], name[op].args]]]
keyword[def] identifier[serialize_operator_equal] ( identifier[self] , identifier[op] ): literal[string] identifier[elem] = identifier[etree] . identifier[Element] ( literal[string] ) keyword[return] identifier[self] . identifier[serialize_value_list] ( identifier[elem] , identifier[op] . identifier[args] )
def serialize_operator_equal(self, op): """ Serializer for :meth:`SpiffWorkflow.operators.Equal`. Example:: <equals> <value>text</value> <value><attribute>foobar</attribute></value> <value><path>foobar</path></value> </equals> """ elem = etree.Element('equals') return self.serialize_value_list(elem, op.args)
def read(file, system): """Parse an ANDES card file into internal variables""" try: fid = open(file, 'r') raw_file = fid.readlines() except IOError: print('* IOError while reading input card file.') return ret_dict = dict() ret_dict['outfile'] = file.split('.')[0].lower() + '.py' key, val = None, None for idx, line in enumerate(raw_file): line = line.strip() if not line: continue if line.startswith('#'): continue elif '#' in line: line = line.split('#')[0] if '=' in line: # defining a field key, val = line.split('=') key, val = key.strip(), val.strip() val = [] if val == '' else val ret_dict.update({key: val}) if val: val = val.split(';') else: val.extend(line.split(';')) if val: val = de_blank(val) ret_dict[key] = val ret_dict_ord = dict(ret_dict) for key, val in ret_dict.items(): if not val: continue if type(val) == list: if ':' in val[0]: new_val = {} # return in a dictionary new_val_ord = [ ] # return in an ordered list with the dict keys at 0 for item in val: try: m, n = item.split(':') except ValueError: print('* Error: check line <{}>'.format(item)) return m, n = m.strip(), n.strip() if ',' in n: n = n.split(',') n = de_blank(n) n = [to_number(i) for i in n] else: n = to_number(n) new_val.update({m.strip(): n}) new_val_ord.append([m.strip(), n]) ret_dict[key] = new_val ret_dict_ord[key] = new_val_ord ret_dict['name'] = ret_dict['name'][0] ret_dict['doc_string'] = ret_dict['doc_string'][0] ret_dict['group'] = ret_dict['group'][0] ret_dict['service_keys'] = list(ret_dict['service_eq'].keys()) ret_dict['consts'] = list(ret_dict['data'].keys()) + list( ret_dict['service_eq'].keys()) ret_dict['init1_eq'] = ret_dict_ord['init1_eq'] ret_dict['service_eq'] = ret_dict_ord['service_eq'] ret_dict['ctrl'] = ret_dict_ord['ctrl'] copy_algebs = [] copy_states = [] for item in ret_dict['ctrl']: key, val = item if val[3] == 'y': copy_algebs.append(key) elif val[3] == 'x': copy_states.append(key) elif val[3] == 'c': ret_dict['consts'].append(key) ret_dict['copy_algebs'] = 
copy_algebs ret_dict['copy_states'] = copy_states return run(system, **ret_dict)
def function[read, parameter[file, system]]: constant[Parse an ANDES card file into internal variables] <ast.Try object at 0x7da207f9bca0> variable[ret_dict] assign[=] call[name[dict], parameter[]] call[name[ret_dict]][constant[outfile]] assign[=] binary_operation[call[call[call[name[file].split, parameter[constant[.]]]][constant[0]].lower, parameter[]] + constant[.py]] <ast.Tuple object at 0x7da207f99ba0> assign[=] tuple[[<ast.Constant object at 0x7da207f994e0>, <ast.Constant object at 0x7da207f98520>]] for taget[tuple[[<ast.Name object at 0x7da207f9a140>, <ast.Name object at 0x7da207f988b0>]]] in starred[call[name[enumerate], parameter[name[raw_file]]]] begin[:] variable[line] assign[=] call[name[line].strip, parameter[]] if <ast.UnaryOp object at 0x7da207f9b700> begin[:] continue if call[name[line].startswith, parameter[constant[#]]] begin[:] continue if compare[constant[=] in name[line]] begin[:] <ast.Tuple object at 0x7da207f9ab30> assign[=] call[name[line].split, parameter[constant[=]]] <ast.Tuple object at 0x7da207f9b580> assign[=] tuple[[<ast.Call object at 0x7da207f9af50>, <ast.Call object at 0x7da207f9be50>]] variable[val] assign[=] <ast.IfExp object at 0x7da207f9b7c0> call[name[ret_dict].update, parameter[dictionary[[<ast.Name object at 0x7da207f992a0>], [<ast.Name object at 0x7da207f9a050>]]]] if name[val] begin[:] variable[val] assign[=] call[name[val].split, parameter[constant[;]]] if name[val] begin[:] variable[val] assign[=] call[name[de_blank], parameter[name[val]]] call[name[ret_dict]][name[key]] assign[=] name[val] variable[ret_dict_ord] assign[=] call[name[dict], parameter[name[ret_dict]]] for taget[tuple[[<ast.Name object at 0x7da20e74bd90>, <ast.Name object at 0x7da20e74b370>]]] in starred[call[name[ret_dict].items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da20e748400> begin[:] continue if compare[call[name[type], parameter[name[val]]] equal[==] name[list]] begin[:] if compare[constant[:] in call[name[val]][constant[0]]] begin[:] 
variable[new_val] assign[=] dictionary[[], []] variable[new_val_ord] assign[=] list[[]] for taget[name[item]] in starred[name[val]] begin[:] <ast.Try object at 0x7da20e74b7f0> <ast.Tuple object at 0x7da20e74b1c0> assign[=] tuple[[<ast.Call object at 0x7da20e74be50>, <ast.Call object at 0x7da20e7497b0>]] if compare[constant[,] in name[n]] begin[:] variable[n] assign[=] call[name[n].split, parameter[constant[,]]] variable[n] assign[=] call[name[de_blank], parameter[name[n]]] variable[n] assign[=] <ast.ListComp object at 0x7da18f00de10> call[name[new_val].update, parameter[dictionary[[<ast.Call object at 0x7da18f00e3e0>], [<ast.Name object at 0x7da18f00e470>]]]] call[name[new_val_ord].append, parameter[list[[<ast.Call object at 0x7da18f00e3b0>, <ast.Name object at 0x7da18f00e1d0>]]]] call[name[ret_dict]][name[key]] assign[=] name[new_val] call[name[ret_dict_ord]][name[key]] assign[=] name[new_val_ord] call[name[ret_dict]][constant[name]] assign[=] call[call[name[ret_dict]][constant[name]]][constant[0]] call[name[ret_dict]][constant[doc_string]] assign[=] call[call[name[ret_dict]][constant[doc_string]]][constant[0]] call[name[ret_dict]][constant[group]] assign[=] call[call[name[ret_dict]][constant[group]]][constant[0]] call[name[ret_dict]][constant[service_keys]] assign[=] call[name[list], parameter[call[call[name[ret_dict]][constant[service_eq]].keys, parameter[]]]] call[name[ret_dict]][constant[consts]] assign[=] binary_operation[call[name[list], parameter[call[call[name[ret_dict]][constant[data]].keys, parameter[]]]] + call[name[list], parameter[call[call[name[ret_dict]][constant[service_eq]].keys, parameter[]]]]] call[name[ret_dict]][constant[init1_eq]] assign[=] call[name[ret_dict_ord]][constant[init1_eq]] call[name[ret_dict]][constant[service_eq]] assign[=] call[name[ret_dict_ord]][constant[service_eq]] call[name[ret_dict]][constant[ctrl]] assign[=] call[name[ret_dict_ord]][constant[ctrl]] variable[copy_algebs] assign[=] list[[]] variable[copy_states] assign[=] 
list[[]] for taget[name[item]] in starred[call[name[ret_dict]][constant[ctrl]]] begin[:] <ast.Tuple object at 0x7da18f00d450> assign[=] name[item] if compare[call[name[val]][constant[3]] equal[==] constant[y]] begin[:] call[name[copy_algebs].append, parameter[name[key]]] call[name[ret_dict]][constant[copy_algebs]] assign[=] name[copy_algebs] call[name[ret_dict]][constant[copy_states]] assign[=] name[copy_states] return[call[name[run], parameter[name[system]]]]
keyword[def] identifier[read] ( identifier[file] , identifier[system] ): literal[string] keyword[try] : identifier[fid] = identifier[open] ( identifier[file] , literal[string] ) identifier[raw_file] = identifier[fid] . identifier[readlines] () keyword[except] identifier[IOError] : identifier[print] ( literal[string] ) keyword[return] identifier[ret_dict] = identifier[dict] () identifier[ret_dict] [ literal[string] ]= identifier[file] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[lower] ()+ literal[string] identifier[key] , identifier[val] = keyword[None] , keyword[None] keyword[for] identifier[idx] , identifier[line] keyword[in] identifier[enumerate] ( identifier[raw_file] ): identifier[line] = identifier[line] . identifier[strip] () keyword[if] keyword[not] identifier[line] : keyword[continue] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[continue] keyword[elif] literal[string] keyword[in] identifier[line] : identifier[line] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] literal[string] keyword[in] identifier[line] : identifier[key] , identifier[val] = identifier[line] . identifier[split] ( literal[string] ) identifier[key] , identifier[val] = identifier[key] . identifier[strip] (), identifier[val] . identifier[strip] () identifier[val] =[] keyword[if] identifier[val] == literal[string] keyword[else] identifier[val] identifier[ret_dict] . identifier[update] ({ identifier[key] : identifier[val] }) keyword[if] identifier[val] : identifier[val] = identifier[val] . identifier[split] ( literal[string] ) keyword[else] : identifier[val] . identifier[extend] ( identifier[line] . 
identifier[split] ( literal[string] )) keyword[if] identifier[val] : identifier[val] = identifier[de_blank] ( identifier[val] ) identifier[ret_dict] [ identifier[key] ]= identifier[val] identifier[ret_dict_ord] = identifier[dict] ( identifier[ret_dict] ) keyword[for] identifier[key] , identifier[val] keyword[in] identifier[ret_dict] . identifier[items] (): keyword[if] keyword[not] identifier[val] : keyword[continue] keyword[if] identifier[type] ( identifier[val] )== identifier[list] : keyword[if] literal[string] keyword[in] identifier[val] [ literal[int] ]: identifier[new_val] ={} identifier[new_val_ord] =[ ] keyword[for] identifier[item] keyword[in] identifier[val] : keyword[try] : identifier[m] , identifier[n] = identifier[item] . identifier[split] ( literal[string] ) keyword[except] identifier[ValueError] : identifier[print] ( literal[string] . identifier[format] ( identifier[item] )) keyword[return] identifier[m] , identifier[n] = identifier[m] . identifier[strip] (), identifier[n] . identifier[strip] () keyword[if] literal[string] keyword[in] identifier[n] : identifier[n] = identifier[n] . identifier[split] ( literal[string] ) identifier[n] = identifier[de_blank] ( identifier[n] ) identifier[n] =[ identifier[to_number] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[n] ] keyword[else] : identifier[n] = identifier[to_number] ( identifier[n] ) identifier[new_val] . identifier[update] ({ identifier[m] . identifier[strip] (): identifier[n] }) identifier[new_val_ord] . identifier[append] ([ identifier[m] . 
identifier[strip] (), identifier[n] ]) identifier[ret_dict] [ identifier[key] ]= identifier[new_val] identifier[ret_dict_ord] [ identifier[key] ]= identifier[new_val_ord] identifier[ret_dict] [ literal[string] ]= identifier[ret_dict] [ literal[string] ][ literal[int] ] identifier[ret_dict] [ literal[string] ]= identifier[ret_dict] [ literal[string] ][ literal[int] ] identifier[ret_dict] [ literal[string] ]= identifier[ret_dict] [ literal[string] ][ literal[int] ] identifier[ret_dict] [ literal[string] ]= identifier[list] ( identifier[ret_dict] [ literal[string] ]. identifier[keys] ()) identifier[ret_dict] [ literal[string] ]= identifier[list] ( identifier[ret_dict] [ literal[string] ]. identifier[keys] ())+ identifier[list] ( identifier[ret_dict] [ literal[string] ]. identifier[keys] ()) identifier[ret_dict] [ literal[string] ]= identifier[ret_dict_ord] [ literal[string] ] identifier[ret_dict] [ literal[string] ]= identifier[ret_dict_ord] [ literal[string] ] identifier[ret_dict] [ literal[string] ]= identifier[ret_dict_ord] [ literal[string] ] identifier[copy_algebs] =[] identifier[copy_states] =[] keyword[for] identifier[item] keyword[in] identifier[ret_dict] [ literal[string] ]: identifier[key] , identifier[val] = identifier[item] keyword[if] identifier[val] [ literal[int] ]== literal[string] : identifier[copy_algebs] . identifier[append] ( identifier[key] ) keyword[elif] identifier[val] [ literal[int] ]== literal[string] : identifier[copy_states] . identifier[append] ( identifier[key] ) keyword[elif] identifier[val] [ literal[int] ]== literal[string] : identifier[ret_dict] [ literal[string] ]. identifier[append] ( identifier[key] ) identifier[ret_dict] [ literal[string] ]= identifier[copy_algebs] identifier[ret_dict] [ literal[string] ]= identifier[copy_states] keyword[return] identifier[run] ( identifier[system] ,** identifier[ret_dict] )
def read(file, system): """Parse an ANDES card file into internal variables""" try: fid = open(file, 'r') raw_file = fid.readlines() # depends on [control=['try'], data=[]] except IOError: print('* IOError while reading input card file.') return # depends on [control=['except'], data=[]] ret_dict = dict() ret_dict['outfile'] = file.split('.')[0].lower() + '.py' (key, val) = (None, None) for (idx, line) in enumerate(raw_file): line = line.strip() if not line: continue # depends on [control=['if'], data=[]] if line.startswith('#'): continue # depends on [control=['if'], data=[]] elif '#' in line: line = line.split('#')[0] # depends on [control=['if'], data=['line']] if '=' in line: # defining a field (key, val) = line.split('=') (key, val) = (key.strip(), val.strip()) val = [] if val == '' else val ret_dict.update({key: val}) if val: val = val.split(';') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['line']] else: val.extend(line.split(';')) if val: val = de_blank(val) ret_dict[key] = val # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] ret_dict_ord = dict(ret_dict) for (key, val) in ret_dict.items(): if not val: continue # depends on [control=['if'], data=[]] if type(val) == list: if ':' in val[0]: new_val = {} # return in a dictionary new_val_ord = [] # return in an ordered list with the dict keys at 0 for item in val: try: (m, n) = item.split(':') # depends on [control=['try'], data=[]] except ValueError: print('* Error: check line <{}>'.format(item)) return # depends on [control=['except'], data=[]] (m, n) = (m.strip(), n.strip()) if ',' in n: n = n.split(',') n = de_blank(n) n = [to_number(i) for i in n] # depends on [control=['if'], data=['n']] else: n = to_number(n) new_val.update({m.strip(): n}) new_val_ord.append([m.strip(), n]) # depends on [control=['for'], data=['item']] ret_dict[key] = new_val ret_dict_ord[key] = new_val_ord # depends on [control=['if'], data=[]] # depends on [control=['if'], 
data=[]] # depends on [control=['for'], data=[]] ret_dict['name'] = ret_dict['name'][0] ret_dict['doc_string'] = ret_dict['doc_string'][0] ret_dict['group'] = ret_dict['group'][0] ret_dict['service_keys'] = list(ret_dict['service_eq'].keys()) ret_dict['consts'] = list(ret_dict['data'].keys()) + list(ret_dict['service_eq'].keys()) ret_dict['init1_eq'] = ret_dict_ord['init1_eq'] ret_dict['service_eq'] = ret_dict_ord['service_eq'] ret_dict['ctrl'] = ret_dict_ord['ctrl'] copy_algebs = [] copy_states = [] for item in ret_dict['ctrl']: (key, val) = item if val[3] == 'y': copy_algebs.append(key) # depends on [control=['if'], data=[]] elif val[3] == 'x': copy_states.append(key) # depends on [control=['if'], data=[]] elif val[3] == 'c': ret_dict['consts'].append(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] ret_dict['copy_algebs'] = copy_algebs ret_dict['copy_states'] = copy_states return run(system, **ret_dict)
def timer(func): """Time a method and print its duration after return """ name = func.__name__ @wraps(func) def timed_func(self, *args, **kwargs): # pylint: disable=missing-docstring _start = time.time() out = func(self, *args, **kwargs) self.log(2, '{0} took {1:.1f} sec'.format(name, time.time() - _start)) return out return timed_func
def function[timer, parameter[func]]: constant[Time a method and print its duration after return ] variable[name] assign[=] name[func].__name__ def function[timed_func, parameter[self]]: variable[_start] assign[=] call[name[time].time, parameter[]] variable[out] assign[=] call[name[func], parameter[name[self], <ast.Starred object at 0x7da204621b70>]] call[name[self].log, parameter[constant[2], call[constant[{0} took {1:.1f} sec].format, parameter[name[name], binary_operation[call[name[time].time, parameter[]] - name[_start]]]]]] return[name[out]] return[name[timed_func]]
keyword[def] identifier[timer] ( identifier[func] ): literal[string] identifier[name] = identifier[func] . identifier[__name__] @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[timed_func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): identifier[_start] = identifier[time] . identifier[time] () identifier[out] = identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) identifier[self] . identifier[log] ( literal[int] , literal[string] . identifier[format] ( identifier[name] , identifier[time] . identifier[time] ()- identifier[_start] )) keyword[return] identifier[out] keyword[return] identifier[timed_func]
def timer(func): """Time a method and print its duration after return """ name = func.__name__ @wraps(func) def timed_func(self, *args, **kwargs): # pylint: disable=missing-docstring _start = time.time() out = func(self, *args, **kwargs) self.log(2, '{0} took {1:.1f} sec'.format(name, time.time() - _start)) return out return timed_func
def delete_snapshot(snapshots_ids=None, config="root"): ''' Deletes an snapshot config Configuration name. (Default: root) snapshots_ids List of the snapshots IDs to be deleted. CLI example: .. code-block:: bash salt '*' snapper.delete_snapshot 54 salt '*' snapper.delete_snapshot config=root 54 salt '*' snapper.delete_snapshot config=root snapshots_ids=[54,55,56] ''' if not snapshots_ids: raise CommandExecutionError('Error: No snapshot ID has been provided') try: current_snapshots_ids = [x['id'] for x in list_snapshots(config)] if not isinstance(snapshots_ids, list): snapshots_ids = [snapshots_ids] if not set(snapshots_ids).issubset(set(current_snapshots_ids)): raise CommandExecutionError( "Error: Snapshots '{0}' not found".format(", ".join( [six.text_type(x) for x in set(snapshots_ids).difference( set(current_snapshots_ids))])) ) snapper.DeleteSnapshots(config, snapshots_ids) return {config: {"ids": snapshots_ids, "status": "deleted"}} except dbus.DBusException as exc: raise CommandExecutionError(_dbus_exception_to_reason(exc, locals()))
def function[delete_snapshot, parameter[snapshots_ids, config]]: constant[ Deletes an snapshot config Configuration name. (Default: root) snapshots_ids List of the snapshots IDs to be deleted. CLI example: .. code-block:: bash salt '*' snapper.delete_snapshot 54 salt '*' snapper.delete_snapshot config=root 54 salt '*' snapper.delete_snapshot config=root snapshots_ids=[54,55,56] ] if <ast.UnaryOp object at 0x7da1b1cb1d80> begin[:] <ast.Raise object at 0x7da1b1cb3d60> <ast.Try object at 0x7da1b1cb2aa0>
keyword[def] identifier[delete_snapshot] ( identifier[snapshots_ids] = keyword[None] , identifier[config] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[snapshots_ids] : keyword[raise] identifier[CommandExecutionError] ( literal[string] ) keyword[try] : identifier[current_snapshots_ids] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[list_snapshots] ( identifier[config] )] keyword[if] keyword[not] identifier[isinstance] ( identifier[snapshots_ids] , identifier[list] ): identifier[snapshots_ids] =[ identifier[snapshots_ids] ] keyword[if] keyword[not] identifier[set] ( identifier[snapshots_ids] ). identifier[issubset] ( identifier[set] ( identifier[current_snapshots_ids] )): keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( [ identifier[six] . identifier[text_type] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[set] ( identifier[snapshots_ids] ). identifier[difference] ( identifier[set] ( identifier[current_snapshots_ids] ))])) ) identifier[snapper] . identifier[DeleteSnapshots] ( identifier[config] , identifier[snapshots_ids] ) keyword[return] { identifier[config] :{ literal[string] : identifier[snapshots_ids] , literal[string] : literal[string] }} keyword[except] identifier[dbus] . identifier[DBusException] keyword[as] identifier[exc] : keyword[raise] identifier[CommandExecutionError] ( identifier[_dbus_exception_to_reason] ( identifier[exc] , identifier[locals] ()))
def delete_snapshot(snapshots_ids=None, config='root'): """ Deletes an snapshot config Configuration name. (Default: root) snapshots_ids List of the snapshots IDs to be deleted. CLI example: .. code-block:: bash salt '*' snapper.delete_snapshot 54 salt '*' snapper.delete_snapshot config=root 54 salt '*' snapper.delete_snapshot config=root snapshots_ids=[54,55,56] """ if not snapshots_ids: raise CommandExecutionError('Error: No snapshot ID has been provided') # depends on [control=['if'], data=[]] try: current_snapshots_ids = [x['id'] for x in list_snapshots(config)] if not isinstance(snapshots_ids, list): snapshots_ids = [snapshots_ids] # depends on [control=['if'], data=[]] if not set(snapshots_ids).issubset(set(current_snapshots_ids)): raise CommandExecutionError("Error: Snapshots '{0}' not found".format(', '.join([six.text_type(x) for x in set(snapshots_ids).difference(set(current_snapshots_ids))]))) # depends on [control=['if'], data=[]] snapper.DeleteSnapshots(config, snapshots_ids) return {config: {'ids': snapshots_ids, 'status': 'deleted'}} # depends on [control=['try'], data=[]] except dbus.DBusException as exc: raise CommandExecutionError(_dbus_exception_to_reason(exc, locals())) # depends on [control=['except'], data=['exc']]
def _LookUpSeasonDirectory(self, showID, showDir, seasonNum):
    """
    Look up season directory. First attempt to find match from database,
    otherwise search TV show directory. If no match is found in the database
    the user can choose to accept a match from the TV show directory, enter
    a new directory name to use or accept an autogenerated name.

    Parameters
    ----------
    showID : int
      Show ID number

    showDir : string
      Path to show file directory

    seasonNum : int
      Season number

    Returns
    ----------
    string
      Name of season directory to use. This can be a blank string to use
      the root show directory, an autogenerated string or a user given
      string.
    """
    goodlogging.Log.Info("RENAMER", "Looking up season directory for show {0}".format(showID))
    goodlogging.Log.IncreaseIndent()

    # First preference: a season directory previously stored in the database.
    seasonDirName = self._db.SearchSeasonDirTable(showID, seasonNum)

    if seasonDirName is not None:
        goodlogging.Log.Info("RENAMER", "Found season directory match from database: {0}".format(seasonDirName))
    else:
        # No database match: inspect the show directory on disk.
        goodlogging.Log.Info("RENAMER", "Looking up season directory (Season {0}) in {1}".format(seasonNum, showDir))

        if os.path.isdir(showDir) is False:
            # Show directory does not exist yet, so nothing on disk can
            # match -- generate a new season directory name instead.
            goodlogging.Log.Info("RENAMER", "Show directory ({0}) is not an existing directory".format(showDir))
            seasonDirName = self._CreateNewSeasonDir(seasonNum)
        else:
            # Collect subdirectories that look like this season: the name
            # must contain "Season" and exactly one number, and that number
            # must equal the requested season.
            matchDirList = []
            for dirName in os.listdir(showDir):
                subDir = os.path.join(showDir, dirName)
                if os.path.isdir(subDir):
                    seasonResult = re.findall("Season", dirName)
                    if len(seasonResult) > 0:
                        numResult = re.findall("[0-9]+", dirName)
                        numResult = set(numResult)
                        if len(numResult) == 1:
                            if int(numResult.pop()) == int(seasonNum):
                                matchDirList.append(dirName)

            # FIX: bind the prompt before both branches. Previously it was
            # only set in the interactive branch, leaving a latent
            # UnboundLocalError path in the recursive-selection loop below.
            listDirPrompt = "enter 'ls' to list all items in show directory"

            if self._skipUserInput is True:
                # Unattended mode: accept only a single unambiguous match.
                if len(matchDirList) == 1:
                    userAcceptance = matchDirList[0]
                    # BUGFIX: previously this logged seasonDirName, which is
                    # still None at this point; log the actual selection.
                    goodlogging.Log.Info("RENAMER", "Automatic selection of season directory: {0}".format(userAcceptance))
                else:
                    userAcceptance = None
                    goodlogging.Log.Info("RENAMER", "Could not make automatic selection of season directory")
            else:
                userAcceptance = util.UserAcceptance(matchDirList, promptComment = listDirPrompt, xStrOverride = "to create new season directory")

            if userAcceptance in matchDirList:
                seasonDirName = userAcceptance
            elif userAcceptance is None:
                seasonDirName = self._CreateNewSeasonDir(seasonNum)
            else:
                # User typed a free-form lookup string: repeatedly match it
                # against the show directory contents until a directory is
                # chosen or a new one is created.
                recursiveSelectionComplete = False
                promptOnly = False
                dirLookup = userAcceptance
                while recursiveSelectionComplete is False:
                    dirList = os.listdir(showDir)
                    if dirLookup.lower() == 'ls':
                        # Special command: list the directory contents, then
                        # re-prompt without attempting a match.
                        dirLookup = ''
                        promptOnly = True
                        if len(dirList) == 0:
                            goodlogging.Log.Info("RENAMER", "Show directory is empty")
                        else:
                            goodlogging.Log.Info("RENAMER", "Show directory contains: {0}".format(', '.join(dirList)))
                    else:
                        matchDirList = util.GetBestMatch(dirLookup, dirList)
                        response = util.UserAcceptance(matchDirList, promptComment = listDirPrompt, promptOnly = promptOnly, xStrOverride = "to create new season directory")
                        promptOnly = False

                        if response in matchDirList:
                            seasonDirName = response
                            recursiveSelectionComplete = True
                        elif response is None:
                            seasonDirName = self._CreateNewSeasonDir(seasonNum)
                            recursiveSelectionComplete = True
                        else:
                            dirLookup = response

    # Remember the chosen directory for future episodes of this show/season.
    if seasonDirName is not None:
        self._db.AddSeasonDirTable(showID, seasonNum, seasonDirName)

    goodlogging.Log.DecreaseIndent()
    return seasonDirName
def function[_LookUpSeasonDirectory, parameter[self, showID, showDir, seasonNum]]: constant[ Look up season directory. First attempt to find match from database, otherwise search TV show directory. If no match is found in the database the user can choose to accept a match from the TV show directory, enter a new directory name to use or accept an autogenerated name. Parameters ---------- showID : int Show ID number showDir : string Path to show file directory seasonNum : int Season number Returns ---------- string Name of season directory to use. This can be a blank string to use the root show directory, an autogenerated string or a user given string. ] call[name[goodlogging].Log.Info, parameter[constant[RENAMER], call[constant[Looking up season directory for show {0}].format, parameter[name[showID]]]]] call[name[goodlogging].Log.IncreaseIndent, parameter[]] variable[seasonDirName] assign[=] call[name[self]._db.SearchSeasonDirTable, parameter[name[showID], name[seasonNum]]] if compare[name[seasonDirName] is_not constant[None]] begin[:] call[name[goodlogging].Log.Info, parameter[constant[RENAMER], call[constant[Found season directory match from database: {0}].format, parameter[name[seasonDirName]]]]] call[name[goodlogging].Log.DecreaseIndent, parameter[]] return[name[seasonDirName]]
keyword[def] identifier[_LookUpSeasonDirectory] ( identifier[self] , identifier[showID] , identifier[showDir] , identifier[seasonNum] ): literal[string] identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[showID] )) identifier[goodlogging] . identifier[Log] . identifier[IncreaseIndent] () identifier[seasonDirName] = identifier[self] . identifier[_db] . identifier[SearchSeasonDirTable] ( identifier[showID] , identifier[seasonNum] ) keyword[if] identifier[seasonDirName] keyword[is] keyword[not] keyword[None] : identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[seasonDirName] )) keyword[else] : identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[seasonNum] , identifier[showDir] )) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[showDir] ) keyword[is] keyword[False] : identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[showDir] )) identifier[seasonDirName] = identifier[self] . identifier[_CreateNewSeasonDir] ( identifier[seasonNum] ) keyword[else] : identifier[matchDirList] =[] keyword[for] identifier[dirName] keyword[in] identifier[os] . identifier[listdir] ( identifier[showDir] ): identifier[subDir] = identifier[os] . identifier[path] . identifier[join] ( identifier[showDir] , identifier[dirName] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[subDir] ): identifier[seasonResult] = identifier[re] . identifier[findall] ( literal[string] , identifier[dirName] ) keyword[if] identifier[len] ( identifier[seasonResult] )> literal[int] : identifier[numResult] = identifier[re] . 
identifier[findall] ( literal[string] , identifier[dirName] ) identifier[numResult] = identifier[set] ( identifier[numResult] ) keyword[if] identifier[len] ( identifier[numResult] )== literal[int] : keyword[if] identifier[int] ( identifier[numResult] . identifier[pop] ())== identifier[int] ( identifier[seasonNum] ): identifier[matchDirList] . identifier[append] ( identifier[dirName] ) keyword[if] identifier[self] . identifier[_skipUserInput] keyword[is] keyword[True] : keyword[if] identifier[len] ( identifier[matchDirList] )== literal[int] : identifier[userAcceptance] = identifier[matchDirList] [ literal[int] ] identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( identifier[seasonDirName] )) keyword[else] : identifier[userAcceptance] = keyword[None] identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] ) keyword[else] : identifier[listDirPrompt] = literal[string] identifier[userAcceptance] = identifier[util] . identifier[UserAcceptance] ( identifier[matchDirList] , identifier[promptComment] = identifier[listDirPrompt] , identifier[xStrOverride] = literal[string] ) keyword[if] identifier[userAcceptance] keyword[in] identifier[matchDirList] : identifier[seasonDirName] = identifier[userAcceptance] keyword[elif] identifier[userAcceptance] keyword[is] keyword[None] : identifier[seasonDirName] = identifier[self] . identifier[_CreateNewSeasonDir] ( identifier[seasonNum] ) keyword[else] : identifier[recursiveSelectionComplete] = keyword[False] identifier[promptOnly] = keyword[False] identifier[dirLookup] = identifier[userAcceptance] keyword[while] identifier[recursiveSelectionComplete] keyword[is] keyword[False] : identifier[dirList] = identifier[os] . identifier[listdir] ( identifier[showDir] ) keyword[if] identifier[dirLookup] . 
identifier[lower] ()== literal[string] : identifier[dirLookup] = literal[string] identifier[promptOnly] = keyword[True] keyword[if] identifier[len] ( identifier[dirList] )== literal[int] : identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] ) keyword[else] : identifier[goodlogging] . identifier[Log] . identifier[Info] ( literal[string] , literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[dirList] ))) keyword[else] : identifier[matchDirList] = identifier[util] . identifier[GetBestMatch] ( identifier[dirLookup] , identifier[dirList] ) identifier[response] = identifier[util] . identifier[UserAcceptance] ( identifier[matchDirList] , identifier[promptComment] = identifier[listDirPrompt] , identifier[promptOnly] = identifier[promptOnly] , identifier[xStrOverride] = literal[string] ) identifier[promptOnly] = keyword[False] keyword[if] identifier[response] keyword[in] identifier[matchDirList] : identifier[seasonDirName] = identifier[response] identifier[recursiveSelectionComplete] = keyword[True] keyword[elif] identifier[response] keyword[is] keyword[None] : identifier[seasonDirName] = identifier[self] . identifier[_CreateNewSeasonDir] ( identifier[seasonNum] ) identifier[recursiveSelectionComplete] = keyword[True] keyword[else] : identifier[dirLookup] = identifier[response] keyword[if] identifier[seasonDirName] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_db] . identifier[AddSeasonDirTable] ( identifier[showID] , identifier[seasonNum] , identifier[seasonDirName] ) identifier[goodlogging] . identifier[Log] . identifier[DecreaseIndent] () keyword[return] identifier[seasonDirName]
def _LookUpSeasonDirectory(self, showID, showDir, seasonNum): """ Look up season directory. First attempt to find match from database, otherwise search TV show directory. If no match is found in the database the user can choose to accept a match from the TV show directory, enter a new directory name to use or accept an autogenerated name. Parameters ---------- showID : int Show ID number showDir : string Path to show file directory seasonNum : int Season number Returns ---------- string Name of season directory to use. This can be a blank string to use the root show directory, an autogenerated string or a user given string. """ goodlogging.Log.Info('RENAMER', 'Looking up season directory for show {0}'.format(showID)) goodlogging.Log.IncreaseIndent() # Look up existing season folder from database seasonDirName = self._db.SearchSeasonDirTable(showID, seasonNum) if seasonDirName is not None: goodlogging.Log.Info('RENAMER', 'Found season directory match from database: {0}'.format(seasonDirName)) # depends on [control=['if'], data=['seasonDirName']] else: # Look up existing season folder in show directory goodlogging.Log.Info('RENAMER', 'Looking up season directory (Season {0}) in {1}'.format(seasonNum, showDir)) if os.path.isdir(showDir) is False: goodlogging.Log.Info('RENAMER', 'Show directory ({0}) is not an existing directory'.format(showDir)) seasonDirName = self._CreateNewSeasonDir(seasonNum) # depends on [control=['if'], data=[]] else: matchDirList = [] for dirName in os.listdir(showDir): subDir = os.path.join(showDir, dirName) if os.path.isdir(subDir): seasonResult = re.findall('Season', dirName) if len(seasonResult) > 0: numResult = re.findall('[0-9]+', dirName) numResult = set(numResult) if len(numResult) == 1: if int(numResult.pop()) == int(seasonNum): matchDirList.append(dirName) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on 
[control=['for'], data=['dirName']] if self._skipUserInput is True: if len(matchDirList) == 1: userAcceptance = matchDirList[0] goodlogging.Log.Info('RENAMER', 'Automatic selection of season directory: {0}'.format(seasonDirName)) # depends on [control=['if'], data=[]] else: userAcceptance = None goodlogging.Log.Info('RENAMER', 'Could not make automatic selection of season directory') # depends on [control=['if'], data=[]] else: listDirPrompt = "enter 'ls' to list all items in show directory" userAcceptance = util.UserAcceptance(matchDirList, promptComment=listDirPrompt, xStrOverride='to create new season directory') if userAcceptance in matchDirList: seasonDirName = userAcceptance # depends on [control=['if'], data=['userAcceptance']] elif userAcceptance is None: seasonDirName = self._CreateNewSeasonDir(seasonNum) # depends on [control=['if'], data=[]] else: recursiveSelectionComplete = False promptOnly = False dirLookup = userAcceptance while recursiveSelectionComplete is False: dirList = os.listdir(showDir) if dirLookup.lower() == 'ls': dirLookup = '' promptOnly = True if len(dirList) == 0: goodlogging.Log.Info('RENAMER', 'Show directory is empty') # depends on [control=['if'], data=[]] else: goodlogging.Log.Info('RENAMER', 'Show directory contains: {0}'.format(', '.join(dirList))) # depends on [control=['if'], data=[]] else: matchDirList = util.GetBestMatch(dirLookup, dirList) response = util.UserAcceptance(matchDirList, promptComment=listDirPrompt, promptOnly=promptOnly, xStrOverride='to create new season directory') promptOnly = False if response in matchDirList: seasonDirName = response recursiveSelectionComplete = True # depends on [control=['if'], data=['response']] elif response is None: seasonDirName = self._CreateNewSeasonDir(seasonNum) recursiveSelectionComplete = True # depends on [control=['if'], data=[]] else: dirLookup = response # depends on [control=['while'], data=['recursiveSelectionComplete']] # Add season directory to database if seasonDirName 
is not None: self._db.AddSeasonDirTable(showID, seasonNum, seasonDirName) # depends on [control=['if'], data=['seasonDirName']] goodlogging.Log.DecreaseIndent() return seasonDirName
def make_entry_point(patches, original_entry_point):
    """Use this to make a console_script entry point for your application
    which applies patches.

    :param patches: iterable of pymonkey patches to apply.  Ex: ('my-patch,)
    :param original_entry_point: Such as 'pip'
    """
    def entry(argv=None):
        if argv is None:
            argv = sys.argv[1:]
        # Rebuild the command line as: <patches> -- <entry point> <argv>
        cmdline = tuple(patches) + ('--', original_entry_point) + tuple(argv)
        return main(cmdline)

    return entry
def function[make_entry_point, parameter[patches, original_entry_point]]: constant[Use this to make a console_script entry point for your application which applies patches. :param patches: iterable of pymonkey patches to apply. Ex: ('my-patch,) :param original_entry_point: Such as 'pip' ] def function[entry, parameter[argv]]: variable[argv] assign[=] <ast.IfExp object at 0x7da1b191fd60> return[call[name[main], parameter[binary_operation[binary_operation[call[name[tuple], parameter[name[patches]]] + tuple[[<ast.Constant object at 0x7da1b191cc40>, <ast.Name object at 0x7da1b191f2b0>]]] + call[name[tuple], parameter[name[argv]]]]]]] return[name[entry]]
keyword[def] identifier[make_entry_point] ( identifier[patches] , identifier[original_entry_point] ): literal[string] keyword[def] identifier[entry] ( identifier[argv] = keyword[None] ): identifier[argv] = identifier[argv] keyword[if] identifier[argv] keyword[is] keyword[not] keyword[None] keyword[else] identifier[sys] . identifier[argv] [ literal[int] :] keyword[return] identifier[main] ( identifier[tuple] ( identifier[patches] )+( literal[string] , identifier[original_entry_point] )+ identifier[tuple] ( identifier[argv] ) ) keyword[return] identifier[entry]
def make_entry_point(patches, original_entry_point): """Use this to make a console_script entry point for your application which applies patches. :param patches: iterable of pymonkey patches to apply. Ex: ('my-patch,) :param original_entry_point: Such as 'pip' """ def entry(argv=None): argv = argv if argv is not None else sys.argv[1:] return main(tuple(patches) + ('--', original_entry_point) + tuple(argv)) return entry
def CREATE(self, value, offset, size):
    """Create a new account with associated code"""
    # Last and current transaction are the same at this point.
    tx = self.world.last_transaction
    if tx.result == 'RETURN':
        # Success: install the returned bytecode at the new address.
        self.world.set_code(tx.address, tx.return_data)
        return tx.address
    # Failure: roll back the account and report address 0 to the caller.
    self.world.delete_account(tx.address)
    return 0
def function[CREATE, parameter[self, value, offset, size]]: constant[Create a new account with associated code] variable[tx] assign[=] name[self].world.last_transaction variable[address] assign[=] name[tx].address if compare[name[tx].result equal[==] constant[RETURN]] begin[:] call[name[self].world.set_code, parameter[name[tx].address, name[tx].return_data]] return[name[address]]
keyword[def] identifier[CREATE] ( identifier[self] , identifier[value] , identifier[offset] , identifier[size] ): literal[string] identifier[tx] = identifier[self] . identifier[world] . identifier[last_transaction] identifier[address] = identifier[tx] . identifier[address] keyword[if] identifier[tx] . identifier[result] == literal[string] : identifier[self] . identifier[world] . identifier[set_code] ( identifier[tx] . identifier[address] , identifier[tx] . identifier[return_data] ) keyword[else] : identifier[self] . identifier[world] . identifier[delete_account] ( identifier[address] ) identifier[address] = literal[int] keyword[return] identifier[address]
def CREATE(self, value, offset, size): """Create a new account with associated code""" tx = self.world.last_transaction # At this point last and current tx are the same. address = tx.address if tx.result == 'RETURN': self.world.set_code(tx.address, tx.return_data) # depends on [control=['if'], data=[]] else: self.world.delete_account(address) address = 0 return address